author	jiesun@google.com <jiesun@google.com@0039d316-1c4b-4281-b951-d872f2087c98>	2010-06-23 00:54:59 +0000
committer	jiesun@google.com <jiesun@google.com@0039d316-1c4b-4281-b951-d872f2087c98>	2010-06-23 00:54:59 +0000
commit	36bfbcb2eeca4ad0c72525fb8469d5214679d626 (patch)
tree	01a35b1c24e3a0a5e32586da97c23c6fb06a6faf /media
parent	69e248cdb27c449d37ddac21058751c322c38dd2 (diff)
Code cleanup of media code for video renderers.

Add more generic surface format and surface type signalling from the video decoder to the video renderer: if we want to allocate buffers in the renderer, we need to know this kind of information. Remove the redundant ParseMediaFormat and the duplicated width_, height_, and uses_egl_image_ members in the derived classes of VideoRendererBase.

Review URL: http://codereview.chromium.org/2829009

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@50558 0039d316-1c4b-4281-b951-d872f2087c98
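Purely for illustration (not part of this commit): a minimal sketch of the new decoder-to-renderer signalling built from the APIs touched in the diff below. ExampleAdvertiseFormat, ExampleVideoRenderer and the fully qualified media::mime_type:: spelling are assumptions/stand-ins, not code from the tree.

// A minimal sketch, assuming the media/ tree at this revision.
#include "media/base/media_format.h"
#include "media/base/video_frame.h"
#include "media/filters/video_renderer_base.h"

// Decoder side: publish the surface type and format through MediaFormat
// instead of using a special EGLImage MIME type.
void ExampleAdvertiseFormat(media::MediaFormat* format, int width, int height) {
  format->SetAsString(media::MediaFormat::kMimeType,
                      media::mime_type::kUncompressedVideo);
  format->SetAsInteger(media::MediaFormat::kSurfaceType,
                       media::VideoFrame::TYPE_SYSTEM_MEMORY);
  format->SetAsInteger(media::MediaFormat::kSurfaceFormat,
                       media::VideoFrame::YV12);
  format->SetAsInteger(media::MediaFormat::kWidth, width);
  format->SetAsInteger(media::MediaFormat::kHeight, height);
}

// Renderer side: subclasses stop keeping their own width_/height_/
// uses_egl_image_ copies. (Other required overrides are omitted here.)
class ExampleVideoRenderer : public media::VideoRendererBase {
 public:
  static bool IsMediaFormatSupported(const media::MediaFormat& media_format) {
    // NULL out-params: only check that every required key is present.
    return ParseMediaFormat(media_format, NULL, NULL, NULL, NULL);
  }

 protected:
  virtual bool OnInitialize(media::VideoDecoder* decoder) {
    // VideoRendererBase::Initialize() already parsed the decoder's media
    // format; read back the accessors it now exposes.
    return width() > 0 && height() > 0 &&
           surface_type() == media::VideoFrame::TYPE_SYSTEM_MEMORY;
  }
};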
Diffstat (limited to 'media')
-rw-r--r--media/base/media_format.cc10
-rw-r--r--media/base/media_format.h1
-rw-r--r--media/base/video_frame.cc4
-rw-r--r--media/base/video_frame.h12
-rw-r--r--media/filters/ffmpeg_video_decoder.cc19
-rw-r--r--media/filters/ffmpeg_video_decoder_unittest.cc2
-rw-r--r--media/filters/omx_video_decode_engine.cc4
-rw-r--r--media/filters/omx_video_decoder.cc28
-rw-r--r--media/filters/video_renderer_base.cc47
-rw-r--r--media/filters/video_renderer_base.h33
-rw-r--r--media/filters/video_renderer_base_unittest.cc4
-rw-r--r--media/tools/player_wtl/wtl_renderer.cc13
-rw-r--r--media/tools/player_x11/gl_video_renderer.cc11
-rw-r--r--media/tools/player_x11/gl_video_renderer.h4
-rw-r--r--media/tools/player_x11/gles_video_renderer.cc31
-rw-r--r--media/tools/player_x11/gles_video_renderer.h4
-rw-r--r--media/tools/player_x11/x11_video_renderer.cc33
-rw-r--r--media/tools/player_x11/x11_video_renderer.h4
18 files changed, 126 insertions, 138 deletions
diff --git a/media/base/media_format.cc b/media/base/media_format.cc
index fca4f89..37371c5 100644
--- a/media/base/media_format.cc
+++ b/media/base/media_format.cc
@@ -44,15 +44,6 @@ const char kUncompressedAudio[] = "audio/x-uncompressed";
// kHeight Integer Display height of the surface
const char kUncompressedVideo[] = "video/x-uncompressed";
-// Represents decoded video data in EGLImage, typically from OpenMAX video
-// decoder.
-// Other information, such as surface format (i.e., YV12), stride and planes are
-// included with the buffer itself and is not part of the MediaFormat.
-// Expected keys:
-// kWidth Integer Display width of the surface
-// kHeight Integer Display height of the surface
-const char kUncompressedVideoEglImage[] = "video/x-uncompressed-eglimage";
-
// Major types of media types begin with the prefix "audio/" or "video/".
const char kMajorTypeVideo[] = "video/";
const char kMajorTypeAudio[] = "audio/";
@@ -63,6 +54,7 @@ const char kMajorTypeAudio[] = "audio/";
const char MediaFormat::kMimeType[] = "MimeType";
const char MediaFormat::kURL[] = "URL";
const char MediaFormat::kSurfaceFormat[] = "SurfaceFormat";
+const char MediaFormat::kSurfaceType[] = "SurfaceType";
const char MediaFormat::kSampleRate[] = "SampleRate";
const char MediaFormat::kSampleBits[] = "SampleBits";
const char MediaFormat::kChannels[] = "Channels";
diff --git a/media/base/media_format.h b/media/base/media_format.h
index 3a7cb3d..8b9cdb0 100644
--- a/media/base/media_format.h
+++ b/media/base/media_format.h
@@ -47,6 +47,7 @@ class MediaFormat {
// Common keys.
static const char kMimeType[];
static const char kURL[];
+ static const char kSurfaceType[];
static const char kSurfaceFormat[];
static const char kSampleRate[];
static const char kSampleBits[];
diff --git a/media/base/video_frame.cc b/media/base/video_frame.cc
index fd684e7..857af70 100644
--- a/media/base/video_frame.cc
+++ b/media/base/video_frame.cc
@@ -118,7 +118,7 @@ void VideoFrame::CreateBlackFrame(int width, int height,
}
// static
-void VideoFrame::CreatePrivateFrame(VideoFrame::BufferType type,
+void VideoFrame::CreatePrivateFrame(VideoFrame::SurfaceType type,
VideoFrame::Format format,
size_t width,
size_t height,
@@ -194,7 +194,7 @@ bool VideoFrame::AllocateYUV() {
return false;
}
-VideoFrame::VideoFrame(VideoFrame::BufferType type,
+VideoFrame::VideoFrame(VideoFrame::SurfaceType type,
VideoFrame::Format format,
size_t width,
size_t height) {
diff --git a/media/base/video_frame.h b/media/base/video_frame.h
index 10e85b2..ddf6644 100644
--- a/media/base/video_frame.h
+++ b/media/base/video_frame.h
@@ -37,7 +37,7 @@ class VideoFrame : public StreamSample {
ASCII, // A frame with ASCII content. For testing only.
};
- enum BufferType {
+ enum SurfaceType {
TYPE_SYSTEM_MEMORY,
TYPE_OMX_BUFFER_HEAD,
TYPE_EGL_IMAGE,
@@ -75,7 +75,7 @@ class VideoFrame : public StreamSample {
scoped_refptr<VideoFrame>* frame_out);
// Creates a new frame of |type| with given parameters.
- static void CreatePrivateFrame(VideoFrame::BufferType type,
+ static void CreatePrivateFrame(VideoFrame::SurfaceType type,
VideoFrame::Format format,
size_t width,
size_t height,
@@ -84,7 +84,7 @@ class VideoFrame : public StreamSample {
void* private_buffer,
scoped_refptr<VideoFrame>* frame_out);
- virtual BufferType type() const { return type_; }
+ virtual SurfaceType type() const { return type_; }
Format format() const { return format_; }
@@ -107,7 +107,7 @@ class VideoFrame : public StreamSample {
protected:
// Clients must use the static CreateFrame() method to create a new frame.
- VideoFrame(BufferType type,
+ VideoFrame(SurfaceType type,
Format format,
size_t video_width,
size_t video_height);
@@ -121,8 +121,8 @@ class VideoFrame : public StreamSample {
// Frame format.
Format format_;
- // Buffer type.
- BufferType type_;
+ // Surface type.
+ SurfaceType type_;
// Width and height of surface.
size_t width_;
diff --git a/media/filters/ffmpeg_video_decoder.cc b/media/filters/ffmpeg_video_decoder.cc
index 592f260..361e5f2 100644
--- a/media/filters/ffmpeg_video_decoder.cc
+++ b/media/filters/ffmpeg_video_decoder.cc
@@ -56,13 +56,6 @@ void FFmpegVideoDecoder::DoInitialize(DemuxerStream* demuxer_stream,
return;
}
- // Only set kMimeType when derived class has not done so.
- if (!media_format_.Contains(MediaFormat::kMimeType))
- media_format_.SetAsString(MediaFormat::kMimeType,
- mime_type::kUncompressedVideo);
- media_format_.SetAsInteger(MediaFormat::kWidth, width_);
- media_format_.SetAsInteger(MediaFormat::kHeight, height_);
-
decode_engine_->Initialize(
message_loop(),
av_stream,
@@ -78,6 +71,18 @@ void FFmpegVideoDecoder::OnInitializeComplete(bool* success, Task* done_cb) {
AutoTaskRunner done_runner(done_cb);
*success = decode_engine_->state() == VideoDecodeEngine::kNormal;
+ if (*success) {
+ media_format_.SetAsString(MediaFormat::kMimeType,
+ mime_type::kUncompressedVideo);
+ media_format_.SetAsInteger(MediaFormat::kWidth, width_);
+ media_format_.SetAsInteger(MediaFormat::kHeight, height_);
+ media_format_.SetAsInteger(
+ MediaFormat::kSurfaceType,
+ static_cast<int>(VideoFrame::TYPE_SYSTEM_MEMORY));
+ media_format_.SetAsInteger(
+ MediaFormat::kSurfaceFormat,
+ static_cast<int>(decode_engine_->GetSurfaceFormat()));
+ }
}
void FFmpegVideoDecoder::DoSeek(base::TimeDelta time, Task* done_cb) {
diff --git a/media/filters/ffmpeg_video_decoder_unittest.cc b/media/filters/ffmpeg_video_decoder_unittest.cc
index 90c16fa..d564a73 100644
--- a/media/filters/ffmpeg_video_decoder_unittest.cc
+++ b/media/filters/ffmpeg_video_decoder_unittest.cc
@@ -255,6 +255,8 @@ TEST_F(FFmpegVideoDecoderTest, Initialize_Successful) {
WithArg<4>(InvokeRunnable())));
EXPECT_CALL(*engine_, state())
.WillOnce(Return(VideoDecodeEngine::kNormal));
+ EXPECT_CALL(*engine_, GetSurfaceFormat())
+ .WillOnce(Return(VideoFrame::YV12));
EXPECT_CALL(callback_, OnFilterCallback());
EXPECT_CALL(callback_, OnCallbackDestroyed());
diff --git a/media/filters/omx_video_decode_engine.cc b/media/filters/omx_video_decode_engine.cc
index b07e4db..cb42138 100644
--- a/media/filters/omx_video_decode_engine.cc
+++ b/media/filters/omx_video_decode_engine.cc
@@ -152,7 +152,9 @@ void OmxVideoDecodeEngine::Flush(Task* done_cb) {
}
VideoFrame::Format OmxVideoDecodeEngine::GetSurfaceFormat() const {
- return VideoFrame::YV12;
+ // TODO(jiesun): Both OmxHeaderType and EGLImage surface types could have
+ // different surface formats.
+ return uses_egl_image_ ? VideoFrame::RGBA : VideoFrame::YV12;
}
VideoDecodeEngine::State OmxVideoDecodeEngine::state() const {
diff --git a/media/filters/omx_video_decoder.cc b/media/filters/omx_video_decoder.cc
index 48d2cb6..eb9d598 100644
--- a/media/filters/omx_video_decoder.cc
+++ b/media/filters/omx_video_decoder.cc
@@ -95,19 +95,6 @@ void OmxVideoDecoder::DoInitialize(DemuxerStream* demuxer_stream,
return;
}
- // Sets the output format.
- if (supports_egl_image_) {
- media_format_.SetAsString(MediaFormat::kMimeType,
- mime_type::kUncompressedVideoEglImage);
- }
- else {
- media_format_.SetAsString(MediaFormat::kMimeType,
- mime_type::kUncompressedVideo);
- }
-
- media_format_.SetAsInteger(MediaFormat::kWidth, width_);
- media_format_.SetAsInteger(MediaFormat::kHeight, height_);
-
// Saves the demuxer stream.
demuxer_stream_ = demuxer_stream;
@@ -139,8 +126,21 @@ void OmxVideoDecoder::InitCompleteTask(FilterCallback* callback) {
DCHECK_EQ(message_loop(), MessageLoop::current());
// Check the status of the decode engine.
- if (omx_engine_->state() == VideoDecodeEngine::kError)
+ if (omx_engine_->state() == VideoDecodeEngine::kError) {
host()->SetError(PIPELINE_ERROR_DECODE);
+ } else {
+ media_format_.SetAsString(MediaFormat::kMimeType,
+ mime_type::kUncompressedVideo);
+ // TODO(jiesun): recycle OmxHeaderType instead of copying back.
+ media_format_.SetAsInteger(MediaFormat::kSurfaceType,
+ supports_egl_image_ ? VideoFrame::TYPE_EGL_IMAGE
+ : VideoFrame::TYPE_SYSTEM_MEMORY);
+ media_format_.SetAsInteger(MediaFormat::kWidth, width_);
+ media_format_.SetAsInteger(MediaFormat::kHeight, height_);
+ VideoFrame::Format format = omx_engine_->GetSurfaceFormat();
+ media_format_.SetAsInteger(MediaFormat::kSurfaceFormat,
+ static_cast<int>(format));
+ }
callback->Run();
delete callback;
diff --git a/media/filters/video_renderer_base.cc b/media/filters/video_renderer_base.cc
index 13b6dee..6eeed60 100644
--- a/media/filters/video_renderer_base.cc
+++ b/media/filters/video_renderer_base.cc
@@ -41,7 +41,6 @@ static const int kIdleMilliseconds = 10;
VideoRendererBase::VideoRendererBase()
: width_(0),
height_(0),
- uses_egl_image_(false),
frame_available_(&lock_),
state_(kUninitialized),
thread_(kNullThreadHandle),
@@ -55,24 +54,36 @@ VideoRendererBase::~VideoRendererBase() {
}
// static
-bool VideoRendererBase::ParseMediaFormat(const MediaFormat& media_format,
- int* width_out, int* height_out,
- bool* uses_egl_image_out) {
+bool VideoRendererBase::ParseMediaFormat(
+ const MediaFormat& media_format,
+ VideoFrame::SurfaceType* surface_type_out,
+ VideoFrame::Format* surface_format_out,
+ int* width_out, int* height_out) {
std::string mime_type;
if (!media_format.GetAsString(MediaFormat::kMimeType, &mime_type))
return false;
- if (mime_type.compare(mime_type::kUncompressedVideo) != 0 &&
- mime_type.compare(mime_type::kUncompressedVideoEglImage) != 0)
+ if (mime_type.compare(mime_type::kUncompressedVideo) != 0)
return false;
- if (mime_type.compare(mime_type::kUncompressedVideoEglImage) == 0)
- *uses_egl_image_out = true;
- else
- *uses_egl_image_out = false;
- if (!media_format.GetAsInteger(MediaFormat::kWidth, width_out))
+ int surface_type;
+ if (!media_format.GetAsInteger(MediaFormat::kSurfaceType, &surface_type))
return false;
- if (!media_format.GetAsInteger(MediaFormat::kHeight, height_out))
+ if (surface_type_out)
+ *surface_type_out = static_cast<VideoFrame::SurfaceType>(surface_type);
+
+ int surface_format;
+ if (!media_format.GetAsInteger(MediaFormat::kSurfaceFormat, &surface_format))
+ return false;
+ if (surface_format_out)
+ *surface_format_out = static_cast<VideoFrame::Format>(surface_format);
+
+ int width, height;
+ if (!media_format.GetAsInteger(MediaFormat::kWidth, &width))
+ return false;
+ if (!media_format.GetAsInteger(MediaFormat::kHeight, &height))
return false;
+ if (width_out) *width_out = width;
+ if (height_out) *height_out = height;
return true;
}
@@ -146,7 +157,7 @@ void VideoRendererBase::Seek(base::TimeDelta time, FilterCallback* callback) {
// TODO(wjia): This would be removed if "Paint" thread allows renderer to
// allocate EGL images before filters are in playing state.
- if (uses_egl_image_) {
+ if (uses_egl_image()) {
state_ = kPaused;
VideoFrame::CreateBlackFrame(width_, height_, &current_frame_);
DCHECK(current_frame_);
@@ -168,8 +179,10 @@ void VideoRendererBase::Initialize(VideoDecoder* decoder,
decoder_->set_fill_buffer_done_callback(
NewCallback(this, &VideoRendererBase::OnFillBufferDone));
// Notify the pipeline of the video dimensions.
- if (!ParseMediaFormat(decoder->media_format(), &width_, &height_,
- &uses_egl_image_)) {
+ if (!ParseMediaFormat(decoder->media_format(),
+ &surface_type_,
+ &surface_format_,
+ &width_, &height_)) {
host()->SetError(PIPELINE_ERROR_INITIALIZATION_FAILED);
callback->Run();
return;
@@ -270,7 +283,7 @@ void VideoRendererBase::ThreadMain() {
if (!frames_.empty() && !frames_.front()->IsEndOfStream()) {
DCHECK_EQ(current_frame_, frames_.front());
frames_.pop_front();
- if (uses_egl_image_ &&
+ if (uses_egl_image() &&
media::VideoFrame::TYPE_EGL_IMAGE == current_frame_->type()) {
decoder_->FillThisBuffer(current_frame_);
}
@@ -354,7 +367,7 @@ void VideoRendererBase::OnFillBufferDone(scoped_refptr<VideoFrame> frame) {
// Enqueue the frame.
frames_.push_back(frame);
- if (uses_egl_image_ &&
+ if (uses_egl_image() &&
media::VideoFrame::TYPE_EGL_IMAGE != current_frame_->type())
current_frame_ = frame;
DCHECK_LE(frames_.size(), kMaxFrames);
diff --git a/media/filters/video_renderer_base.h b/media/filters/video_renderer_base.h
index ba30f52..9f9c865 100644
--- a/media/filters/video_renderer_base.h
+++ b/media/filters/video_renderer_base.h
@@ -20,6 +20,7 @@
#include "base/condition_variable.h"
#include "base/lock.h"
#include "media/base/filters.h"
+#include "media/base/video_frame.h"
namespace media {
@@ -31,12 +32,15 @@ class VideoRendererBase : public VideoRenderer,
VideoRendererBase();
virtual ~VideoRendererBase();
- // Helper method for subclasses to parse out video-related information from
- // a MediaFormat. Returns true if |width_out|, |height_out| and
- // |uses_egl_image_out| were assigned.
- static bool ParseMediaFormat(const MediaFormat& media_format,
- int* width_out, int* height_out,
- bool* uses_egl_image_out);
+ // Helper method to parse out video-related information from a MediaFormat.
+ // Returns true if all the required parameters are present in |media_format|.
+ // |surface_type_out|, |surface_format_out|, |width_out| and |height_out| can
+ // be NULL when the result is not needed.
+ static bool ParseMediaFormat(
+ const MediaFormat& media_format,
+ VideoFrame::SurfaceType* surface_type_out,
+ VideoFrame::Format* surface_format_out,
+ int* width_out, int* height_out);
// MediaFilter implementation.
virtual void Play(FilterCallback* callback);
@@ -85,6 +89,16 @@ class VideoRendererBase : public VideoRenderer,
return decoder_.get();
}
+ int width() { return width_; }
+ int height() { return height_; }
+ VideoFrame::Format surface_format() { return surface_format_; }
+ VideoFrame::SurfaceType surface_type() { return surface_type_; }
+
+ // TODO(jiesun): move this to gles_video_renderer.cc.
+ inline bool uses_egl_image() {
+ return surface_type_ == media::VideoFrame::TYPE_EGL_IMAGE;
+ }
+
private:
// Callback from video decoder to deliver decoded video frames and decrements
// |pending_reads_|.
@@ -108,13 +122,10 @@ class VideoRendererBase : public VideoRenderer,
scoped_refptr<VideoDecoder> decoder_;
- // TODO(wjia): can we move this to at least protected? Seems all derived
- // classes have width_, height_, uses_egl_image_ and same logic to
- // calculate those values.
- // Video dimensions parsed from the decoder's media format.
int width_;
int height_;
- bool uses_egl_image_;
+ VideoFrame::Format surface_format_;
+ VideoFrame::SurfaceType surface_type_;
// Queue of incoming frames as well as the current frame since the last time
// OnFrameAvailable() was called.
diff --git a/media/filters/video_renderer_base_unittest.cc b/media/filters/video_renderer_base_unittest.cc
index 81af7d7..02b352a 100644
--- a/media/filters/video_renderer_base_unittest.cc
+++ b/media/filters/video_renderer_base_unittest.cc
@@ -54,6 +54,10 @@ class VideoRendererBaseTest : public ::testing::Test {
// Sets the essential media format keys for this decoder.
decoder_media_format_.SetAsString(MediaFormat::kMimeType,
mime_type::kUncompressedVideo);
+ decoder_media_format_.SetAsInteger(MediaFormat::kSurfaceType,
+ VideoFrame::TYPE_SYSTEM_MEMORY);
+ decoder_media_format_.SetAsInteger(MediaFormat::kSurfaceFormat,
+ VideoFrame::YV12);
decoder_media_format_.SetAsInteger(MediaFormat::kWidth, kWidth);
decoder_media_format_.SetAsInteger(MediaFormat::kHeight, kHeight);
EXPECT_CALL(*decoder_, media_format())
diff --git a/media/tools/player_wtl/wtl_renderer.cc b/media/tools/player_wtl/wtl_renderer.cc
index 112037c..28a6f3e 100644
--- a/media/tools/player_wtl/wtl_renderer.cc
+++ b/media/tools/player_wtl/wtl_renderer.cc
@@ -16,23 +16,14 @@ WtlVideoRenderer::~WtlVideoRenderer() {
// static
bool WtlVideoRenderer::IsMediaFormatSupported(
const media::MediaFormat& media_format) {
- int width = 0;
- int height = 0;
- bool uses_egl_image = false;
- return ParseMediaFormat(media_format, &width, &height, &uses_egl_image);
+ return ParseMediaFormat(media_format, NULL, NULL, NULL, NULL);
}
void WtlVideoRenderer::OnStop() {
}
bool WtlVideoRenderer::OnInitialize(media::VideoDecoder* decoder) {
- int width = 0;
- int height = 0;
- bool uses_egl_image = false;
- if (!ParseMediaFormat(decoder->media_format(), &width, &height,
- &uses_egl_image))
- return false;
- window_->SetSize(width, height);
+ window_->SetSize(width(), height());
return true;
}
diff --git a/media/tools/player_x11/gl_video_renderer.cc b/media/tools/player_x11/gl_video_renderer.cc
index 10d28d1..a828fbe 100644
--- a/media/tools/player_x11/gl_video_renderer.cc
+++ b/media/tools/player_x11/gl_video_renderer.cc
@@ -26,10 +26,7 @@ GlVideoRenderer::~GlVideoRenderer() {
// static
bool GlVideoRenderer::IsMediaFormatSupported(
const media::MediaFormat& media_format) {
- int width = 0;
- int height = 0;
- bool uses_egl_image = false;
- return ParseMediaFormat(media_format, &width, &height, &uses_egl_image);
+ return ParseMediaFormat(media_format, NULL, NULL, NULL, NULL);
}
void GlVideoRenderer::OnStop() {
@@ -131,14 +128,10 @@ static const char kFragmentShader[] =
static const unsigned int kErrorSize = 4096;
bool GlVideoRenderer::OnInitialize(media::VideoDecoder* decoder) {
- if (!ParseMediaFormat(decoder->media_format(), &width_, &height_,
- &uses_egl_image_))
- return false;
-
LOG(INFO) << "Initializing GL Renderer...";
// Resize the window to fit that of the video.
- XResizeWindow(display_, window_, width_, height_);
+ XResizeWindow(display_, window_, width(), height());
gl_context_ = InitGLContext(display_, window_);
if (!gl_context_)
diff --git a/media/tools/player_x11/gl_video_renderer.h b/media/tools/player_x11/gl_video_renderer.h
index e645950..24599bf 100644
--- a/media/tools/player_x11/gl_video_renderer.h
+++ b/media/tools/player_x11/gl_video_renderer.h
@@ -49,10 +49,6 @@ class GlVideoRenderer : public media::VideoRendererBase {
friend class scoped_refptr<GlVideoRenderer>;
virtual ~GlVideoRenderer();
- int width_;
- int height_;
- bool uses_egl_image_;
-
Display* display_;
Window window_;
diff --git a/media/tools/player_x11/gles_video_renderer.cc b/media/tools/player_x11/gles_video_renderer.cc
index 4d14fee..4bca69c 100644
--- a/media/tools/player_x11/gles_video_renderer.cc
+++ b/media/tools/player_x11/gles_video_renderer.cc
@@ -35,10 +35,7 @@ GlesVideoRenderer::~GlesVideoRenderer() {
// static
bool GlesVideoRenderer::IsMediaFormatSupported(
const media::MediaFormat& media_format) {
- int width = 0;
- int height = 0;
- bool uses_egl_image_ = false;
- return ParseMediaFormat(media_format, &width, &height, &uses_egl_image_);
+ return ParseMediaFormat(media_format, NULL, NULL, NULL, NULL);
}
void GlesVideoRenderer::OnStop() {
@@ -136,10 +133,6 @@ static const char kFragmentShaderEgl[] =
static const unsigned int kErrorSize = 4096;
bool GlesVideoRenderer::OnInitialize(media::VideoDecoder* decoder) {
- if (!ParseMediaFormat(decoder->media_format(), &width_, &height_,
- &uses_egl_image_))
- return false;
-
LOG(INFO) << "Initializing GLES Renderer...";
// Save this instance.
@@ -173,7 +166,7 @@ void GlesVideoRenderer::Paint() {
return;
}
- if (uses_egl_image_) {
+ if (uses_egl_image()) {
if (media::VideoFrame::TYPE_EGL_IMAGE == video_frame->type()) {
GLuint texture = FindTexture(video_frame);
@@ -248,7 +241,7 @@ GLuint GlesVideoRenderer::FindTexture(
bool GlesVideoRenderer::InitializeGles() {
// Resize the window to fit that of the video.
- XResizeWindow(display_, window_, width_, height_);
+ XResizeWindow(display_, window_, width(), height());
egl_display_ = eglGetDisplay(display_);
if (eglGetError() != EGL_SUCCESS) {
@@ -328,13 +321,13 @@ bool GlesVideoRenderer::InitializeGles() {
return false;
}
- EGLint width;
- EGLint height;
- eglQuerySurface(egl_display_, egl_surface_, EGL_WIDTH, &width);
- eglQuerySurface(egl_display_, egl_surface_, EGL_HEIGHT, &height);
- glViewport(0, 0, width_, height_);
+ EGLint surface_width;
+ EGLint surface_height;
+ eglQuerySurface(egl_display_, egl_surface_, EGL_WIDTH, &surface_width);
+ eglQuerySurface(egl_display_, egl_surface_, EGL_HEIGHT, &surface_height);
+ glViewport(0, 0, width(), height());
- if (uses_egl_image_) {
+ if (uses_egl_image()) {
CreateTextureAndProgramEgl();
return true;
}
@@ -404,8 +397,8 @@ void GlesVideoRenderer::CreateTextureAndProgramEgl() {
GL_TEXTURE_2D,
0,
GL_RGBA,
- width_,
- height_,
+ width(),
+ height(),
0,
GL_RGBA,
GL_UNSIGNED_BYTE,
@@ -425,7 +418,7 @@ void GlesVideoRenderer::CreateTextureAndProgramEgl() {
media::VideoFrame:: CreatePrivateFrame(
media::VideoFrame::TYPE_EGL_IMAGE,
media::VideoFrame::RGB565,
- width_, height_, kZero, kZero,
+ width(), height(), kZero, kZero,
egl_image,
&video_frame);
egl_frames_.push_back(std::make_pair(video_frame, texture));
diff --git a/media/tools/player_x11/gles_video_renderer.h b/media/tools/player_x11/gles_video_renderer.h
index 8256c890..195f33e 100644
--- a/media/tools/player_x11/gles_video_renderer.h
+++ b/media/tools/player_x11/gles_video_renderer.h
@@ -65,10 +65,6 @@ class GlesVideoRenderer : public media::VideoRendererBase {
PFNEGLCREATEIMAGEKHRPROC egl_create_image_khr_;
PFNEGLDESTROYIMAGEKHRPROC egl_destroy_image_khr_;
- int width_;
- int height_;
- bool uses_egl_image_;
-
Display* display_;
Window window_;
diff --git a/media/tools/player_x11/x11_video_renderer.cc b/media/tools/player_x11/x11_video_renderer.cc
index a803dd8..a1a9f15 100644
--- a/media/tools/player_x11/x11_video_renderer.cc
+++ b/media/tools/player_x11/x11_video_renderer.cc
@@ -68,10 +68,7 @@ X11VideoRenderer::~X11VideoRenderer() {
// static
bool X11VideoRenderer::IsMediaFormatSupported(
const media::MediaFormat& media_format) {
- int width = 0;
- int height = 0;
- bool uses_egl_image = false;
- return ParseMediaFormat(media_format, &width, &height, &uses_egl_image);
+ return ParseMediaFormat(media_format, NULL, NULL, NULL, NULL);
}
void X11VideoRenderer::OnStop() {
@@ -82,14 +79,10 @@ void X11VideoRenderer::OnStop() {
}
bool X11VideoRenderer::OnInitialize(media::VideoDecoder* decoder) {
- if (!ParseMediaFormat(decoder->media_format(), &width_, &height_,
- &uses_egl_image_))
- return false;
-
LOG(INFO) << "Initializing X11 Renderer...";
// Resize the window to fit that of the video.
- XResizeWindow(display_, window_, width_, height_);
+ XResizeWindow(display_, window_, width(), height());
// Testing XRender support. We'll use the very basic of XRender
// so if it presents it is already good enough. We don't need
@@ -118,11 +111,11 @@ bool X11VideoRenderer::OnInitialize(media::VideoDecoder* decoder) {
DefaultDepth(display_, DefaultScreen(display_)),
ZPixmap,
0,
- static_cast<char*>(malloc(width_ * height_ * 4)),
- width_,
- height_,
+ static_cast<char*>(malloc(width() * height() * 4)),
+ width(),
+ height(),
32,
- width_ * 4);
+ width() * 4);
DCHECK(image_);
// Save this instance.
@@ -175,8 +168,8 @@ void X11VideoRenderer::Paint() {
// Creates a XImage.
XImage image;
memset(&image, 0, sizeof(image));
- image.width = width_;
- image.height = height_;
+ image.width = width();
+ image.height = height();
image.depth = 32;
image.bits_per_pixel = 32;
image.format = ZPixmap;
@@ -192,13 +185,13 @@ void X11VideoRenderer::Paint() {
// Creates a pixmap and uploads from the XImage.
unsigned long pixmap = XCreatePixmap(display_,
window_,
- width_,
- height_,
+ width(),
+ height(),
32);
GC gc = XCreateGC(display_, pixmap, 0, NULL);
XPutImage(display_, pixmap, gc, &image,
0, 0, 0, 0,
- width_, height_);
+ width(), height());
XFreeGC(display_, gc);
// Creates the picture representing the pixmap.
@@ -208,7 +201,7 @@ void X11VideoRenderer::Paint() {
// Composite the picture over the picture representing the window.
XRenderComposite(display_, PictOpSrc, picture, 0,
picture_, 0, 0, 0, 0, 0, 0,
- width_, height_);
+ width(), height());
XRenderFreePicture(display_, picture);
XFreePixmap(display_, pixmap);
@@ -221,7 +214,7 @@ void X11VideoRenderer::Paint() {
// to the window.
GC gc = XCreateGC(display_, window_, 0, NULL);
XPutImage(display_, window_, gc, image_,
- 0, 0, 0, 0, width_, height_);
+ 0, 0, 0, 0, width(), height());
XFlush(display_);
XFreeGC(display_, gc);
}
diff --git a/media/tools/player_x11/x11_video_renderer.h b/media/tools/player_x11/x11_video_renderer.h
index 025ed56..5b7a954 100644
--- a/media/tools/player_x11/x11_video_renderer.h
+++ b/media/tools/player_x11/x11_video_renderer.h
@@ -50,10 +50,6 @@ class X11VideoRenderer : public media::VideoRendererBase {
friend class scoped_refptr<X11VideoRenderer>;
virtual ~X11VideoRenderer();
- int width_;
- int height_;
- bool uses_egl_image_;
-
Display* display_;
Window window_;