diff options
author | jiesun@google.com <jiesun@google.com@0039d316-1c4b-4281-b951-d872f2087c98> | 2010-07-14 21:00:25 +0000 |
---|---|---|
committer | jiesun@google.com <jiesun@google.com@0039d316-1c4b-4281-b951-d872f2087c98> | 2010-07-14 21:00:25 +0000 |
commit | d3d048776df1ba9ad87a884a2cd5a0a944b52cda (patch) | |
tree | a95dbf055838997db5b5b88d0a04a51f1fcc4beb /media/tools | |
parent | e7d9e170144853b9ef889ab5e5fcf44b4c9ef39d (diff) | |
download | chromium_src-d3d048776df1ba9ad87a884a2cd5a0a944b52cda.zip chromium_src-d3d048776df1ba9ad87a884a2cd5a0a944b52cda.tar.gz chromium_src-d3d048776df1ba9ad87a884a2cd5a0a944b52cda.tar.bz2 |
media: refactoring video_render_base to recycle buffers
To make recycling work, we had to define the usage scope of the current frame; otherwise we would introduce tearing, because we would begin decoding into a buffer before the renderer/painter/compositor is done with it.
The current mechanism depends on holding a reference to a copied picture. We do not have that luxury if we do not copy output buffers, so we had to compromise as follows:
1. In pause() (not in the sense of pipeline->pause(), which is implemented by setting playrate = 0), i.e. in filter->pause() (or in the future flush()), which is part of seeking (and, in the future, part of stop() too), we had to return all the buffers to their owner. We then have no current buffer to display, so we use NULL as the current frame in this case.
2. Remove the black frame from the renderer base; it is only valid for system-memory-based video frames, and even then we should use a color fill instead of color conversion and scaling.
3. Pause and stop have to wait for pending reads (actually flushes) and pending paints.
4. We only advance the frame when there are two or more frames in the ready queue.
Review URL: http://codereview.chromium.org/2836038
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@52398 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'media/tools')
-rw-r--r-- | media/tools/player_x11/gl_video_renderer.cc | 6 | ||||
-rw-r--r-- | media/tools/player_x11/gles_video_renderer.cc | 5 | ||||
-rw-r--r-- | media/tools/player_x11/player_x11.cc | 5 | ||||
-rw-r--r-- | media/tools/player_x11/x11_video_renderer.cc | 7 |
4 files changed, 18 insertions, 5 deletions
diff --git a/media/tools/player_x11/gl_video_renderer.cc b/media/tools/player_x11/gl_video_renderer.cc index e2f4681..29b4f38 100644 --- a/media/tools/player_x11/gl_video_renderer.cc +++ b/media/tools/player_x11/gl_video_renderer.cc @@ -250,8 +250,11 @@ void GlVideoRenderer::Paint() { scoped_refptr<media::VideoFrame> video_frame; GetCurrentFrame(&video_frame); - if (!video_frame) + if (!video_frame) { + // TODO(jiesun): Use color fill rather than create black frame then scale. + PutCurrentFrame(video_frame); return; + } // Convert YUV frame to RGB. DCHECK(video_frame->format() == media::VideoFrame::YV12 || @@ -275,6 +278,7 @@ void GlVideoRenderer::Paint() { glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, video_frame->data(i)); } + PutCurrentFrame(video_frame); glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); glXSwapBuffers(display_, window_); diff --git a/media/tools/player_x11/gles_video_renderer.cc b/media/tools/player_x11/gles_video_renderer.cc index 0b36e74..f1942ba 100644 --- a/media/tools/player_x11/gles_video_renderer.cc +++ b/media/tools/player_x11/gles_video_renderer.cc @@ -175,8 +175,8 @@ void GlesVideoRenderer::Paint() { scoped_refptr<media::VideoFrame> video_frame; GetCurrentFrame(&video_frame); - if (!video_frame.get()) { + PutCurrentFrame(video_frame); return; } @@ -191,6 +191,8 @@ void GlesVideoRenderer::Paint() { eglSwapBuffers(egl_display_, egl_surface_); } } + // TODO(jiesun/wjia): use fence before call put. 
+ PutCurrentFrame(video_frame); return; } @@ -237,6 +239,7 @@ void GlesVideoRenderer::Paint() { glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, data); } + PutCurrentFrame(video_frame); glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); eglSwapBuffers(egl_display_, egl_surface_); diff --git a/media/tools/player_x11/player_x11.cc b/media/tools/player_x11/player_x11.cc index 16b453a..a361368 100644 --- a/media/tools/player_x11/player_x11.cc +++ b/media/tools/player_x11/player_x11.cc @@ -137,7 +137,10 @@ void PeriodicalUpdate( MessageLoop* message_loop, bool audio_only) { if (!g_running) { - message_loop->Quit(); + // interrupt signal is received during lat time period. + // Quit message_loop only when pipeline is fully stopped. + pipeline->Stop(media::TaskToCallbackAdapter::NewCallback( + NewRunnableFunction(Quit, message_loop))); return; } diff --git a/media/tools/player_x11/x11_video_renderer.cc b/media/tools/player_x11/x11_video_renderer.cc index 881a719..707a578 100644 --- a/media/tools/player_x11/x11_video_renderer.cc +++ b/media/tools/player_x11/x11_video_renderer.cc @@ -139,9 +139,11 @@ void X11VideoRenderer::OnFrameAvailable() { void X11VideoRenderer::Paint() { scoped_refptr<media::VideoFrame> video_frame; GetCurrentFrame(&video_frame); - - if (!image_ ||!video_frame) + if (!image_ || !video_frame) { + // TODO(jiesun): Use color fill rather than create black frame then scale. + PutCurrentFrame(video_frame); return; + } // Convert YUV frame to RGB. DCHECK(video_frame->format() == media::VideoFrame::YV12 || @@ -164,6 +166,7 @@ void X11VideoRenderer::Paint() { video_frame->stride(media::VideoFrame::kUPlane), image_->bytes_per_line, yuv_type); + PutCurrentFrame(video_frame); if (use_render_) { // If XRender is used, we'll upload the image to a pixmap. And then |