author     perkj@chromium.org <perkj@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2013-12-12 22:19:24 +0000
committer  perkj@chromium.org <perkj@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>   2013-12-12 22:19:24 +0000
commit     a59be9be6129926be77c65e454fcb757efe1ebc1 (patch)
tree       540aa9e0b0a97247507beff1b917f4779b34f743 /content
parent     b13830c20ee4e449d9a96e229f923d375f3c2213 (diff)
Wire up OnEnabledChanged to the MediaStreamVideoSink and use it in RtcVideoRenderer.
This is to fix a problem where the audio renderer does not start since no
video frame has been rendered.
https://code.google.com/p/webrtc/issues/detail?id=2162

BUG=328021
Review URL: https://codereview.chromium.org/112873003

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@240445 0039d316-1c4b-4281-b951-d872f2087c98
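The idea of the change, reduced to a standalone sketch (VideoSink and BlackFrameRenderer below are illustrative stand-ins, not the actual content:: classes): the sink interface gains an OnEnabledChanged(bool) hook with an empty default body so existing sinks are unaffected, and the renderer overrides it to push a single small black frame when the track is disabled before any real frame has arrived, which lets playback, and with it the audio renderer, start.

// Minimal standalone sketch of the pattern in this change. VideoSink and
// BlackFrameRenderer are illustrative stand-ins, not the real content:: types.
#include <cstdio>

class VideoSink {
 public:
  // New hook with an empty default body so existing sinks keep compiling.
  virtual void OnEnabledChanged(bool enabled) {}

 protected:
  virtual ~VideoSink() {}
};

class BlackFrameRenderer : public VideoSink {
 public:
  void OnEnabledChanged(bool enabled) override {
    // If the track is disabled before any real frame has arrived, emit one
    // small black frame so playback (and the audio clock) can start.
    if (!enabled && !first_frame_rendered_)
      RenderBlackFrame();
  }

 private:
  void RenderBlackFrame() {
    std::printf("rendered 2x2 black signaling frame\n");
    first_frame_rendered_ = true;
  }

  bool first_frame_rendered_ = false;
};

int main() {
  BlackFrameRenderer renderer;
  renderer.OnEnabledChanged(false);  // renders the signaling frame once
  renderer.OnEnabledChanged(false);  // no-op, a frame was already rendered
}

The empty default body keeps the new virtual purely opt-in, which is why only RTCVideoRenderer and WebRtcVideoSinkAdapter need to change in the diff below.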
Diffstat (limited to 'content')
-rw-r--r--  content/public/renderer/media_stream_sink.h                  |  1
-rw-r--r--  content/renderer/media/rtc_video_renderer.cc                 | 60
-rw-r--r--  content/renderer/media/rtc_video_renderer.h                  | 11
-rw-r--r--  content/renderer/media/webrtc/webrtc_video_sink_adapter.cc   | 46
-rw-r--r--  content/renderer/media/webrtc/webrtc_video_sink_adapter.h    |  1

5 files changed, 73 insertions(+), 46 deletions(-)
diff --git a/content/public/renderer/media_stream_sink.h b/content/public/renderer/media_stream_sink.h
index ba9d726..9374182 100644
--- a/content/public/renderer/media_stream_sink.h
+++ b/content/public/renderer/media_stream_sink.h
@@ -19,6 +19,7 @@ class MediaStreamSink {
public:
virtual void OnReadyStateChanged(
blink::WebMediaStreamSource::ReadyState state) {}
+ virtual void OnEnabledChanged(bool enabled) {}
protected:
virtual ~MediaStreamSink() {}
diff --git a/content/renderer/media/rtc_video_renderer.cc b/content/renderer/media/rtc_video_renderer.cc
index 5ea461f..013fcad 100644
--- a/content/renderer/media/rtc_video_renderer.cc
+++ b/content/renderer/media/rtc_video_renderer.cc
@@ -18,9 +18,9 @@ RTCVideoRenderer::RTCVideoRenderer(
: error_cb_(error_cb),
repaint_cb_(repaint_cb),
message_loop_proxy_(base::MessageLoopProxy::current()),
- state_(kStopped),
+ state_(STOPPED),
+ first_frame_rendered_(false),
video_track_(video_track) {
- MaybeRenderSignalingFrame(video_track_.source().readyState());
}
RTCVideoRenderer::~RTCVideoRenderer() {
@@ -28,41 +28,58 @@ RTCVideoRenderer::~RTCVideoRenderer() {
void RTCVideoRenderer::Start() {
DCHECK(message_loop_proxy_->BelongsToCurrentThread());
- DCHECK_EQ(state_, kStopped);
+ DCHECK_EQ(state_, STOPPED);
+ DCHECK(!first_frame_rendered_);
AddToVideoTrack(this, video_track_);
- state_ = kStarted;
+ state_ = STARTED;
+
+ if (video_track_.source().readyState() ==
+ blink::WebMediaStreamSource::ReadyStateEnded ||
+ !video_track_.isEnabled()) {
+ MaybeRenderSignalingFrame();
+ }
}
void RTCVideoRenderer::Stop() {
DCHECK(message_loop_proxy_->BelongsToCurrentThread());
+ DCHECK(state_ == STARTED || state_ == PAUSED);
RemoveFromVideoTrack(this, video_track_);
+ state_ = STOPPED;
+ first_frame_rendered_ = false;
}
void RTCVideoRenderer::Play() {
DCHECK(message_loop_proxy_->BelongsToCurrentThread());
- if (state_ == kPaused) {
- state_ = kStarted;
+ if (state_ == PAUSED) {
+ state_ = STARTED;
}
}
void RTCVideoRenderer::Pause() {
DCHECK(message_loop_proxy_->BelongsToCurrentThread());
- if (state_ == kStarted) {
- state_ = kPaused;
+ if (state_ == STARTED) {
+ state_ = PAUSED;
}
}
void RTCVideoRenderer::OnReadyStateChanged(
blink::WebMediaStreamSource::ReadyState state) {
DCHECK(message_loop_proxy_->BelongsToCurrentThread());
- MaybeRenderSignalingFrame(state);
+ if (state == blink::WebMediaStreamSource::ReadyStateEnded)
+ MaybeRenderSignalingFrame();
+}
+
+void RTCVideoRenderer::OnEnabledChanged(bool enabled) {
+ DCHECK(message_loop_proxy_->BelongsToCurrentThread());
+ if (!enabled)
+ MaybeRenderSignalingFrame();
}
void RTCVideoRenderer::OnVideoFrame(
const scoped_refptr<media::VideoFrame>& frame) {
DCHECK(message_loop_proxy_->BelongsToCurrentThread());
- if (state_ != kStarted) {
+ if (state_ != STARTED) {
return;
}
@@ -72,20 +89,21 @@ void RTCVideoRenderer::OnVideoFrame(
"timestamp",
frame->GetTimestamp().InMilliseconds());
repaint_cb_.Run(frame);
+ first_frame_rendered_ = true;
}
-void RTCVideoRenderer::MaybeRenderSignalingFrame(
- blink::WebMediaStreamSource::ReadyState state) {
- // Render a small black frame if the track transition to ended.
+void RTCVideoRenderer::MaybeRenderSignalingFrame() {
+ // Render a small black frame if no frame has been rendered.
// This is necessary to make sure audio can play if the video tag src is
- // a MediaStream video track that has been rejected or ended.
- if (state == blink::WebMediaStreamSource::ReadyStateEnded) {
- const int kMinFrameSize = 2;
- const gfx::Size size(kMinFrameSize, kMinFrameSize);
- scoped_refptr<media::VideoFrame> video_frame =
- media::VideoFrame::CreateBlackFrame(size);
- OnVideoFrame(video_frame);
- }
+ // a MediaStream video track that has been rejected, ended or disabled.
+ if (first_frame_rendered_)
+ return;
+
+ const int kMinFrameSize = 2;
+ const gfx::Size size(kMinFrameSize, kMinFrameSize);
+ scoped_refptr<media::VideoFrame> video_frame =
+ media::VideoFrame::CreateBlackFrame(size);
+ OnVideoFrame(video_frame);
}
} // namespace content
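For context, the signaling frame created above with media::VideoFrame::CreateBlackFrame(size) is simply the smallest frame the pipeline will accept. As a rough standalone illustration only (this is not the media::VideoFrame implementation), a 2x2 planar YUV "black" frame is a luma plane of zeros with the subsampled chroma planes held at the neutral value 128:

// Rough standalone illustration of a tiny "black" planar YUV frame; this is
// not the actual media::VideoFrame code, just the kind of layout it produces.
#include <array>
#include <cstdint>
#include <cstdio>

struct TinyI420Frame {
  static constexpr int kWidth = 2;
  static constexpr int kHeight = 2;
  std::array<uint8_t, kWidth * kHeight> y;                  // luma, 1 byte/pixel
  std::array<uint8_t, (kWidth / 2) * (kHeight / 2)> u;      // 2x2-subsampled chroma
  std::array<uint8_t, (kWidth / 2) * (kHeight / 2)> v;
};

TinyI420Frame MakeBlackFrame() {
  TinyI420Frame frame;
  frame.y.fill(0x00);  // luma 0 == black (full range)
  frame.u.fill(0x80);  // 128 == neutral chroma
  frame.v.fill(0x80);
  return frame;
}

int main() {
  TinyI420Frame frame = MakeBlackFrame();
  std::printf("y[0]=%u u[0]=%u v[0]=%u\n",
              static_cast<unsigned>(frame.y[0]),
              static_cast<unsigned>(frame.u[0]),
              static_cast<unsigned>(frame.v[0]));
}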
diff --git a/content/renderer/media/rtc_video_renderer.h b/content/renderer/media/rtc_video_renderer.h
index e891b88..b72a586 100644
--- a/content/renderer/media/rtc_video_renderer.h
+++ b/content/renderer/media/rtc_video_renderer.h
@@ -47,9 +47,9 @@ class CONTENT_EXPORT RTCVideoRenderer
private:
enum State {
- kStarted,
- kPaused,
- kStopped,
+ STARTED,
+ PAUSED,
+ STOPPED,
};
// VideoTrackSink implementation. Called on the main thread.
@@ -57,14 +57,15 @@ class CONTENT_EXPORT RTCVideoRenderer
const scoped_refptr<media::VideoFrame>& frame) OVERRIDE;
virtual void OnReadyStateChanged(
blink::WebMediaStreamSource::ReadyState state) OVERRIDE;
+ virtual void OnEnabledChanged(bool enabled) OVERRIDE;
- void MaybeRenderSignalingFrame(
- blink::WebMediaStreamSource::ReadyState state);
+ void MaybeRenderSignalingFrame();
base::Closure error_cb_;
RepaintCB repaint_cb_;
scoped_refptr<base::MessageLoopProxy> message_loop_proxy_;
State state_;
+ bool first_frame_rendered_;
blink::WebMediaStreamTrack video_track_;
DISALLOW_COPY_AND_ASSIGN(RTCVideoRenderer);
diff --git a/content/renderer/media/webrtc/webrtc_video_sink_adapter.cc b/content/renderer/media/webrtc/webrtc_video_sink_adapter.cc
index 0f3233f..652dec0 100644
--- a/content/renderer/media/webrtc/webrtc_video_sink_adapter.cc
+++ b/content/renderer/media/webrtc/webrtc_video_sink_adapter.cc
@@ -26,11 +26,12 @@ WebRtcVideoSinkAdapter::WebRtcVideoSinkAdapter(
MediaStreamVideoSink* sink)
: message_loop_proxy_(base::MessageLoopProxy::current()),
sink_(sink),
- video_track_(video_track) {
+ video_track_(video_track),
+ state_(video_track->state()),
+ track_enabled_(video_track->enabled()) {
DCHECK(sink);
video_track_->AddRenderer(this);
video_track_->RegisterObserver(this);
- state_ = video_track_->state();
DVLOG(1) << "WebRtcVideoSinkAdapter";
}
@@ -81,26 +82,31 @@ void WebRtcVideoSinkAdapter::RenderFrame(const cricket::VideoFrame* frame) {
void WebRtcVideoSinkAdapter::OnChanged() {
DCHECK(message_loop_proxy_->BelongsToCurrentThread());
- // TODO(perkj) OnChanged belong to base class of WebRtcVideoSinkAdapter
+ // TODO(perkj): OnChanged belongs to the base class of WebRtcVideoSinkAdapter
// common for both webrtc audio and video.
webrtc::MediaStreamTrackInterface::TrackState state = video_track_->state();
- if (state == state_)
- return;
- state_ = state;
- switch (state) {
- case webrtc::MediaStreamTrackInterface::kInitializing:
- // Ignore the kInitializing state since there is no match in
- // WebMediaStreamSource::ReadyState.
- break;
- case webrtc::MediaStreamTrackInterface::kLive:
- sink_->OnReadyStateChanged(blink::WebMediaStreamSource::ReadyStateLive);
- break;
- case webrtc::MediaStreamTrackInterface::kEnded:
- sink_->OnReadyStateChanged(blink::WebMediaStreamSource::ReadyStateEnded);
- break;
- default:
- NOTREACHED();
- break;
+ if (state != state_) {
+ state_ = state;
+ switch (state) {
+ case webrtc::MediaStreamTrackInterface::kInitializing:
+ // Ignore the kInitializing state since there is no match in
+ // WebMediaStreamSource::ReadyState.
+ break;
+ case webrtc::MediaStreamTrackInterface::kLive:
+ sink_->OnReadyStateChanged(blink::WebMediaStreamSource::ReadyStateLive);
+ break;
+ case webrtc::MediaStreamTrackInterface::kEnded:
+ sink_->OnReadyStateChanged(
+ blink::WebMediaStreamSource::ReadyStateEnded);
+ break;
+ default:
+ NOTREACHED();
+ break;
+ }
+ }
+ if (track_enabled_ != video_track_->enabled()) {
+ track_enabled_ = video_track_->enabled();
+ sink_->OnEnabledChanged(track_enabled_);
}
}
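The adapter change above is a small instance of a common observer pattern: a coarse OnChanged() callback is fanned out into specific notifications by caching the last-seen values and forwarding only what actually changed. A standalone sketch of that pattern (Track, Sink and SinkAdapter are illustrative types, not the webrtc/content classes):

// Standalone sketch of diffing cached state inside a coarse OnChanged()
// callback. Track, Sink and SinkAdapter are illustrative, not the real types.
#include <iostream>

enum class TrackState { kInitializing, kLive, kEnded };

struct Track {
  TrackState state = TrackState::kInitializing;
  bool enabled = true;
};

class Sink {
 public:
  void OnReadyStateChanged(TrackState state) {
    std::cout << "ready state changed: " << static_cast<int>(state) << "\n";
  }
  void OnEnabledChanged(bool enabled) {
    std::cout << "enabled changed: " << std::boolalpha << enabled << "\n";
  }
};

class SinkAdapter {
 public:
  SinkAdapter(Track* track, Sink* sink)
      : track_(track),
        sink_(sink),
        state_(track->state),
        enabled_(track->enabled) {}

  // The track only says "something changed"; compare against the cached
  // values and forward just the notifications that are actually new.
  void OnChanged() {
    if (track_->state != state_) {
      state_ = track_->state;
      sink_->OnReadyStateChanged(state_);
    }
    if (track_->enabled != enabled_) {
      enabled_ = track_->enabled;
      sink_->OnEnabledChanged(enabled_);
    }
  }

 private:
  Track* track_;
  Sink* sink_;
  TrackState state_;
  bool enabled_;
};

int main() {
  Track track;
  Sink sink;
  SinkAdapter adapter(&track, &sink);

  track.enabled = false;  // disable the track
  adapter.OnChanged();    // forwards only OnEnabledChanged(false)

  track.state = TrackState::kLive;
  adapter.OnChanged();    // forwards only OnReadyStateChanged(kLive)
}

Caching the enabled flag at construction time, as the diff does with track_enabled_(video_track->enabled()), avoids a spurious OnEnabledChanged notification for the track's initial value.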
diff --git a/content/renderer/media/webrtc/webrtc_video_sink_adapter.h b/content/renderer/media/webrtc/webrtc_video_sink_adapter.h
index 66a88e1..2f75a95 100644
--- a/content/renderer/media/webrtc/webrtc_video_sink_adapter.h
+++ b/content/renderer/media/webrtc/webrtc_video_sink_adapter.h
@@ -50,6 +50,7 @@ class CONTENT_EXPORT WebRtcVideoSinkAdapter
// The video track the renderer is connected to.
scoped_refptr<webrtc::VideoTrackInterface> video_track_;
webrtc::MediaStreamTrackInterface::TrackState state_;
+ bool track_enabled_;
DISALLOW_COPY_AND_ASSIGN(WebRtcVideoSinkAdapter);
};