// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/renderer/media/rtc_video_renderer.h"
#include "base/bind.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/message_loop_proxy.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "third_party/libjingle/source/talk/base/timeutils.h"
#include "third_party/libjingle/source/talk/media/base/videoframe.h"

using media::CopyYPlane;
using media::CopyUPlane;
using media::CopyVPlane;

namespace content {

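// base::MessageLoopProxy::current() binds this renderer to the message loop
// it is constructed on; RenderFrame() posts frames back to that loop.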
RTCVideoRenderer::RTCVideoRenderer(
    webrtc::VideoTrackInterface* video_track,
    const base::Closure& error_cb,
    const RepaintCB& repaint_cb)
    : error_cb_(error_cb),
      repaint_cb_(repaint_cb),
      message_loop_proxy_(base::MessageLoopProxy::current()),
      state_(kStopped),
      video_track_(video_track) {
}

RTCVideoRenderer::~RTCVideoRenderer() {
}

void RTCVideoRenderer::Start() {
  DCHECK(message_loop_proxy_->BelongsToCurrentThread());
  DCHECK_EQ(state_, kStopped);
  if (video_track_)
    video_track_->AddRenderer(this);
  state_ = kStarted;
}

void RTCVideoRenderer::Stop() {
  DCHECK(message_loop_proxy_->BelongsToCurrentThread());
  if (video_track_) {
    state_ = kStopped;
    video_track_->RemoveRenderer(this);
    video_track_ = NULL;
  }
}

void RTCVideoRenderer::Play() {
  DCHECK(message_loop_proxy_->BelongsToCurrentThread());
  if (video_track_ && state_ == kPaused) {
    state_ = kStarted;
  }
}

void RTCVideoRenderer::Pause() {
  DCHECK(message_loop_proxy_->BelongsToCurrentThread());
  if (video_track_ && state_ == kStarted) {
    state_ = kPaused;
  }
}

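// SetSize() is a no-op here: the frame dimensions are taken from each
// incoming frame in RenderFrame() rather than from an external resize.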
void RTCVideoRenderer::SetSize(int width, int height) {
}

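// RenderFrame() may be called on a thread other than the one this object was
// created on. It copies the YV12 planes of the cricket::VideoFrame into a
// media::VideoFrame and posts the result back to the creating thread via
// message_loop_proxy_.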
void RTCVideoRenderer::RenderFrame(const cricket::VideoFrame* frame) {
  base::TimeDelta timestamp = base::TimeDelta::FromMilliseconds(
      frame->GetTimeStamp() / talk_base::kNumNanosecsPerMillisec);

  gfx::Size size(frame->GetWidth(), frame->GetHeight());

  scoped_refptr<media::VideoFrame> video_frame =
      media::VideoFrame::CreateFrame(media::VideoFrame::YV12,
                                     size,
                                     gfx::Rect(size),
                                     size,
                                     timestamp);

  // Aspect ratio unsupported; DCHECK when there are non-square pixels.
  DCHECK_EQ(frame->GetPixelWidth(), 1u);
  DCHECK_EQ(frame->GetPixelHeight(), 1u);

  int y_rows = frame->GetHeight();
  int uv_rows = frame->GetHeight() / 2;  // YV12 format.
  CopyYPlane(frame->GetYPlane(), frame->GetYPitch(), y_rows, video_frame);
  CopyUPlane(frame->GetUPlane(), frame->GetUPitch(), uv_rows, video_frame);
  CopyVPlane(frame->GetVPlane(), frame->GetVPitch(), uv_rows, video_frame);

  message_loop_proxy_->PostTask(
      FROM_HERE, base::Bind(&RTCVideoRenderer::DoRenderFrameOnMainThread,
                            this, video_frame));
}

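// Runs on the creating (render) thread and forwards the converted frame to
// repaint_cb_, unless the renderer has been paused or stopped in the
// meantime.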
void RTCVideoRenderer::DoRenderFrameOnMainThread(
    scoped_refptr<media::VideoFrame> video_frame) {
  DCHECK(message_loop_proxy_->BelongsToCurrentThread());

  if (state_ != kStarted) {
    return;
  }

  repaint_cb_.Run(video_frame);
}

}  // namespace content