1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
|
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/base/video_frame.h"
#include <algorithm>
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/memory/aligned_memory.h"
#include "base/string_piece.h"
#include "media/base/limits.h"
#include "media/base/video_util.h"
namespace media {
// static
scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
    VideoFrame::Format format,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    base::TimeDelta timestamp) {
  // Allocates a frame that owns its own pixel memory (released automatically
  // when the last reference goes away).
  DCHECK(IsValidConfig(format, coded_size, visible_rect, natural_size));
  scoped_refptr<VideoFrame> result(new VideoFrame(
      format, coded_size, visible_rect, natural_size, timestamp));
  if (format == VideoFrame::RGB32) {
    result->AllocateRGB(4u);
  } else if (format == VideoFrame::YV12 || format == VideoFrame::YV16) {
    result->AllocateYUV();
  } else {
    LOG(FATAL) << "Unsupported frame format: " << format;
  }
  return result;
}
// static
bool VideoFrame::IsValidConfig(VideoFrame::Format format,
                               const gfx::Size& coded_size,
                               const gfx::Rect& visible_rect,
                               const gfx::Size& natural_size) {
  // Reject unknown formats outright.
  if (format == VideoFrame::INVALID)
    return false;

  // The coded size must be non-empty and within the global canvas limits.
  if (coded_size.IsEmpty() ||
      coded_size.GetArea() > limits::kMaxCanvas ||
      coded_size.width() > limits::kMaxDimension ||
      coded_size.height() > limits::kMaxDimension)
    return false;

  // The visible rect must be non-empty and lie entirely inside the coded area.
  if (visible_rect.IsEmpty() ||
      visible_rect.x() < 0 || visible_rect.y() < 0 ||
      visible_rect.right() > coded_size.width() ||
      visible_rect.bottom() > coded_size.height())
    return false;

  // The natural (display) size obeys the same global limits.
  return !natural_size.IsEmpty() &&
         natural_size.GetArea() <= limits::kMaxCanvas &&
         natural_size.width() <= limits::kMaxDimension &&
         natural_size.height() <= limits::kMaxDimension;
}
// static
scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture(
    uint32 texture_id,
    uint32 texture_target,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    base::TimeDelta timestamp,
    const ReadPixelsCB& read_pixels_cb,
    const base::Closure& no_longer_needed_cb) {
  // The returned frame owns no pixel memory; it references an externally
  // managed GL texture.  |no_longer_needed_cb| runs on destruction so the
  // owner can reclaim the texture.
  scoped_refptr<VideoFrame> wrapped(new VideoFrame(
      NATIVE_TEXTURE, coded_size, visible_rect, natural_size, timestamp));
  wrapped->texture_id_ = texture_id;
  wrapped->texture_target_ = texture_target;
  wrapped->read_pixels_cb_ = read_pixels_cb;
  wrapped->no_longer_needed_cb_ = no_longer_needed_cb;
  return wrapped;
}
void VideoFrame::ReadPixelsFromNativeTexture(void* pixels) {
  // Only meaningful for texture-backed frames.  The callback may legitimately
  // be unset, in which case this call is a no-op.
  DCHECK_EQ(format_, NATIVE_TEXTURE);
  if (read_pixels_cb_.is_null())
    return;
  read_pixels_cb_.Run(pixels);
}
// static
scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData(
    Format format,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    int32 y_stride, int32 u_stride, int32 v_stride,
    uint8* y_data, uint8* u_data, uint8* v_data,
    base::TimeDelta timestamp,
    const base::Closure& no_longer_needed_cb) {
  // Wraps caller-owned planar YUV memory; the frame does not take ownership.
  // |no_longer_needed_cb| fires when the last reference is dropped, telling
  // the caller the memory may be reclaimed.
  DCHECK(format == YV12 || format == YV16 || format == I420) << format;
  scoped_refptr<VideoFrame> wrapped(new VideoFrame(
      format, coded_size, visible_rect, natural_size, timestamp));
  wrapped->strides_[kYPlane] = y_stride;
  wrapped->data_[kYPlane] = y_data;
  wrapped->strides_[kUPlane] = u_stride;
  wrapped->data_[kUPlane] = u_data;
  wrapped->strides_[kVPlane] = v_stride;
  wrapped->data_[kVPlane] = v_data;
  wrapped->no_longer_needed_cb_ = no_longer_needed_cb;
  return wrapped;
}
// static
scoped_refptr<VideoFrame> VideoFrame::CreateEmptyFrame() {
  // An EMPTY frame carries no pixel data and serves as the end-of-stream
  // marker.
  return new VideoFrame(VideoFrame::EMPTY, gfx::Size(), gfx::Rect(),
                        gfx::Size(), base::TimeDelta());
}
// static
scoped_refptr<VideoFrame> VideoFrame::CreateColorFrame(
    const gfx::Size& size,
    uint8 y, uint8 u, uint8 v,
    base::TimeDelta timestamp) {
  // Allocates a YV12 frame and floods all three planes with the given color.
  DCHECK(IsValidConfig(VideoFrame::YV12, size, gfx::Rect(size), size));
  scoped_refptr<VideoFrame> colored = VideoFrame::CreateFrame(
      VideoFrame::YV12, size, gfx::Rect(size), size, timestamp);
  FillYUV(colored, y, u, v);
  return colored;
}
// static
scoped_refptr<VideoFrame> VideoFrame::CreateBlackFrame(const gfx::Size& size) {
  // Black in YUV: zero luma, mid-range (0x80) chroma.
  const uint8 kBlackY = 0x00;
  const uint8 kBlackUV = 0x80;
  return CreateColorFrame(size, kBlackY, kBlackUV, kBlackUV,
                          base::TimeDelta());
}
// Rounds |value| up to the nearest multiple of |alignment|, which must be a
// power of two.
static inline size_t RoundUp(size_t value, size_t alignment) {
  // Power-of-two check: only then does the mask trick below work.
  DCHECK((alignment & (alignment - 1)) == 0u);
  return (value + alignment - 1) & ~(alignment - 1);
}
// Release data allocated by AllocateRGB() or AllocateYUV().  Bound into
// |no_longer_needed_cb_| so the buffer is freed with the last frame reference.
static void ReleaseData(uint8* data) {
  DCHECK(data);
  base::AlignedFree(data);  // Must pair with base::AlignedAlloc().
}
void VideoFrame::AllocateRGB(size_t bytes_per_pixel) {
  // Round each row up to at least a 16-byte boundary; sufficient for MMX and
  // SSE2 reads (movq/movdqa).
  const size_t aligned_row_bytes =
      RoundUp(coded_size_.width(), kFrameSizeAlignment) * bytes_per_pixel;
  const size_t aligned_height =
      RoundUp(coded_size_.height(), kFrameSizeAlignment);
  strides_[VideoFrame::kRGBPlane] = aligned_row_bytes;
  data_[VideoFrame::kRGBPlane] = reinterpret_cast<uint8*>(base::AlignedAlloc(
      aligned_row_bytes * aligned_height + kFrameSizePadding,
      kFrameAddressAlignment));
  // Free the buffer when the last reference to this frame goes away.
  no_longer_needed_cb_ = base::Bind(&ReleaseData, data_[VideoFrame::kRGBPlane]);
  DCHECK(!(reinterpret_cast<intptr_t>(data_[VideoFrame::kRGBPlane]) & 7));
  COMPILE_ASSERT(0 == VideoFrame::kRGBPlane, RGB_data_must_be_index_0);
}
void VideoFrame::AllocateYUV() {
DCHECK(format_ == VideoFrame::YV12 || format_ == VideoFrame::YV16);
// Align Y rows at least at 16 byte boundaries. The stride for both
// YV12 and YV16 is 1/2 of the stride of Y. For YV12, every row of bytes for
// U and V applies to two rows of Y (one byte of UV for 4 bytes of Y), so in
// the case of YV12 the strides are identical for the same width surface, but
// the number of bytes allocated for YV12 is 1/2 the amount for U & V as
// YV16. We also round the height of the surface allocated to be an even
// number to avoid any potential of faulting by code that attempts to access
// the Y values of the final row, but assumes that the last row of U & V
// applies to a full two rows of Y.
size_t y_stride = RoundUp(row_bytes(VideoFrame::kYPlane),
kFrameSizeAlignment);
size_t uv_stride = RoundUp(row_bytes(VideoFrame::kUPlane),
kFrameSizeAlignment);
// The *2 here is because some formats (e.g. h264) allow interlaced coding,
// and then the size needs to be a multiple of two macroblocks (vertically).
// See libavcodec/utils.c:avcodec_align_dimensions2().
size_t y_height = RoundUp(coded_size_.height(), kFrameSizeAlignment * 2);
size_t uv_height = format_ == VideoFrame::YV12 ? y_height / 2 : y_height;
size_t y_bytes = y_height * y_stride;
size_t uv_bytes = uv_height * uv_stride;
// The extra line of UV being allocated is because h264 chroma MC
// overreads by one line in some cases, see libavcodec/utils.c:
// avcodec_align_dimensions2() and libavcodec/x86/h264_chromamc.asm:
// put_h264_chroma_mc4_ssse3().
uint8* data = reinterpret_cast<uint8*>(
base::AlignedAlloc(
y_bytes + (uv_bytes * 2 + uv_stride) + kFrameSizePadding,
kFrameAddressAlignment));
no_longer_needed_cb_ = base::Bind(&ReleaseData, data);
COMPILE_ASSERT(0 == VideoFrame::kYPlane, y_plane_data_must_be_index_0);
data_[VideoFrame::kYPlane] = data;
data_[VideoFrame::kUPlane] = data + y_bytes;
data_[VideoFrame::kVPlane] = data + y_bytes + uv_bytes;
strides_[VideoFrame::kYPlane] = y_stride;
strides_[VideoFrame::kUPlane] = uv_stride;
strides_[VideoFrame::kVPlane] = uv_stride;
}
// Private constructor; use the Create*/Wrap* factory functions instead.
VideoFrame::VideoFrame(VideoFrame::Format format,
                       const gfx::Size& coded_size,
                       const gfx::Rect& visible_rect,
                       const gfx::Size& natural_size,
                       base::TimeDelta timestamp)
    : format_(format),
      coded_size_(coded_size),
      visible_rect_(visible_rect),
      natural_size_(natural_size),
      texture_id_(0),
      texture_target_(0),
      timestamp_(timestamp) {
  // Plane pointers and strides start zeroed; the factories fill them in for
  // formats that actually carry pixel data.
  memset(&strides_, 0, sizeof(strides_));
  memset(&data_, 0, sizeof(data_));
}
VideoFrame::~VideoFrame() {
  // Run (and clear) the release callback exactly once; it frees owned pixel
  // memory or notifies the external owner of wrapped data / textures.
  if (!no_longer_needed_cb_.is_null())
    base::ResetAndReturn(&no_longer_needed_cb_).Run();
}
// Returns true iff |plane| is a meaningful plane index for this frame's
// format.  NATIVE_TEXTURE frames have no planes at all.
bool VideoFrame::IsValidPlane(size_t plane) const {
  switch (format_) {
    case RGB32:
      return plane == kRGBPlane;
    case YV12:
    case YV16:
    // WrapExternalYuvData() explicitly accepts I420, which carries the same
    // three planes as YV12 (with U/V swapped), so it must be valid here too;
    // previously I420 frames fell through to NOTREACHED().
    case I420:
      return plane == kYPlane || plane == kUPlane || plane == kVPlane;
    case NATIVE_TEXTURE:
      NOTREACHED() << "NATIVE_TEXTUREs don't use plane-related methods!";
      return false;
    default:
      break;
  }
  // Intentionally leave out non-production formats.
  NOTREACHED() << "Unsupported video frame format: " << format_;
  return false;
}
// Byte distance between the starts of two consecutive rows of |plane|.  May
// exceed row_bytes(plane) due to alignment padding.
int VideoFrame::stride(size_t plane) const {
  DCHECK(IsValidPlane(plane));
  return strides_[plane];
}
// Number of bytes of actual pixel data per row in |plane| (excludes any
// stride padding).
int VideoFrame::row_bytes(size_t plane) const {
  DCHECK(IsValidPlane(plane));
  int width = coded_size_.width();
  switch (format_) {
    // 32bpp.
    case RGB32:
      return width * 4;
    // Planar, 8bpp.
    case YV12:
    case YV16:
    // I420 frames can be created via WrapExternalYuvData() and have the same
    // plane widths as YV12; previously they fell through to NOTREACHED().
    case I420:
      if (plane == kYPlane)
        return width;
      // Chroma planes are horizontally subsampled by 2, rounding up.
      return RoundUp(width, 2) / 2;
    default:
      break;
  }
  // Intentionally leave out non-production formats.
  NOTREACHED() << "Unsupported video frame format: " << format_;
  return 0;
}
// Number of rows of actual pixel data in |plane|.
int VideoFrame::rows(size_t plane) const {
  DCHECK(IsValidPlane(plane));
  int height = coded_size_.height();
  switch (format_) {
    case RGB32:
    case YV16:
      return height;
    case YV12:
    // I420 frames can be created via WrapExternalYuvData() and are 4:2:0 like
    // YV12 (chroma vertically subsampled by 2); previously they fell through
    // to NOTREACHED().
    case I420:
      if (plane == kYPlane)
        return height;
      return RoundUp(height, 2) / 2;
    default:
      break;
  }
  // Intentionally leave out non-production formats.
  NOTREACHED() << "Unsupported video frame format: " << format_;
  return 0;
}
// Pointer to the first byte of |plane|.  Null for frames without pixel data
// (the constructor zeroes data_ and only the factories populate it).
uint8* VideoFrame::data(size_t plane) const {
  DCHECK(IsValidPlane(plane));
  return data_[plane];
}
// GL texture id backing this frame; only valid for NATIVE_TEXTURE frames.
uint32 VideoFrame::texture_id() const {
  DCHECK_EQ(format_, NATIVE_TEXTURE);
  return texture_id_;
}
// GL texture target (e.g. GL_TEXTURE_2D) for the wrapped texture; only valid
// for NATIVE_TEXTURE frames.
uint32 VideoFrame::texture_target() const {
  DCHECK_EQ(format_, NATIVE_TEXTURE);
  return texture_target_;
}
// EMPTY frames (see CreateEmptyFrame()) mark the end of the stream.
bool VideoFrame::IsEndOfStream() const {
  return format_ == VideoFrame::EMPTY;
}
void VideoFrame::HashFrameForTesting(base::MD5Context* context) {
  // Folds every valid plane's pixel rows into |context|.  Plane indices are
  // dense from zero, so the first invalid index ends the scan.
  for (int plane = 0; plane < kMaxPlanes; ++plane) {
    if (!IsValidPlane(plane))
      break;
    const int bytes_per_row = row_bytes(plane);
    const int row_stride = stride(plane);
    char* const plane_start = reinterpret_cast<char*>(data(plane));
    // Hash only row_bytes() per row, skipping any stride padding.
    for (int row = 0; row < rows(plane); ++row) {
      base::MD5Update(
          context,
          base::StringPiece(plane_start + row_stride * row, bytes_per_row));
    }
  }
}
} // namespace media
|