// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef MEDIA_VIDEO_VIDEO_DECODE_ENGINE_H_
#define MEDIA_VIDEO_VIDEO_DECODE_ENGINE_H_

#include "base/basictypes.h"
#include "base/callback.h"
#include "base/message_loop.h"
#include "media/base/video_frame.h"

namespace media {

class Buffer;

enum VideoCodec {
  kCodecH264,
  kCodecVC1,
  kCodecMPEG2,
  kCodecMPEG4,
  kCodecTheora,
  kCodecVP8,
};

static const uint32 kProfileDoNotCare = static_cast<uint32>(-1);
static const uint32 kLevelDoNotCare = static_cast<uint32>(-1);

struct VideoCodecConfig {
  VideoCodecConfig()
      : codec_(kCodecH264),
        profile_(kProfileDoNotCare),
        level_(kLevelDoNotCare),
        width_(0),
        height_(0),
        opaque_context_(NULL) {}

  VideoCodec codec_;

  // TODO(jiesun): video profile and level are specific to each individual
  // codec. Define per-codec enums for them.
  uint32 profile_;
  uint32 level_;

  // Container's concept of the width and height of this video.
  int32 width_;
  int32 height_;  // TODO(jiesun): Do we allow height to be negative to
                  // indicate that the output is upside-down?

  // The FFmpeg engine uses this to pass the AVStream; otherwise we should
  // remove this.
  void* opaque_context_;
};

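// Illustration only, not part of the interface: a minimal sketch of how a
// client might fill in a VideoCodecConfig before handing it to
// VideoDecodeEngine::Initialize(). The concrete values are hypothetical.
//
//   VideoCodecConfig config;
//   config.codec_ = kCodecH264;
//   config.width_ = 1280;   // Container-reported width.
//   config.height_ = 720;   // Container-reported height.
//   // profile_ and level_ keep their kProfileDoNotCare / kLevelDoNotCare
//   // defaults; opaque_context_ stays NULL unless the FFmpeg engine needs
//   // an AVStream.
//   engine->Initialize(message_loop, handler, config);
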
struct VideoStreamInfo {
  VideoFrame::Format surface_format_;
  VideoFrame::SurfaceType surface_type_;
  uint32 surface_width_;   // Can differ from the container's value.
  uint32 surface_height_;  // Can differ from the container's value.
};

struct VideoCodecInfo {
  // The other members are only meaningful when this is true.
  bool success_;

  // Whether the decoder provides its own output buffer pool.
  bool provides_buffers_;

  // Initial stream info. Only some of its fields may be valid; the engine
  // should update any invalid fields via EventHandler::OnFormatChange().
  VideoStreamInfo stream_info_;
};

class VideoDecodeEngine {
 public:
  struct EventHandler {
   public:
    virtual ~EventHandler() {}
    virtual void OnInitializeComplete(const VideoCodecInfo& info) = 0;
    virtual void OnUninitializeComplete() = 0;
    virtual void OnFlushComplete() = 0;
    virtual void OnSeekComplete() = 0;
    virtual void OnError() = 0;
    virtual void OnFormatChange(VideoStreamInfo stream_info) = 0;
    virtual void OnEmptyBufferCallback(scoped_refptr<Buffer> buffer) = 0;
    virtual void OnFillBufferCallback(scoped_refptr<VideoFrame> frame) = 0;
  };

  virtual ~VideoDecodeEngine() {}

  // Initializes the engine with the specified configuration. |message_loop|
  // may be NULL if every operation is synchronous. The engine should call
  // EventHandler::OnInitializeComplete() whether or not initialization
  // succeeded.
  // TODO(jiesun): remove message_loop and create the thread inside the
  // OpenMAX engine? Or create the thread in GpuVideoDecoder and pass its
  // message loop here?
  virtual void Initialize(MessageLoop* message_loop,
                          EventHandler* event_handler,
                          const VideoCodecConfig& config) = 0;

  // Uninitializes the engine. The engine should destroy all resources and
  // call EventHandler::OnUninitializeComplete().
  virtual void Uninitialize() = 0;

  // Flushes the engine. The engine should return all buffers to their owner
  // (which could be the engine itself), then call
  // EventHandler::OnFlushComplete().
  virtual void Flush() = 0;

  // Signals the decode engine to preroll and issue read requests after a
  // Flush() has been made.
  virtual void Seek() = 0;

  // Buffer exchange methods for the input and output streams.
  // These methods and their callbacks can be used in two scenarios, for both
  // the input and the output stream:
  // 1. The engine provides the buffers.
  // 2. An outside party provides the buffers.
  // The currently planned engine implementation:
  // 1. Issues input buffer requests from inside the engine through
  //    EventHandler::OnEmptyBufferCallback(); the engine implementation has
  //    better knowledge of the decoder's reordering delay and jitter-removal
  //    requirements. Input buffers are returned to the engine through
  //    EmptyThisBuffer().
  // 2. Output buffers are provided from outside the engine and fed into the
  //    engine through FillThisBuffer(). Output buffers are returned to the
  //    outside through EventHandler::OnFillBufferCallback().
  // An illustrative usage sketch follows this class definition.
  virtual void EmptyThisBuffer(scoped_refptr<Buffer> buffer) = 0;
  virtual void FillThisBuffer(scoped_refptr<VideoFrame> frame) = 0;
};

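// Illustration only, a sketch and not part of the interface: one way a
// client could drive an engine. VideoDecoderClient and the behavior noted
// in its comments are invented for this example.
//
//   class VideoDecoderClient : public VideoDecodeEngine::EventHandler {
//    public:
//     virtual void OnInitializeComplete(const VideoCodecInfo& info) {
//       // info.success_ says whether initialization worked. If
//       // info.provides_buffers_ is false, allocate VideoFrames here and
//       // feed them to the engine with FillThisBuffer().
//     }
//     virtual void OnEmptyBufferCallback(scoped_refptr<Buffer> buffer) {
//       // The engine is done with this input buffer; refill it with the
//       // next compressed frame and send it back via EmptyThisBuffer().
//     }
//     virtual void OnFillBufferCallback(scoped_refptr<VideoFrame> frame) {
//       // A decoded frame is ready for rendering; recycle it later with
//       // FillThisBuffer().
//     }
//     // Remaining EventHandler overrides omitted.
//   };
//
// Expected lifecycle: Initialize() -> OnInitializeComplete(), steady-state
// buffer exchange as above, Flush() -> OnFlushComplete(), Seek() to preroll
// after a flush, and finally Uninitialize() -> OnUninitializeComplete().
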
} // namespace media
#endif // MEDIA_VIDEO_VIDEO_DECODE_ENGINE_H_