author     fischman@chromium.org <fischman@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2013-12-04 09:27:57 +0000
committer  fischman@chromium.org <fischman@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2013-12-04 09:27:57 +0000
commit     6998f01f2c0dfb3a304eafeabddf7cadd1d08db9 (patch)
tree       70c2a550552db75bbdd11944be6a17501367844d
parent     c3998d38cf9c8f3008dc6c6b79a696bda5399115 (diff)
AndroidVideoEncodeAccelerator is born!
AVEA is the encode-side analogue of AndroidVideoDecodeAccelerator, or the Android analogue of ExynosVideoEncodeAccelerator, depending on your POV.

Also included in this CL:
- MediaCodecBridge learns how to be an encoder, too.
- MediaCodecBridge::Start is sunk into Create since they were always called together.
- android.util.Log() is given an exception parameter instead of concatenating its .toString() (and losing its stacktrace!)
- MediaCodecBridge exposes its buffers to reduce unnecessary memcpy'ing.

Performance impact: encode performance is isolated by making Android decode only 240p, with no audio send/receive, using the following URLs:
z620/gprecise: https://apprtc.appspot.com/?video=maxHeight=240&audio=false&r=<ROOM>
Nexus5: https://apprtc.appspot.com/?video=minHeight=720,maxHeight=720,minWidth=1280,maxWidth=1280&audio=false&r=<ROOM>
All 4 cores run at top speed (the ondemand governor ramps them up on its own with this workload).
SW encode: CPU utilization 80%, and the desktop receives 0.1-0.5 FPS (jankily).
HW encode: CPU utilization 60-70%, and the desktop receives 30 FPS reliably.

Comparing an easier workload of encoding 360p:
z620/gprecise: https://apprtc.appspot.com/?video=maxHeight=240&audio=false&r=<ROOM>
Nexus5: https://apprtc.appspot.com/?video=minHeight=360,maxHeight=360,minWidth=640,maxWidth=640&audio=false&r=<ROOM>
All 4 cores were set to the "performance" governor for a stable comparison.
SW encode: CPU utilization 63%, and the desktop receives 30 FPS reliably.
HW encode: CPU utilization 53%, and the desktop receives 30 FPS reliably.

BUG=313115

Review URL: https://codereview.chromium.org/74563002

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@238658 0039d316-1c4b-4281-b951-d872f2087c98
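To make the logging bullet concrete: android.util.Log's two-argument overloads can only record the string they are handed, so concatenating e.toString() drops the stack trace, while the overloads that take a Throwable log the message plus the full trace. A minimal sketch of the before/after pattern (hypothetical TAG and wrapper method; the real call sites are in MediaCodecBridge.java below):

    import android.util.Log;

    class LogExample {
        private static final String TAG = "MediaCodecBridge";  // Hypothetical tag.

        static void report(String mime, boolean isSecure, Exception e) {
            // Before: only e.toString() reaches the log; the stack trace is lost.
            Log.e(TAG, "Failed to create MediaCodec: " + mime + ", isSecure: "
                    + isSecure + ", " + e.toString());
            // After: the Throwable overload appends the full stack trace.
            Log.e(TAG, "Failed to create MediaCodec: " + mime + ", isSecure: "
                    + isSecure, e);
        }
    }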
-rw-r--r--  chrome/browser/about_flags.cc  2
-rw-r--r--  content/browser/gpu/compositor_util.cc  11
-rw-r--r--  content/browser/gpu/gpu_data_manager_impl_private.cc  11
-rw-r--r--  content/browser/gpu/gpu_process_host.cc  3
-rw-r--r--  content/browser/resources/gpu/info_view.js  1
-rw-r--r--  content/common/gpu/media/DEPS  1
-rw-r--r--  content/common/gpu/media/android_video_decode_accelerator.cc  20
-rw-r--r--  content/common/gpu/media/android_video_encode_accelerator.cc  400
-rw-r--r--  content/common/gpu/media/android_video_encode_accelerator.h  115
-rw-r--r--  content/common/gpu/media/gpu_video_encode_accelerator.cc  7
-rw-r--r--  content/content_common.gypi  10
-rw-r--r--  content/renderer/media/media_stream_dependency_factory.cc  11
-rw-r--r--  gpu/config/gpu_blacklist.cc  2
-rw-r--r--  gpu/config/gpu_blacklist_unittest.cc  4
-rw-r--r--  gpu/config/gpu_feature_type.h  1
-rw-r--r--  gpu/config/software_rendering_list_json.cc  10
-rw-r--r--  media/base/android/java/src/org/chromium/media/MediaCodecBridge.java  136
-rw-r--r--  media/base/android/media_codec_bridge.cc  369
-rw-r--r--  media/base/android/media_codec_bridge.h  112
-rw-r--r--  media/base/android/media_codec_bridge_unittest.cc  69
-rw-r--r--  media/base/android/media_decoder_job.cc  11
-rw-r--r--  media/base/android/video_decoder_job.cc  6
22 files changed, 1105 insertions, 207 deletions
diff --git a/chrome/browser/about_flags.cc b/chrome/browser/about_flags.cc
index 0f7a721..b5e024b 100644
--- a/chrome/browser/about_flags.cc
+++ b/chrome/browser/about_flags.cc
@@ -522,7 +522,7 @@ const Experiment kExperiments[] = {
"disable-webrtc-hw-encoding",
IDS_FLAGS_DISABLE_WEBRTC_HW_ENCODING_NAME,
IDS_FLAGS_DISABLE_WEBRTC_HW_ENCODING_DESCRIPTION,
- kOsCrOS,
+ kOsAndroid | kOsCrOS,
SINGLE_VALUE_TYPE(switches::kDisableWebRtcHWEncoding)
},
#endif
diff --git a/content/browser/gpu/compositor_util.cc b/content/browser/gpu/compositor_util.cc
index 81436e2..73af130 100644
--- a/content/browser/gpu/compositor_util.cc
+++ b/content/browser/gpu/compositor_util.cc
@@ -133,6 +133,17 @@ const GpuFeatureInfo GetGpuFeatureInfo(size_t index) {
" or command line.",
true
},
+#if defined(ENABLE_WEBRTC)
+ {
+ "video_encode",
+ manager->IsFeatureBlacklisted(
+ gpu::GPU_FEATURE_TYPE_ACCELERATED_VIDEO_ENCODE),
+ command_line.HasSwitch(switches::kDisableWebRtcHWEncoding),
+ "Accelerated video encode has been disabled, either via about:flags"
+ " or command line.",
+ true
+ },
+#endif
{
"video",
manager->IsFeatureBlacklisted(
diff --git a/content/browser/gpu/gpu_data_manager_impl_private.cc b/content/browser/gpu/gpu_data_manager_impl_private.cc
index b5b44ff..3409fb0 100644
--- a/content/browser/gpu/gpu_data_manager_impl_private.cc
+++ b/content/browser/gpu/gpu_data_manager_impl_private.cc
@@ -646,6 +646,11 @@ void GpuDataManagerImplPrivate::AppendRendererCommandLine(
if (IsFeatureBlacklisted(gpu::GPU_FEATURE_TYPE_ACCELERATED_VIDEO_DECODE) &&
!command_line->HasSwitch(switches::kDisableAcceleratedVideoDecode))
command_line->AppendSwitch(switches::kDisableAcceleratedVideoDecode);
+#if defined(ENABLE_WEBRTC)
+ if (IsFeatureBlacklisted(gpu::GPU_FEATURE_TYPE_ACCELERATED_VIDEO_ENCODE) &&
+ !command_line->HasSwitch(switches::kDisableWebRtcHWEncoding))
+ command_line->AppendSwitch(switches::kDisableWebRtcHWEncoding);
+#endif
if (use_software_compositor_ &&
!command_line->HasSwitch(switches::kEnableSoftwareCompositing))
@@ -713,6 +718,12 @@ void GpuDataManagerImplPrivate::AppendGpuCommandLine(
!command_line->HasSwitch(switches::kDisableAcceleratedVideoDecode)) {
command_line->AppendSwitch(switches::kDisableAcceleratedVideoDecode);
}
+#if defined(ENABLE_WEBRTC)
+ if (IsFeatureBlacklisted(gpu::GPU_FEATURE_TYPE_ACCELERATED_VIDEO_ENCODE) &&
+ !command_line->HasSwitch(switches::kDisableWebRtcHWEncoding)) {
+ command_line->AppendSwitch(switches::kDisableWebRtcHWEncoding);
+ }
+#endif
#if defined(OS_WIN)
// DisplayLink 7.1 and earlier can cause the GPU process to crash on startup.
diff --git a/content/browser/gpu/gpu_process_host.cc b/content/browser/gpu/gpu_process_host.cc
index 53e9841..3d480a0 100644
--- a/content/browser/gpu/gpu_process_host.cc
+++ b/content/browser/gpu/gpu_process_host.cc
@@ -1112,6 +1112,9 @@ bool GpuProcessHost::LaunchGpuProcess(const std::string& channel_id) {
switches::kDisableImageTransportSurface,
switches::kDisableLogging,
switches::kDisableSeccompFilterSandbox,
+#if defined(ENABLE_WEBRTC)
+ switches::kDisableWebRtcHWEncoding,
+#endif
switches::kEnableLogging,
switches::kEnableShareGroupAsyncTextureUpload,
switches::kGpuStartupDialog,
diff --git a/content/browser/resources/gpu/info_view.js b/content/browser/resources/gpu/info_view.js
index 13afa4a..e736d8e 100644
--- a/content/browser/resources/gpu/info_view.js
+++ b/content/browser/resources/gpu/info_view.js
@@ -92,6 +92,7 @@ cr.define('gpu', function() {
'flash_stage3d_baseline': 'Flash Stage3D Baseline profile',
'texture_sharing': 'Texture Sharing',
'video_decode': 'Video Decode',
+ 'video_encode': 'Video Encode',
'video': 'Video',
// GPU Switching
'gpu_switching': 'GPU Switching',
diff --git a/content/common/gpu/media/DEPS b/content/common/gpu/media/DEPS
index 6c858fb..987a2b1 100644
--- a/content/common/gpu/media/DEPS
+++ b/content/common/gpu/media/DEPS
@@ -1,4 +1,5 @@
include_rules = [
"+media",
"+third_party/libva",
+ "+third_party/libyuv",
]
diff --git a/content/common/gpu/media/android_video_decode_accelerator.cc b/content/common/gpu/media/android_video_decode_accelerator.cc
index ccf2903..c0ca517 100644
--- a/content/common/gpu/media/android_video_decode_accelerator.cc
+++ b/content/common/gpu/media/android_video_decode_accelerator.cc
@@ -84,8 +84,10 @@ bool AndroidVideoDecodeAccelerator::Initialize(
}
// Only consider using MediaCodec if it's likely backed by hardware.
- if (media::VideoCodecBridge::IsKnownUnaccelerated(codec_))
+ if (media::VideoCodecBridge::IsKnownUnaccelerated(
+ codec_, media::MEDIA_CODEC_DECODER)) {
return false;
+ }
if (!make_context_current_.Run()) {
LOG(ERROR) << "Failed to make this decoder's GL context current.";
@@ -210,7 +212,7 @@ void AndroidVideoDecodeAccelerator::DequeueOutput() {
size_t size = 0;
media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
- NoWaitTimeOut(), &buf_index, &offset, &size, &timestamp, &eos);
+ NoWaitTimeOut(), &buf_index, &offset, &size, &timestamp, &eos, NULL);
switch (status) {
case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
case media::MEDIA_CODEC_ERROR:
@@ -399,23 +401,21 @@ void AndroidVideoDecodeAccelerator::Flush() {
bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
DCHECK(surface_texture_.get());
- media_codec_.reset(media::VideoCodecBridge::Create(codec_, false));
-
- if (!media_codec_)
- return false;
gfx::ScopedJavaSurface surface(surface_texture_.get());
+
// Pass a dummy 320x240 canvas size and let the codec signal the real size
// when it's known from the bitstream.
- if (!media_codec_->Start(
- codec_, gfx::Size(320, 240), surface.j_surface().obj(), NULL)) {
+ media_codec_.reset(media::VideoCodecBridge::CreateDecoder(
+ codec_, false, gfx::Size(320, 240), surface.j_surface().obj(), NULL));
+ if (!media_codec_)
return false;
- }
+
io_timer_.Start(FROM_HERE,
DecodePollDelay(),
this,
&AndroidVideoDecodeAccelerator::DoIOTask);
- return media_codec_->GetOutputBuffers();
+ return true;
}
void AndroidVideoDecodeAccelerator::Reset() {
diff --git a/content/common/gpu/media/android_video_encode_accelerator.cc b/content/common/gpu/media/android_video_encode_accelerator.cc
new file mode 100644
index 0000000..278da31
--- /dev/null
+++ b/content/common/gpu/media/android_video_encode_accelerator.cc
@@ -0,0 +1,400 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/common/gpu/media/android_video_encode_accelerator.h"
+
+#include "base/bind.h"
+#include "base/command_line.h"
+#include "base/logging.h"
+#include "base/message_loop/message_loop.h"
+#include "base/metrics/histogram.h"
+#include "content/common/gpu/gpu_channel.h"
+#include "content/public/common/content_switches.h"
+#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
+#include "media/base/android/media_codec_bridge.h"
+#include "media/base/bitstream_buffer.h"
+#include "media/base/limits.h"
+#include "media/video/picture.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "ui/gl/android/scoped_java_surface.h"
+#include "ui/gl/gl_bindings.h"
+
+using media::MediaCodecBridge;
+using media::VideoCodecBridge;
+using media::VideoFrame;
+
+namespace content {
+
+enum {
+ // Subset of MediaCodecInfo.CodecCapabilities.
+ COLOR_FORMAT_YUV420_SEMIPLANAR = 21,
+};
+
+// Helper macros for dealing with failure. If |result| evaluates false, emit
+// |log| to DLOG(ERROR), register |error| with the client, and return.
+#define RETURN_ON_FAILURE(result, log, error) \
+ do { \
+ if (!(result)) { \
+ DLOG(ERROR) << log; \
+ if (client_ptr_factory_.GetWeakPtr()) { \
+ client_ptr_factory_.GetWeakPtr()->NotifyError(error); \
+ client_ptr_factory_.InvalidateWeakPtrs(); \
+ } \
+ return; \
+ } \
+ } while (0)
+
+static inline const base::TimeDelta EncodePollDelay() {
+ // Arbitrary choice that trades off outgoing latency against CPU utilization.
+ // Mirrors android_video_decode_accelerator.cc::DecodePollDelay().
+ return base::TimeDelta::FromMilliseconds(10);
+}
+
+static inline const base::TimeDelta NoWaitTimeOut() {
+ return base::TimeDelta::FromMicroseconds(0);
+}
+
+AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator(
+ media::VideoEncodeAccelerator::Client* client)
+ : client_ptr_factory_(client),
+ num_buffers_at_codec_(0),
+ num_output_buffers_(-1),
+ output_buffers_capacity_(0),
+ last_set_bitrate_(0) {}
+
+AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+}
+
+// static
+std::vector<media::VideoEncodeAccelerator::SupportedProfile>
+AndroidVideoEncodeAccelerator::GetSupportedProfiles() {
+ std::vector<MediaCodecBridge::CodecsInfo> codecs_info =
+ MediaCodecBridge::GetCodecsInfo();
+
+ std::vector<SupportedProfile> profiles;
+
+ const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
+ if (cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding))
+ return profiles;
+
+ for (size_t i = 0; i < codecs_info.size(); ++i) {
+ const MediaCodecBridge::CodecsInfo& info = codecs_info[i];
+ if (info.direction != media::MEDIA_CODEC_ENCODER || info.codecs != "vp8" ||
+ VideoCodecBridge::IsKnownUnaccelerated(media::kCodecVP8,
+ media::MEDIA_CODEC_ENCODER)) {
+ // We're only looking for a HW VP8 encoder.
+ continue;
+ }
+ SupportedProfile profile;
+ profile.profile = media::VP8PROFILE_MAIN;
+ // Wouldn't it be nice if MediaCodec exposed the maximum capabilities of the
+ // encoder? Sure would be. Too bad it doesn't. So we hard-code some
+ // reasonable defaults.
+ profile.max_resolution.SetSize(1920, 1088);
+ profile.max_framerate.numerator = 30;
+ profile.max_framerate.denominator = 1;
+ profiles.push_back(profile);
+ }
+ return profiles;
+}
+
+void AndroidVideoEncodeAccelerator::Initialize(
+ VideoFrame::Format format,
+ const gfx::Size& input_visible_size,
+ media::VideoCodecProfile output_profile,
+ uint32 initial_bitrate) {
+ DVLOG(3) << __PRETTY_FUNCTION__ << " format: " << format
+ << ", input_visible_size: " << input_visible_size.ToString()
+ << ", output_profile: " << output_profile
+ << ", initial_bitrate: " << initial_bitrate;
+ DCHECK(!media_codec_);
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ RETURN_ON_FAILURE(media::MediaCodecBridge::IsAvailable() &&
+ media::MediaCodecBridge::SupportsSetParameters() &&
+ format == VideoFrame::I420 &&
+ output_profile == media::VP8PROFILE_MAIN,
+ "Unexpected combo: " << format << ", " << output_profile,
+ kInvalidArgumentError);
+
+ last_set_bitrate_ = initial_bitrate;
+
+ // Only consider using MediaCodec if it's likely backed by hardware.
+ RETURN_ON_FAILURE(!media::VideoCodecBridge::IsKnownUnaccelerated(
+ media::kCodecVP8, media::MEDIA_CODEC_ENCODER),
+ "No HW support",
+ kPlatformFailureError);
+
+ // TODO(fischman): when there is more HW out there with different color-space
+ // support, this should turn into a negotiation with the codec for supported
+ // formats. For now we use the only format supported by the only available
+ // HW.
+ media_codec_.reset(
+ media::VideoCodecBridge::CreateEncoder(media::kCodecVP8,
+ input_visible_size,
+ initial_bitrate,
+ INITIAL_FRAMERATE,
+ IFRAME_INTERVAL,
+ COLOR_FORMAT_YUV420_SEMIPLANAR));
+
+ RETURN_ON_FAILURE(
+ media_codec_,
+ "Failed to create/start the codec: " << input_visible_size.ToString(),
+ kPlatformFailureError);
+
+ base::MessageLoop::current()->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoEncodeAccelerator::Client::NotifyInitializeDone,
+ client_ptr_factory_.GetWeakPtr()));
+
+ num_output_buffers_ = media_codec_->GetOutputBuffersCount();
+ output_buffers_capacity_ = media_codec_->GetOutputBuffersCapacity();
+ base::MessageLoop::current()->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers,
+ client_ptr_factory_.GetWeakPtr(),
+ num_output_buffers_,
+ input_visible_size,
+ output_buffers_capacity_));
+}
+
+void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() {
+ if (!io_timer_.IsRunning() &&
+ (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) {
+ io_timer_.Start(FROM_HERE,
+ EncodePollDelay(),
+ this,
+ &AndroidVideoEncodeAccelerator::DoIOTask);
+ }
+}
+
+void AndroidVideoEncodeAccelerator::MaybeStopIOTimer() {
+ if (io_timer_.IsRunning() &&
+ (num_buffers_at_codec_ == 0 && pending_frames_.empty())) {
+ io_timer_.Stop();
+ }
+}
+
+void AndroidVideoEncodeAccelerator::Encode(
+ const scoped_refptr<VideoFrame>& frame,
+ bool force_keyframe) {
+ DVLOG(3) << __PRETTY_FUNCTION__ << ": " << force_keyframe;
+ DCHECK(thread_checker_.CalledOnValidThread());
+ RETURN_ON_FAILURE(frame->format() == VideoFrame::I420,
+ "Unexpected format",
+ kInvalidArgumentError);
+
+ // MediaCodec doesn't have a way to specify stride for non-Packed formats, so
+ // we insist on being called with packed frames and no cropping :(
+ RETURN_ON_FAILURE(frame->row_bytes(VideoFrame::kYPlane) ==
+ frame->stride(VideoFrame::kYPlane) &&
+ frame->row_bytes(VideoFrame::kUPlane) ==
+ frame->stride(VideoFrame::kUPlane) &&
+ frame->row_bytes(VideoFrame::kVPlane) ==
+ frame->stride(VideoFrame::kVPlane) &&
+ gfx::Rect(frame->coded_size()) == frame->visible_rect(),
+ "Non-packed frame, or visible rect != coded size",
+ kInvalidArgumentError);
+
+ pending_frames_.push(MakeTuple(frame, force_keyframe, base::Time::Now()));
+ DoIOTask();
+}
+
+void AndroidVideoEncodeAccelerator::UseOutputBitstreamBuffer(
+ const media::BitstreamBuffer& buffer) {
+ DVLOG(3) << __PRETTY_FUNCTION__ << ": bitstream_buffer_id=" << buffer.id();
+ DCHECK(thread_checker_.CalledOnValidThread());
+ RETURN_ON_FAILURE(buffer.size() >= media_codec_->GetOutputBuffersCapacity(),
+ "Output buffers too small!",
+ kInvalidArgumentError);
+ available_bitstream_buffers_.push_back(buffer);
+ DoIOTask();
+}
+
+void AndroidVideoEncodeAccelerator::RequestEncodingParametersChange(
+ uint32 bitrate,
+ uint32 framerate) {
+ DVLOG(3) << __PRETTY_FUNCTION__ << ": bitrate: " << bitrate
+ << ", framerate: " << framerate;
+ DCHECK(thread_checker_.CalledOnValidThread());
+ if (bitrate != last_set_bitrate_) {
+ last_set_bitrate_ = bitrate;
+ media_codec_->SetVideoBitrate(bitrate);
+ }
+ // Note: Android's MediaCodec doesn't allow mid-stream adjustments to
+ // framerate, so we ignore that here. This is OK because Android only uses
+ // the framerate value from MediaFormat during configure() as a proxy for
+ // bitrate, and we set that explicitly.
+}
+
+void AndroidVideoEncodeAccelerator::Destroy() {
+ DVLOG(3) << __PRETTY_FUNCTION__;
+ DCHECK(thread_checker_.CalledOnValidThread());
+ client_ptr_factory_.InvalidateWeakPtrs();
+ if (media_codec_) {
+ if (io_timer_.IsRunning())
+ io_timer_.Stop();
+ media_codec_->Stop();
+ }
+ delete this;
+}
+
+void AndroidVideoEncodeAccelerator::DoIOTask() {
+ QueueInput();
+ DequeueOutput();
+ MaybeStartIOTimer();
+ MaybeStopIOTimer();
+}
+
+void AndroidVideoEncodeAccelerator::QueueInput() {
+ if (!client_ptr_factory_.GetWeakPtr() || pending_frames_.empty())
+ return;
+
+ int input_buf_index = 0;
+ media::MediaCodecStatus status =
+ media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index);
+ if (status != media::MEDIA_CODEC_OK) {
+ DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
+ status == media::MEDIA_CODEC_ERROR);
+ RETURN_ON_FAILURE(status != media::MEDIA_CODEC_ERROR,
+ "MediaCodec error",
+ kPlatformFailureError);
+ return;
+ }
+
+ const PendingFrames::value_type& input = pending_frames_.front();
+ bool is_key_frame = input.b;
+ if (is_key_frame) {
+ // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
+ // indicate this in the QueueInputBuffer() call below and guarantee _this_
+ // frame be encoded as a key frame, but sadly that flag is ignored.
+ // Instead, we request a key frame "soon".
+ media_codec_->RequestKeyFrameSoon();
+ }
+ scoped_refptr<VideoFrame> frame = input.a;
+
+ uint8* buffer = NULL;
+ size_t capacity = 0;
+ media_codec_->GetInputBuffer(input_buf_index, &buffer, &capacity);
+
+ size_t queued_size =
+ VideoFrame::AllocationSize(VideoFrame::I420, frame->coded_size());
+ RETURN_ON_FAILURE(capacity >= queued_size,
+ "Failed to get input buffer: " << input_buf_index,
+ kPlatformFailureError);
+
+ uint8* dst_y = buffer;
+ int dst_stride_y = frame->stride(VideoFrame::kYPlane);
+ uint8* dst_uv = buffer + frame->stride(VideoFrame::kYPlane) *
+ frame->rows(VideoFrame::kYPlane);
+ int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2;
+ // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. See comment at other
+ // mention of that constant.
+ bool converted = !libyuv::I420ToNV12(frame->data(VideoFrame::kYPlane),
+ frame->stride(VideoFrame::kYPlane),
+ frame->data(VideoFrame::kUPlane),
+ frame->stride(VideoFrame::kUPlane),
+ frame->data(VideoFrame::kVPlane),
+ frame->stride(VideoFrame::kVPlane),
+ dst_y,
+ dst_stride_y,
+ dst_uv,
+ dst_stride_uv,
+ frame->coded_size().width(),
+ frame->coded_size().height());
+ RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError);
+
+ fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1);
+ status = media_codec_->QueueInputBuffer(
+ input_buf_index, NULL, queued_size, fake_input_timestamp_);
+ UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime", base::Time::Now() - input.c);
+ RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
+ "Failed to QueueInputBuffer: " << status,
+ kPlatformFailureError);
+ ++num_buffers_at_codec_;
+ pending_frames_.pop();
+}
+
+bool AndroidVideoEncodeAccelerator::DoOutputBuffersSuffice() {
+ // If this returns false ever, then the VEA::Client interface will need to
+ // grow a DismissBitstreamBuffer() call, and VEA::Client impls will have to be
+ // prepared to field multiple requests to RequireBitstreamBuffers().
+ int count = media_codec_->GetOutputBuffersCount();
+ size_t capacity = media_codec_->GetOutputBuffersCapacity();
+ bool ret = media_codec_->GetOutputBuffers() && count <= num_output_buffers_ &&
+ capacity <= output_buffers_capacity_;
+ LOG_IF(ERROR, !ret) << "Need more/bigger buffers; before: "
+ << num_output_buffers_ << "x" << output_buffers_capacity_
+ << ", now: " << count << "x" << capacity;
+ UMA_HISTOGRAM_BOOLEAN("Media.AVEA.OutputBuffersSuffice", ret);
+ return ret;
+}
+
+void AndroidVideoEncodeAccelerator::DequeueOutput() {
+ if (!client_ptr_factory_.GetWeakPtr() ||
+ available_bitstream_buffers_.empty() || num_buffers_at_codec_ == 0) {
+ return;
+ }
+
+ int32 buf_index = 0;
+ size_t offset = 0;
+ size_t size = 0;
+ bool key_frame = false;
+ do {
+ media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
+ NoWaitTimeOut(), &buf_index, &offset, &size, NULL, NULL, &key_frame);
+ switch (status) {
+ case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
+ return;
+
+ case media::MEDIA_CODEC_ERROR:
+ RETURN_ON_FAILURE(false, "Codec error", kPlatformFailureError);
+ // Unreachable because of previous statement, but included for clarity.
+ return;
+
+ case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: // Fall-through.
+ case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
+ RETURN_ON_FAILURE(DoOutputBuffersSuffice(),
+ "Bitstream now requires more/larger buffers",
+ kPlatformFailureError);
+ break;
+
+ case media::MEDIA_CODEC_OK:
+ DCHECK_GE(buf_index, 0);
+ break;
+
+ default:
+ NOTREACHED();
+ break;
+ }
+ } while (buf_index < 0);
+
+ media::BitstreamBuffer bitstream_buffer = available_bitstream_buffers_.back();
+ available_bitstream_buffers_.pop_back();
+ scoped_ptr<base::SharedMemory> shm(
+ new base::SharedMemory(bitstream_buffer.handle(), false));
+ RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
+ "Failed to map SHM",
+ kPlatformFailureError);
+ RETURN_ON_FAILURE(size <= shm->mapped_size(),
+ "Encoded buffer too large: " << size << ">"
+ << shm->mapped_size(),
+ kPlatformFailureError);
+
+ media_codec_->CopyFromOutputBuffer(buf_index, offset, shm->memory(), size);
+ media_codec_->ReleaseOutputBuffer(buf_index, false);
+ --num_buffers_at_codec_;
+
+ UMA_HISTOGRAM_COUNTS_10000("Media.AVEA.EncodedBufferSizeKB", size / 1024);
+ base::MessageLoop::current()->PostTask(
+ FROM_HERE,
+ base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady,
+ client_ptr_factory_.GetWeakPtr(),
+ bitstream_buffer.id(),
+ size,
+ key_frame));
+}
+
+} // namespace content
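A note on the color conversion in QueueInput() above: COLOR_FORMAT_YUV420_SEMIPLANAR is NV12, a full-resolution Y plane followed by a single half-resolution plane of interleaved U/V bytes, which is why libyuv::I420ToNV12 is used and why dst_uv sits stride_y * rows_y bytes into the buffer. A minimal sketch of the layout arithmetic for a packed frame (hypothetical helper class, not part of this CL):

    // NV12 layout inside a MediaCodec input buffer for a packed
    // width x height frame, mirroring the arithmetic in QueueInput().
    final class Nv12Layout {
        final int yOffset = 0;  // Y plane starts at the top of the buffer.
        final int yStride;      // Packed frame: stride == row bytes == width.
        final int uvOffset;     // Interleaved UV plane follows the Y plane.
        final int uvStride;     // One U+V byte pair per two source columns.
        final int totalSize;    // width * height * 3 / 2, same as I420.

        Nv12Layout(int width, int height) {
            yStride = width;
            uvOffset = width * height;
            uvStride = (width / 2) * 2;           // == width when width is even.
            totalSize = uvOffset + uvOffset / 2;  // Y plane plus half again for UV.
        }
    }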
diff --git a/content/common/gpu/media/android_video_encode_accelerator.h b/content/common/gpu/media/android_video_encode_accelerator.h
new file mode 100644
index 0000000..e519a27
--- /dev/null
+++ b/content/common/gpu/media/android_video_encode_accelerator.h
@@ -0,0 +1,115 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef CONTENT_COMMON_GPU_MEDIA_ANDROID_VIDEO_ENCODE_ACCELERATOR_H_
+#define CONTENT_COMMON_GPU_MEDIA_ANDROID_VIDEO_ENCODE_ACCELERATOR_H_
+
+#include <list>
+#include <queue>
+#include <vector>
+
+#include "base/memory/weak_ptr.h"
+#include "base/threading/thread_checker.h"
+#include "base/timer/timer.h"
+#include "base/tuple.h"
+#include "content/common/content_export.h"
+#include "media/base/android/media_codec_bridge.h"
+#include "media/video/video_encode_accelerator.h"
+
+namespace media {
+class BitstreamBuffer;
+} // namespace media
+
+namespace content {
+
+// Android-specific implementation of media::VideoEncodeAccelerator, enabling
+// hardware-acceleration of video encoding, based on Android's MediaCodec class
+// (http://developer.android.com/reference/android/media/MediaCodec.html). This
+// class expects to live and be called on a single thread (the GPU process'
+// ChildThread).
+class CONTENT_EXPORT AndroidVideoEncodeAccelerator
+ : public media::VideoEncodeAccelerator {
+ public:
+ explicit AndroidVideoEncodeAccelerator(
+ media::VideoEncodeAccelerator::Client* client);
+ virtual ~AndroidVideoEncodeAccelerator();
+
+ static std::vector<media::VideoEncodeAccelerator::SupportedProfile>
+ GetSupportedProfiles();
+
+ // media::VideoEncodeAccelerator implementation.
+ virtual void Initialize(media::VideoFrame::Format format,
+ const gfx::Size& input_visible_size,
+ media::VideoCodecProfile output_profile,
+ uint32 initial_bitrate) OVERRIDE;
+ virtual void Encode(const scoped_refptr<media::VideoFrame>& frame,
+ bool force_keyframe) OVERRIDE;
+ virtual void UseOutputBitstreamBuffer(const media::BitstreamBuffer& buffer)
+ OVERRIDE;
+ virtual void RequestEncodingParametersChange(uint32 bitrate,
+ uint32 framerate) OVERRIDE;
+ virtual void Destroy() OVERRIDE;
+
+ private:
+ enum {
+ // Arbitrary choice.
+ INITIAL_FRAMERATE = 30,
+ // Until there are non-realtime users, no need for unrequested I-frames.
+ IFRAME_INTERVAL = kint32max,
+ };
+
+ // Impedance-mismatch fixers: MediaCodec is a poll-based API but VEA is a
+ // push-based API; these methods turn the crank to make the two work together.
+ void DoIOTask();
+ void QueueInput();
+ void DequeueOutput();
+
+ // Returns true if we don't need more or bigger output buffers.
+ bool DoOutputBuffersSuffice();
+
+ // Start & stop |io_timer_| if the time seems right.
+ void MaybeStartIOTimer();
+ void MaybeStopIOTimer();
+
+ // Used to DCHECK that we are called on the correct thread.
+ base::ThreadChecker thread_checker_;
+
+ // VideoDecodeAccelerator::Client callbacks go here. Invalidated once any
+ // error triggers.
+ base::WeakPtrFactory<Client> client_ptr_factory_;
+
+ scoped_ptr<media::VideoCodecBridge> media_codec_;
+
+ // Bitstream buffers waiting to be populated & returned to the client.
+ std::vector<media::BitstreamBuffer> available_bitstream_buffers_;
+
+ // Frames waiting to be passed to the codec, queued until an input buffer is
+ // available. Each element is a tuple of <Frame, key_frame, enqueue_time>.
+ typedef std::queue<
+ Tuple3<scoped_refptr<media::VideoFrame>, bool, base::Time> >
+ PendingFrames;
+ PendingFrames pending_frames_;
+
+ // Repeating timer responsible for draining pending IO to the codec.
+ base::RepeatingTimer<AndroidVideoEncodeAccelerator> io_timer_;
+
+ // The difference between number of buffers queued & dequeued at the codec.
+ int32 num_buffers_at_codec_;
+
+ // A monotonically-growing value, used as a fake timestamp just to keep things
+ // appearing to move forward.
+ base::TimeDelta fake_input_timestamp_;
+
+ // Number of requested output buffers and their capacity.
+ int num_output_buffers_; // -1 until RequireBitstreamBuffers.
+ size_t output_buffers_capacity_; // 0 until RequireBitstreamBuffers.
+
+ uint32 last_set_bitrate_; // In bps.
+
+ DISALLOW_COPY_AND_ASSIGN(AndroidVideoEncodeAccelerator);
+};
+
+} // namespace content
+
+#endif // CONTENT_COMMON_GPU_MEDIA_ANDROID_VIDEO_ENCODE_ACCELERATOR_H_
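The "impedance-mismatch fixers" named in the header reduce to one pattern: each client push (Encode(), UseOutputBitstreamBuffer()) pumps the codec once via DoIOTask(), and io_timer_ keeps pumping every 10 ms while frames or buffers remain in flight, stopping once both queues drain. A reduced sketch of that control flow (hypothetical names; the real class is single-threaded and uses base::RepeatingTimer rather than java.util.Timer):

    import java.util.Timer;
    import java.util.TimerTask;

    abstract class IoPump {
        private static final long POLL_DELAY_MS = 10;  // Mirrors EncodePollDelay().
        private final Timer timer = new Timer();
        private TimerTask task;  // Non-null while the repeating poll is running.

        // Called directly on every client push, and again by the timer.
        synchronized void pump() {
            queueInput();     // Feed pending frames into codec input buffers.
            dequeueOutput();  // Copy finished output into client buffers.
            if (task == null && workOutstanding()) {          // MaybeStartIOTimer().
                task = new TimerTask() {
                    @Override
                    public void run() { pump(); }
                };
                timer.schedule(task, POLL_DELAY_MS, POLL_DELAY_MS);
            } else if (task != null && !workOutstanding()) {  // MaybeStopIOTimer().
                task.cancel();
                task = null;
            }
        }

        abstract void queueInput();
        abstract void dequeueOutput();
        abstract boolean workOutstanding();
    }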
diff --git a/content/common/gpu/media/gpu_video_encode_accelerator.cc b/content/common/gpu/media/gpu_video_encode_accelerator.cc
index 8d115ee..4624ea2 100644
--- a/content/common/gpu/media/gpu_video_encode_accelerator.cc
+++ b/content/common/gpu/media/gpu_video_encode_accelerator.cc
@@ -17,6 +17,8 @@
#if defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL) && defined(USE_X11)
#include "content/common/gpu/media/exynos_video_encode_accelerator.h"
+#elif defined(OS_ANDROID) && defined(ENABLE_WEBRTC)
+#include "content/common/gpu/media/android_video_encode_accelerator.h"
#endif
namespace content {
@@ -88,6 +90,8 @@ GpuVideoEncodeAccelerator::GetSupportedProfiles() {
#if defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL) && defined(USE_X11)
profiles = ExynosVideoEncodeAccelerator::GetSupportedProfiles();
+#elif defined(OS_ANDROID) && defined(ENABLE_WEBRTC)
+ profiles = AndroidVideoEncodeAccelerator::GetSupportedProfiles();
#endif
// TODO(sheu): return platform-specific profiles.
@@ -95,8 +99,11 @@ GpuVideoEncodeAccelerator::GetSupportedProfiles() {
}
void GpuVideoEncodeAccelerator::CreateEncoder() {
+ DCHECK(!encoder_);
#if defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL) && defined(USE_X11)
encoder_.reset(new ExynosVideoEncodeAccelerator(this));
+#elif defined(OS_ANDROID) && defined(ENABLE_WEBRTC)
+ encoder_.reset(new AndroidVideoEncodeAccelerator(this));
#endif
}
diff --git a/content/content_common.gypi b/content/content_common.gypi
index 7d6214c..9434052 100644
--- a/content/content_common.gypi
+++ b/content/content_common.gypi
@@ -511,6 +511,16 @@
'common/gpu/media/android_video_decode_accelerator.h',
],
}],
+ ['OS=="android" and android_webview_build==0', {
+ 'dependencies': [
+ '../media/media.gyp:media',
+ '../third_party/libyuv/libyuv.gyp:libyuv',
+ ],
+ 'sources': [
+ 'common/gpu/media/android_video_encode_accelerator.cc',
+ 'common/gpu/media/android_video_encode_accelerator.h',
+ ],
+ }],
['target_arch=="arm" and chromeos == 1 and use_x11 == 1', {
'dependencies': [
'../media/media.gyp:media',
diff --git a/content/renderer/media/media_stream_dependency_factory.cc b/content/renderer/media/media_stream_dependency_factory.cc
index a37ae5f..11ba165 100644
--- a/content/renderer/media/media_stream_dependency_factory.cc
+++ b/content/renderer/media/media_stream_dependency_factory.cc
@@ -48,6 +48,10 @@
#include "content/renderer/media/rtc_video_decoder_factory_tv.h"
#endif
+#if defined(OS_ANDROID)
+#include "media/base/android/media_codec_bridge.h"
+#endif
+
namespace content {
// Constant constraint keys which enables default audio constraints on
@@ -594,6 +598,13 @@ bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
encoder_factory.reset(new RTCVideoEncoderFactory(gpu_factories));
}
+#if defined(OS_ANDROID)
+ if (!media::MediaCodecBridge::IsAvailable() ||
+ !media::MediaCodecBridge::SupportsSetParameters()) {
+ encoder_factory.reset();
+ }
+#endif
+
scoped_refptr<WebRtcAudioDeviceImpl> audio_device(
new WebRtcAudioDeviceImpl());
diff --git a/gpu/config/gpu_blacklist.cc b/gpu/config/gpu_blacklist.cc
index 39946b0..92c5207 100644
--- a/gpu/config/gpu_blacklist.cc
+++ b/gpu/config/gpu_blacklist.cc
@@ -36,6 +36,8 @@ GpuBlacklist* GpuBlacklist::Create() {
GPU_FEATURE_TYPE_TEXTURE_SHARING);
list->AddSupportedFeature("accelerated_video_decode",
GPU_FEATURE_TYPE_ACCELERATED_VIDEO_DECODE);
+ list->AddSupportedFeature("accelerated_video_encode",
+ GPU_FEATURE_TYPE_ACCELERATED_VIDEO_ENCODE);
list->AddSupportedFeature("3d_css",
GPU_FEATURE_TYPE_3D_CSS);
list->AddSupportedFeature("accelerated_video",
diff --git a/gpu/config/gpu_blacklist_unittest.cc b/gpu/config/gpu_blacklist_unittest.cc
index b1ed333..af381e0 100644
--- a/gpu/config/gpu_blacklist_unittest.cc
+++ b/gpu/config/gpu_blacklist_unittest.cc
@@ -123,6 +123,10 @@ GPU_BLACKLIST_FEATURE_TEST(AcceleratedVideoDecode,
"accelerated_video_decode",
GPU_FEATURE_TYPE_ACCELERATED_VIDEO_DECODE)
+GPU_BLACKLIST_FEATURE_TEST(AcceleratedVideoEncode,
+ "accelerated_video_encode",
+ GPU_FEATURE_TYPE_ACCELERATED_VIDEO_ENCODE)
+
GPU_BLACKLIST_FEATURE_TEST(Css3D,
"3d_css",
GPU_FEATURE_TYPE_3D_CSS)
diff --git a/gpu/config/gpu_feature_type.h b/gpu/config/gpu_feature_type.h
index b082d78..e0e4df4 100644
--- a/gpu/config/gpu_feature_type.h
+++ b/gpu/config/gpu_feature_type.h
@@ -19,6 +19,7 @@ enum GpuFeatureType {
GPU_FEATURE_TYPE_FLASH_STAGE3D,
GPU_FEATURE_TYPE_TEXTURE_SHARING,
GPU_FEATURE_TYPE_ACCELERATED_VIDEO_DECODE,
+ GPU_FEATURE_TYPE_ACCELERATED_VIDEO_ENCODE,
GPU_FEATURE_TYPE_3D_CSS,
GPU_FEATURE_TYPE_ACCELERATED_VIDEO,
GPU_FEATURE_TYPE_PANEL_FITTING,
diff --git a/gpu/config/software_rendering_list_json.cc b/gpu/config/software_rendering_list_json.cc
index 6ed261f..d686d84 100644
--- a/gpu/config/software_rendering_list_json.cc
+++ b/gpu/config/software_rendering_list_json.cc
@@ -971,6 +971,16 @@ LONG_STRING_CONST(
"features": [
"force_compositing_mode"
]
+ },
+ {
+ "id": 82,
+ "description": "MediaCodec is still too buggy to use for encoding (b/11536167).",
+ "os": {
+ "type": "android"
+ },
+ "features": [
+ "accelerated_video_encode"
+ ]
}
]
}
diff --git a/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java b/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java
index ad98a50..683d2e7 100644
--- a/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java
+++ b/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java
@@ -13,12 +13,14 @@ import android.media.MediaCodecList;
import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.os.Build;
+import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
@@ -46,6 +48,10 @@ class MediaCodecBridge {
private static final int MEDIA_CODEC_STOPPED = 8;
private static final int MEDIA_CODEC_ERROR = 9;
+ // Codec direction. Keep this in sync with media_codec_bridge.h.
+ private static final int MEDIA_CODEC_DECODER = 0;
+ private static final int MEDIA_CODEC_ENCODER = 1;
+
// After a flush(), dequeueOutputBuffer() can often produce empty presentation timestamps
// for several frames. As a result, the player may find that the time does not increase
// after decoding a frame. To detect this, we check whether the presentation timestamp from
@@ -84,10 +90,13 @@ class MediaCodecBridge {
private static class CodecInfo {
private final String mCodecType; // e.g. "video/x-vnd.on2.vp8".
private final String mCodecName; // e.g. "OMX.google.vp8.decoder".
+ private final int mDirection;
- private CodecInfo(String codecType, String codecName) {
+ private CodecInfo(String codecType, String codecName,
+ int direction) {
mCodecType = codecType;
mCodecName = codecName;
+ mDirection = direction;
}
@CalledByNative("CodecInfo")
@@ -95,6 +104,9 @@ class MediaCodecBridge {
@CalledByNative("CodecInfo")
private String codecName() { return mCodecName; }
+
+ @CalledByNative("CodecInfo")
+ private int direction() { return mDirection; }
}
private static class DequeueOutputResult {
@@ -139,25 +151,29 @@ class MediaCodecBridge {
*/
@CalledByNative
private static CodecInfo[] getCodecsInfo() {
- Map<String, CodecInfo> CodecInfoMap = new HashMap<String, CodecInfo>();
+ // Return the first (highest-priority) codec for each MIME type.
+ Map<String, CodecInfo> encoderInfoMap = new HashMap<String, CodecInfo>();
+ Map<String, CodecInfo> decoderInfoMap = new HashMap<String, CodecInfo>();
int count = MediaCodecList.getCodecCount();
for (int i = 0; i < count; ++i) {
MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
- if (info.isEncoder()) {
- continue;
- }
-
+ int direction =
+ info.isEncoder() ? MEDIA_CODEC_ENCODER : MEDIA_CODEC_DECODER;
String codecString = info.getName();
String[] supportedTypes = info.getSupportedTypes();
for (int j = 0; j < supportedTypes.length; ++j) {
- if (!CodecInfoMap.containsKey(supportedTypes[j])) {
- CodecInfoMap.put(supportedTypes[j], new CodecInfo(
- supportedTypes[j], codecString));
+ Map<String, CodecInfo> map = info.isEncoder() ? encoderInfoMap : decoderInfoMap;
+ if (!map.containsKey(supportedTypes[j])) {
+ map.put(supportedTypes[j], new CodecInfo(
+ supportedTypes[j], codecString, direction));
}
}
}
- return CodecInfoMap.values().toArray(
- new CodecInfo[CodecInfoMap.size()]);
+ ArrayList<CodecInfo> codecInfos = new ArrayList<CodecInfo>(
+ decoderInfoMap.size() + encoderInfoMap.size());
+ codecInfos.addAll(encoderInfoMap.values());
+ codecInfos.addAll(decoderInfoMap.values());
+ return codecInfos.toArray(new CodecInfo[codecInfos.size()]);
}
private static String getSecureDecoderNameForMime(String mime) {
@@ -187,7 +203,7 @@ class MediaCodecBridge {
}
@CalledByNative
- private static MediaCodecBridge create(String mime, boolean isSecure) {
+ private static MediaCodecBridge create(String mime, boolean isSecure, int direction) {
// Creation of ".secure" codecs sometimes crash instead of throwing exceptions
// on pre-JBMR2 devices.
if (isSecure && Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
@@ -196,14 +212,18 @@ class MediaCodecBridge {
MediaCodec mediaCodec = null;
try {
// |isSecure| only applies to video decoders.
- if (mime.startsWith("video") && isSecure) {
+ if (mime.startsWith("video") && isSecure && direction == MEDIA_CODEC_DECODER) {
mediaCodec = MediaCodec.createByCodecName(getSecureDecoderNameForMime(mime));
} else {
- mediaCodec = MediaCodec.createDecoderByType(mime);
+ if (direction == MEDIA_CODEC_ENCODER) {
+ mediaCodec = MediaCodec.createEncoderByType(mime);
+ } else {
+ mediaCodec = MediaCodec.createDecoderByType(mime);
+ }
}
} catch (Exception e) {
Log.e(TAG, "Failed to create MediaCodec: " + mime + ", isSecure: "
- + isSecure + ", " + e.toString());
+ + isSecure + ", direction: " + direction, e);
}
if (mediaCodec == null) {
@@ -215,7 +235,9 @@ class MediaCodecBridge {
@CalledByNative
private void release() {
+ mMediaCodec.stop();
mMediaCodec.release();
+ mMediaCodec = null;
if (mAudioTrack != null) {
mAudioTrack.release();
}
@@ -227,7 +249,7 @@ class MediaCodecBridge {
mMediaCodec.start();
mInputBuffers = mMediaCodec.getInputBuffers();
} catch (IllegalStateException e) {
- Log.e(TAG, "Cannot start the media codec " + e.toString());
+ Log.e(TAG, "Cannot start the media codec", e);
return false;
}
return true;
@@ -246,10 +268,11 @@ class MediaCodecBridge {
Log.e(TAG, "dequeueInputBuffer: MediaCodec.INFO_TRY_AGAIN_LATER");
status = MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER;
} else {
+ Log.e(TAG, "Unexpected index_or_status: " + index_or_status);
assert(false);
}
} catch(Exception e) {
- Log.e(TAG, "Failed to dequeue input buffer: " + e.toString());
+ Log.e(TAG, "Failed to dequeue input buffer", e);
}
return new DequeueInputResult(status, index);
}
@@ -263,7 +286,7 @@ class MediaCodecBridge {
}
mMediaCodec.flush();
} catch(IllegalStateException e) {
- Log.e(TAG, "Failed to flush MediaCodec " + e.toString());
+ Log.e(TAG, "Failed to flush MediaCodec", e);
return MEDIA_CODEC_ERROR;
}
return MEDIA_CODEC_OK;
@@ -298,19 +321,59 @@ class MediaCodecBridge {
}
@CalledByNative
+ private int getInputBuffersCount() {
+ return mInputBuffers.length;
+ }
+
+ @CalledByNative
+ private int getOutputBuffersCount() {
+ return mOutputBuffers != null ? mOutputBuffers.length : -1;
+ }
+
+ @CalledByNative
+ private int getOutputBuffersCapacity() {
+ return mOutputBuffers != null ? mOutputBuffers[0].capacity() : -1;
+ }
+
+ @CalledByNative
+ private boolean getOutputBuffers() {
+ try {
+ mOutputBuffers = mMediaCodec.getOutputBuffers();
+ } catch (IllegalStateException e) {
+ Log.e(TAG, "Cannot get output buffers", e);
+ return false;
+ }
+ return true;
+ }
+
+ @CalledByNative
private int queueInputBuffer(
int index, int offset, int size, long presentationTimeUs, int flags) {
resetLastPresentationTimeIfNeeded(presentationTimeUs);
try {
mMediaCodec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
} catch(Exception e) {
- Log.e(TAG, "Failed to queue input buffer: " + e.toString());
+ Log.e(TAG, "Failed to queue input buffer", e);
return MEDIA_CODEC_ERROR;
}
return MEDIA_CODEC_OK;
}
@CalledByNative
+ private void setVideoBitrate(int bps) {
+ Bundle b = new Bundle();
+ b.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bps);
+ mMediaCodec.setParameters(b);
+ }
+
+ @CalledByNative
+ private void requestKeyFrameSoon() {
+ Bundle b = new Bundle();
+ b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ mMediaCodec.setParameters(b);
+ }
+
+ @CalledByNative
private int queueSecureInputBuffer(
int index, int offset, byte[] iv, byte[] keyId, int[] numBytesOfClearData,
int[] numBytesOfEncryptedData, int numSubSamples, long presentationTimeUs) {
@@ -321,7 +384,7 @@ class MediaCodecBridge {
keyId, iv, MediaCodec.CRYPTO_MODE_AES_CTR);
mMediaCodec.queueSecureInputBuffer(index, offset, cryptoInfo, presentationTimeUs, 0);
} catch (MediaCodec.CryptoException e) {
- Log.e(TAG, "Failed to queue secure input buffer: " + e.toString());
+ Log.e(TAG, "Failed to queue secure input buffer", e);
if (e.getErrorCode() == MediaCodec.CryptoException.ERROR_NO_KEY) {
Log.e(TAG, "MediaCodec.CryptoException.ERROR_NO_KEY");
return MEDIA_CODEC_NO_KEY;
@@ -329,7 +392,7 @@ class MediaCodecBridge {
Log.e(TAG, "MediaCodec.CryptoException with error code " + e.getErrorCode());
return MEDIA_CODEC_ERROR;
} catch(IllegalStateException e) {
- Log.e(TAG, "Failed to queue secure input buffer: " + e.toString());
+ Log.e(TAG, "Failed to queue secure input buffer", e);
return MEDIA_CODEC_ERROR;
}
return MEDIA_CODEC_OK;
@@ -341,17 +404,6 @@ class MediaCodecBridge {
}
@CalledByNative
- private boolean getOutputBuffers() {
- try {
- mOutputBuffers = mMediaCodec.getOutputBuffers();
- } catch (IllegalStateException e) {
- Log.e(TAG, "Cannot get output buffers " + e.toString());
- return false;
- }
- return true;
- }
-
- @CalledByNative
private DequeueOutputResult dequeueOutputBuffer(long timeoutUs) {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int status = MEDIA_CODEC_ERROR;
@@ -376,10 +428,11 @@ class MediaCodecBridge {
} else if (index_or_status == MediaCodec.INFO_TRY_AGAIN_LATER) {
status = MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER;
} else {
+ Log.e(TAG, "Unexpected index_or_status: " + index_or_status);
assert(false);
}
} catch (IllegalStateException e) {
- Log.e(TAG, "Failed to dequeue output buffer: " + e.toString());
+ Log.e(TAG, "Failed to dequeue output buffer", e);
}
return new DequeueOutputResult(
@@ -393,7 +446,7 @@ class MediaCodecBridge {
mMediaCodec.configure(format, surface, crypto, flags);
return true;
} catch (IllegalStateException e) {
- Log.e(TAG, "Cannot configure the video codec " + e.toString());
+ Log.e(TAG, "Cannot configure the video codec", e);
}
return false;
}
@@ -404,11 +457,22 @@ class MediaCodecBridge {
}
@CalledByNative
- private static MediaFormat createVideoFormat(String mime, int width, int height) {
+ private static MediaFormat createVideoDecoderFormat(String mime, int width, int height) {
return MediaFormat.createVideoFormat(mime, width, height);
}
@CalledByNative
+ private static MediaFormat createVideoEncoderFormat(String mime, int width, int height,
+ int bitRate, int frameRate, int iFrameInterval, int colorFormat) {
+ MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
+ return format;
+ }
+
+ @CalledByNative
private static void setCodecSpecificData(MediaFormat format, int index, byte[] bytes) {
String name = null;
if (index == 0) {
@@ -445,7 +509,7 @@ class MediaCodecBridge {
}
return true;
} catch (IllegalStateException e) {
- Log.e(TAG, "Cannot configure the audio codec " + e.toString());
+ Log.e(TAG, "Cannot configure the audio codec", e);
}
return false;
}
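Tying the Java pieces together: create(mime, isSecure, direction) now reaches MediaCodec.createEncoderByType() for encoders, and the format built by createVideoEncoderFormat() is what ultimately gets passed to MediaCodec.configure() with CONFIGURE_FLAG_ENCODE (surfaced as kConfigureFlagEncode in media_codec_bridge.cc below; the configureVideo() call itself is outside this excerpt). A sketch of the equivalent standalone setup, with hypothetical 720p / 1 Mbps values:

    import android.media.MediaCodec;
    import android.media.MediaCodecInfo;
    import android.media.MediaFormat;

    class Vp8EncoderSetup {
        static MediaCodec createAndConfigure() throws Exception {
            MediaCodec codec = MediaCodec.createEncoderByType("video/x-vnd.on2.vp8");
            MediaFormat format =
                    MediaFormat.createVideoFormat("video/x-vnd.on2.vp8", 1280, 720);
            format.setInteger(MediaFormat.KEY_BIT_RATE, 1000000);  // bps.
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);     // INITIAL_FRAMERATE.
            // IFRAME_INTERVAL = kint32max: no unrequested I-frames.
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, Integer.MAX_VALUE);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,  // Constant value 21.
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
            codec.configure(format, null /* surface */, null /* crypto */,
                    MediaCodec.CONFIGURE_FLAG_ENCODE);
            codec.start();
            return codec;
        }
    }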
diff --git a/media/base/android/media_codec_bridge.cc b/media/base/android/media_codec_bridge.cc
index 1acd23a..15ce6e4 100644
--- a/media/base/android/media_codec_bridge.cc
+++ b/media/base/android/media_codec_bridge.cc
@@ -28,7 +28,11 @@ using base::android::ScopedJavaLocalRef;
namespace media {
-enum { kBufferFlagEndOfStream = 4 };
+enum {
+ kBufferFlagSyncFrame = 1, // BUFFER_FLAG_SYNC_FRAME
+ kBufferFlagEndOfStream = 4, // BUFFER_FLAG_END_OF_STREAM
+ kConfigureFlagEncode = 1, // CONFIGURE_FLAG_ENCODE
+};
static const std::string AudioCodecToAndroidMimeType(const AudioCodec& codec) {
switch (codec) {
@@ -64,7 +68,7 @@ static const std::string CodecTypeToAndroidMimeType(const std::string& codec) {
return "audio/mp4a-latm";
if (codec == "vp8" || codec == "vp8.0")
return "video/x-vnd.on2.vp8";
- if (codec == "vp9" || codec == "vp9.0")
+ if (codec == "vp9" || codec == "vp9.0")
return "video/x-vnd.on2.vp9";
if (codec == "vorbis")
return "audio/vorbis";
@@ -90,8 +94,8 @@ static const std::string AndroidMimeTypeToCodecType(const std::string& mime) {
return std::string();
}
-static ScopedJavaLocalRef<jintArray> ToJavaIntArray(
- JNIEnv* env, scoped_ptr<jint[]> native_array, int size) {
+static ScopedJavaLocalRef<jintArray>
+ToJavaIntArray(JNIEnv* env, scoped_ptr<jint[]> native_array, int size) {
ScopedJavaLocalRef<jintArray> j_array(env, env->NewIntArray(size));
env->SetIntArrayRegion(j_array.obj(), 0, size, native_array.get());
return j_array;
@@ -104,11 +108,17 @@ bool MediaCodecBridge::IsAvailable() {
}
// static
-void MediaCodecBridge::GetCodecsInfo(
- std::vector<CodecsInfo>* codecs_info) {
+bool MediaCodecBridge::SupportsSetParameters() {
+ // MediaCodec.setParameters() is only available starting with K.
+ return base::android::BuildInfo::GetInstance()->sdk_int() >= 19;
+}
+
+// static
+std::vector<MediaCodecBridge::CodecsInfo> MediaCodecBridge::GetCodecsInfo() {
+ std::vector<CodecsInfo> codecs_info;
JNIEnv* env = AttachCurrentThread();
if (!IsAvailable())
- return;
+ return codecs_info;
std::string mime_type;
std::string codec_name;
@@ -126,8 +136,11 @@ void MediaCodecBridge::GetCodecsInfo(
CodecsInfo info;
info.codecs = AndroidMimeTypeToCodecType(mime_type);
ConvertJavaStringToUTF8(env, j_codec_name.obj(), &info.name);
- codecs_info->push_back(info);
+ info.direction = static_cast<MediaCodecDirection>(
+ Java_CodecInfo_direction(env, j_info.obj()));
+ codecs_info.push_back(info);
}
+ return codecs_info;
}
// static
@@ -138,7 +151,7 @@ bool MediaCodecBridge::CanDecode(const std::string& codec, bool is_secure) {
return false;
ScopedJavaLocalRef<jstring> j_mime = ConvertUTF8ToJavaString(env, mime);
ScopedJavaLocalRef<jobject> j_media_codec_bridge =
- Java_MediaCodecBridge_create(env, j_mime.obj(), is_secure);
+ Java_MediaCodecBridge_create(env, j_mime.obj(), is_secure, false);
if (!j_media_codec_bridge.is_null()) {
Java_MediaCodecBridge_release(env, j_media_codec_bridge.obj());
return true;
@@ -147,12 +160,14 @@ bool MediaCodecBridge::CanDecode(const std::string& codec, bool is_secure) {
}
// static
-bool MediaCodecBridge::IsKnownUnaccelerated(const std::string& mime_type) {
+bool MediaCodecBridge::IsKnownUnaccelerated(const std::string& mime_type,
+ MediaCodecDirection direction) {
std::string codec_type = AndroidMimeTypeToCodecType(mime_type);
- std::vector<media::MediaCodecBridge::CodecsInfo> codecs_info;
- media::MediaCodecBridge::GetCodecsInfo(&codecs_info);
+ std::vector<media::MediaCodecBridge::CodecsInfo> codecs_info =
+ MediaCodecBridge::GetCodecsInfo();
for (size_t i = 0; i < codecs_info.size(); ++i) {
- if (codecs_info[i].codecs == codec_type) {
+ if (codecs_info[i].codecs == codec_type &&
+ codecs_info[i].direction == direction) {
// It would be nice if MediaCodecInfo externalized some notion of
// HW-acceleration but it doesn't. Android Media guidance is that the
// prefix below is always used for SW decoders, so that's what we use.
@@ -162,13 +177,15 @@ bool MediaCodecBridge::IsKnownUnaccelerated(const std::string& mime_type) {
return true;
}
-MediaCodecBridge::MediaCodecBridge(const std::string& mime, bool is_secure) {
+MediaCodecBridge::MediaCodecBridge(const std::string& mime,
+ bool is_secure,
+ MediaCodecDirection direction) {
JNIEnv* env = AttachCurrentThread();
CHECK(env);
DCHECK(!mime.empty());
ScopedJavaLocalRef<jstring> j_mime = ConvertUTF8ToJavaString(env, mime);
j_media_codec_.Reset(
- Java_MediaCodecBridge_create(env, j_mime.obj(), is_secure));
+ Java_MediaCodecBridge_create(env, j_mime.obj(), is_secure, direction));
}
MediaCodecBridge::~MediaCodecBridge() {
@@ -203,22 +220,41 @@ void MediaCodecBridge::GetOutputFormat(int* width, int* height) {
}
MediaCodecStatus MediaCodecBridge::QueueInputBuffer(
- int index, const uint8* data, int data_size,
+ int index,
+ const uint8* data,
+ int orig_data_size,
const base::TimeDelta& presentation_time) {
- if (!FillInputBuffer(index, data, data_size))
+ DVLOG(3) << "MediaCodecBridge::QueueInputBuffer: " << index << ": "
+ << orig_data_size;
+ size_t data_size = base::checked_numeric_cast<size_t>(orig_data_size);
+ if (data && !FillInputBuffer(index, data, data_size))
return MEDIA_CODEC_ERROR;
JNIEnv* env = AttachCurrentThread();
- return static_cast<MediaCodecStatus>(Java_MediaCodecBridge_queueInputBuffer(
- env, j_media_codec_.obj(),
- index, 0, data_size, presentation_time.InMicroseconds(), 0));
+ return static_cast<MediaCodecStatus>(
+ Java_MediaCodecBridge_queueInputBuffer(env,
+ j_media_codec_.obj(),
+ index,
+ 0,
+ data_size,
+ presentation_time.InMicroseconds(),
+ 0));
}
MediaCodecStatus MediaCodecBridge::QueueSecureInputBuffer(
- int index, const uint8* data, int data_size, const uint8* key_id,
- int key_id_size, const uint8* iv, int iv_size,
- const SubsampleEntry* subsamples, int subsamples_size,
+ int index,
+ const uint8* data,
+ int orig_data_size,
+ const uint8* key_id,
+ int key_id_size,
+ const uint8* iv,
+ int iv_size,
+ const SubsampleEntry* subsamples,
+ int subsamples_size,
const base::TimeDelta& presentation_time) {
- if (!FillInputBuffer(index, data, data_size))
+ DVLOG(3) << "MediaCodecBridge::QueueSecureInputBuffer: " << index << ": "
+ << orig_data_size;
+ size_t data_size = base::checked_numeric_cast<size_t>(orig_data_size);
+ if (data && !FillInputBuffer(index, data, data_size))
return MEDIA_CODEC_ERROR;
JNIEnv* env = AttachCurrentThread();
@@ -256,55 +292,87 @@ MediaCodecStatus MediaCodecBridge::QueueSecureInputBuffer(
}
ScopedJavaLocalRef<jintArray> clear_array =
- ToJavaIntArray(env, native_clear_array.Pass(), new_subsamples_size);
+ ToJavaIntArray(env, native_clear_array.Pass(), new_subsamples_size);
ScopedJavaLocalRef<jintArray> cypher_array =
- ToJavaIntArray(env, native_cypher_array.Pass(), new_subsamples_size);
+ ToJavaIntArray(env, native_cypher_array.Pass(), new_subsamples_size);
return static_cast<MediaCodecStatus>(
Java_MediaCodecBridge_queueSecureInputBuffer(
- env, j_media_codec_.obj(), index, 0, j_iv.obj(), j_key_id.obj(),
- clear_array.obj(), cypher_array.obj(), new_subsamples_size,
+ env,
+ j_media_codec_.obj(),
+ index,
+ 0,
+ j_iv.obj(),
+ j_key_id.obj(),
+ clear_array.obj(),
+ cypher_array.obj(),
+ new_subsamples_size,
presentation_time.InMicroseconds()));
}
void MediaCodecBridge::QueueEOS(int input_buffer_index) {
+ DVLOG(3) << "MediaCodecBridge::QueueEOS: " << input_buffer_index;
JNIEnv* env = AttachCurrentThread();
- Java_MediaCodecBridge_queueInputBuffer(
- env, j_media_codec_.obj(),
- input_buffer_index, 0, 0, 0, kBufferFlagEndOfStream);
+ Java_MediaCodecBridge_queueInputBuffer(env,
+ j_media_codec_.obj(),
+ input_buffer_index,
+ 0,
+ 0,
+ 0,
+ kBufferFlagEndOfStream);
}
MediaCodecStatus MediaCodecBridge::DequeueInputBuffer(
- const base::TimeDelta& timeout, int* index) {
+ const base::TimeDelta& timeout,
+ int* index) {
JNIEnv* env = AttachCurrentThread();
ScopedJavaLocalRef<jobject> result = Java_MediaCodecBridge_dequeueInputBuffer(
env, j_media_codec_.obj(), timeout.InMicroseconds());
*index = Java_DequeueInputResult_index(env, result.obj());
- return static_cast<MediaCodecStatus>(
+ MediaCodecStatus status = static_cast<MediaCodecStatus>(
Java_DequeueInputResult_status(env, result.obj()));
+ DVLOG(3) << "MediaCodecBridge::DequeueInputBuffer: status: " << status
+ << ", index: " << *index;
+ return status;
}
MediaCodecStatus MediaCodecBridge::DequeueOutputBuffer(
- const base::TimeDelta& timeout, int* index, size_t* offset, size_t* size,
- base::TimeDelta* presentation_time, bool* end_of_stream) {
+ const base::TimeDelta& timeout,
+ int* index,
+ size_t* offset,
+ size_t* size,
+ base::TimeDelta* presentation_time,
+ bool* end_of_stream,
+ bool* key_frame) {
JNIEnv* env = AttachCurrentThread();
ScopedJavaLocalRef<jobject> result =
- Java_MediaCodecBridge_dequeueOutputBuffer(env, j_media_codec_.obj(),
- timeout.InMicroseconds());
- *index = Java_DequeueOutputResult_index(env, result.obj());;
+ Java_MediaCodecBridge_dequeueOutputBuffer(
+ env, j_media_codec_.obj(), timeout.InMicroseconds());
+ *index = Java_DequeueOutputResult_index(env, result.obj());
*offset = base::checked_numeric_cast<size_t>(
- Java_DequeueOutputResult_offset(env, result.obj()));
+ Java_DequeueOutputResult_offset(env, result.obj()));
*size = base::checked_numeric_cast<size_t>(
- Java_DequeueOutputResult_numBytes(env, result.obj()));
- *presentation_time = base::TimeDelta::FromMicroseconds(
- Java_DequeueOutputResult_presentationTimeMicroseconds(env, result.obj()));
+ Java_DequeueOutputResult_numBytes(env, result.obj()));
+ if (presentation_time) {
+ *presentation_time = base::TimeDelta::FromMicroseconds(
+ Java_DequeueOutputResult_presentationTimeMicroseconds(env,
+ result.obj()));
+ }
int flags = Java_DequeueOutputResult_flags(env, result.obj());
- *end_of_stream = flags & kBufferFlagEndOfStream;
- return static_cast<MediaCodecStatus>(
+ if (end_of_stream)
+ *end_of_stream = flags & kBufferFlagEndOfStream;
+ if (key_frame)
+ *key_frame = flags & kBufferFlagSyncFrame;
+ MediaCodecStatus status = static_cast<MediaCodecStatus>(
Java_DequeueOutputResult_status(env, result.obj()));
+ DVLOG(3) << "MediaCodecBridge::DequeueOutputBuffer: status: " << status
+ << ", index: " << *index << ", offset: " << *offset
+ << ", size: " << *size << ", flags: " << flags;
+ return status;
}
void MediaCodecBridge::ReleaseOutputBuffer(int index, bool render) {
+ DVLOG(3) << "MediaCodecBridge::ReleaseOutputBuffer: " << index;
JNIEnv* env = AttachCurrentThread();
CHECK(env);
@@ -312,38 +380,85 @@ void MediaCodecBridge::ReleaseOutputBuffer(int index, bool render) {
env, j_media_codec_.obj(), index, render);
}
+int MediaCodecBridge::GetInputBuffersCount() {
+ JNIEnv* env = AttachCurrentThread();
+ return Java_MediaCodecBridge_getInputBuffersCount(env, j_media_codec_.obj());
+}
+
+int MediaCodecBridge::GetOutputBuffersCount() {
+ JNIEnv* env = AttachCurrentThread();
+ return Java_MediaCodecBridge_getOutputBuffersCount(env, j_media_codec_.obj());
+}
+
+size_t MediaCodecBridge::GetOutputBuffersCapacity() {
+ JNIEnv* env = AttachCurrentThread();
+ return Java_MediaCodecBridge_getOutputBuffersCapacity(env,
+ j_media_codec_.obj());
+}
+
bool MediaCodecBridge::GetOutputBuffers() {
JNIEnv* env = AttachCurrentThread();
return Java_MediaCodecBridge_getOutputBuffers(env, j_media_codec_.obj());
}
-bool MediaCodecBridge::FillInputBuffer(int index, const uint8* data, int size) {
+void MediaCodecBridge::GetInputBuffer(int input_buffer_index,
+ uint8** data,
+ size_t* capacity) {
JNIEnv* env = AttachCurrentThread();
+ ScopedJavaLocalRef<jobject> j_buffer(Java_MediaCodecBridge_getInputBuffer(
+ env, j_media_codec_.obj(), input_buffer_index));
+ *data = static_cast<uint8*>(env->GetDirectBufferAddress(j_buffer.obj()));
+ *capacity = base::checked_numeric_cast<size_t>(
+ env->GetDirectBufferCapacity(j_buffer.obj()));
+}
+bool MediaCodecBridge::CopyFromOutputBuffer(int index,
+ size_t offset,
+ void* dst,
+ int dst_size) {
+ JNIEnv* env = AttachCurrentThread();
ScopedJavaLocalRef<jobject> j_buffer(
- Java_MediaCodecBridge_getInputBuffer(env, j_media_codec_.obj(), index));
- jlong capacity = env->GetDirectBufferCapacity(j_buffer.obj());
+ Java_MediaCodecBridge_getOutputBuffer(env, j_media_codec_.obj(), index));
+ void* src_data =
+ reinterpret_cast<uint8*>(env->GetDirectBufferAddress(j_buffer.obj())) +
+ offset;
+ int src_capacity = env->GetDirectBufferCapacity(j_buffer.obj()) - offset;
+ if (src_capacity < dst_size)
+ return false;
+ memcpy(dst, src_data, dst_size);
+ return true;
+}
+
+bool MediaCodecBridge::FillInputBuffer(int index,
+ const uint8* data,
+ size_t size) {
+ uint8* dst = NULL;
+ size_t capacity = 0;
+ GetInputBuffer(index, &dst, &capacity);
+ CHECK(dst);
+
if (size > capacity) {
LOG(ERROR) << "Input buffer size " << size
<< " exceeds MediaCodec input buffer capacity: " << capacity;
return false;
}
- uint8* direct_buffer =
- static_cast<uint8*>(env->GetDirectBufferAddress(j_buffer.obj()));
- memcpy(direct_buffer, data, size);
+ memcpy(dst, data, size);
return true;
}
AudioCodecBridge::AudioCodecBridge(const std::string& mime)
- // Audio codec doesn't care about security level.
- : MediaCodecBridge(mime, false) {
-}
-
-bool AudioCodecBridge::Start(
- const AudioCodec& codec, int sample_rate, int channel_count,
- const uint8* extra_data, size_t extra_data_size, bool play_audio,
- jobject media_crypto) {
+ // Audio codec doesn't care about security level and there is no need for
+ // audio encoding yet.
+ : MediaCodecBridge(mime, false, MEDIA_CODEC_DECODER) {}
+
+bool AudioCodecBridge::Start(const AudioCodec& codec,
+ int sample_rate,
+ int channel_count,
+ const uint8* extra_data,
+ size_t extra_data_size,
+ bool play_audio,
+ jobject media_crypto) {
JNIEnv* env = AttachCurrentThread();
if (!media_codec())
@@ -355,32 +470,31 @@ bool AudioCodecBridge::Start(
ScopedJavaLocalRef<jstring> j_mime =
ConvertUTF8ToJavaString(env, codec_string);
- ScopedJavaLocalRef<jobject> j_format(
- Java_MediaCodecBridge_createAudioFormat(
- env, j_mime.obj(), sample_rate, channel_count));
+ ScopedJavaLocalRef<jobject> j_format(Java_MediaCodecBridge_createAudioFormat(
+ env, j_mime.obj(), sample_rate, channel_count));
DCHECK(!j_format.is_null());
if (!ConfigureMediaFormat(j_format.obj(), codec, extra_data, extra_data_size))
return false;
if (!Java_MediaCodecBridge_configureAudio(
- env, media_codec(), j_format.obj(), media_crypto, 0, play_audio)) {
+ env, media_codec(), j_format.obj(), media_crypto, 0, play_audio)) {
return false;
}
return StartInternal();
}
-bool AudioCodecBridge::ConfigureMediaFormat(
- jobject j_format, const AudioCodec& codec, const uint8* extra_data,
- size_t extra_data_size) {
+bool AudioCodecBridge::ConfigureMediaFormat(jobject j_format,
+ const AudioCodec& codec,
+ const uint8* extra_data,
+ size_t extra_data_size) {
if (extra_data_size == 0)
return true;
JNIEnv* env = AttachCurrentThread();
switch (codec) {
- case kCodecVorbis:
- {
+ case kCodecVorbis: {
if (extra_data[0] != 2) {
LOG(ERROR) << "Invalid number of vorbis headers before the codec "
<< "header: " << extra_data[0];
@@ -425,8 +539,7 @@ bool AudioCodecBridge::ConfigureMediaFormat(
env, j_format, 1, last_header.obj());
break;
}
- case kCodecAAC:
- {
+ case kCodecAAC: {
media::BitReader reader(extra_data, extra_data_size);
// The following code is copied from aac.cc
@@ -485,8 +598,7 @@ void AudioCodecBridge::PlayOutputBuffer(int index, size_t size) {
ScopedJavaLocalRef<jbyteArray> byte_array =
base::android::ToJavaByteArray(env, buffer, numBytes);
- Java_MediaCodecBridge_playOutputBuffer(
- env, media_codec(), byte_array.obj());
+ Java_MediaCodecBridge_playOutputBuffer(env, media_codec(), byte_array.obj());
}
void AudioCodecBridge::SetVolume(double volume) {
@@ -494,57 +606,104 @@ void AudioCodecBridge::SetVolume(double volume) {
Java_MediaCodecBridge_setVolume(env, media_codec(), volume);
}
-VideoCodecBridge::VideoCodecBridge(const std::string& mime, bool is_secure)
- : MediaCodecBridge(mime, is_secure) {
+AudioCodecBridge* AudioCodecBridge::Create(const AudioCodec& codec) {
+ const std::string mime = AudioCodecToAndroidMimeType(codec);
+ return mime.empty() ? NULL : new AudioCodecBridge(mime);
}
-bool VideoCodecBridge::Start(
- const VideoCodec& codec, const gfx::Size& size, jobject surface,
- jobject media_crypto) {
- JNIEnv* env = AttachCurrentThread();
+// static
+bool AudioCodecBridge::IsKnownUnaccelerated(const AudioCodec& codec) {
+ return MediaCodecBridge::IsKnownUnaccelerated(
+ AudioCodecToAndroidMimeType(codec), MEDIA_CODEC_DECODER);
+}
- if (!media_codec())
- return false;
+// static
+bool VideoCodecBridge::IsKnownUnaccelerated(const VideoCodec& codec,
+ MediaCodecDirection direction) {
+ return MediaCodecBridge::IsKnownUnaccelerated(
+ VideoCodecToAndroidMimeType(codec), direction);
+}
- std::string codec_string = VideoCodecToAndroidMimeType(codec);
- if (codec_string.empty())
- return false;
+VideoCodecBridge* VideoCodecBridge::CreateDecoder(const VideoCodec& codec,
+ bool is_secure,
+ const gfx::Size& size,
+ jobject surface,
+ jobject media_crypto) {
+ JNIEnv* env = AttachCurrentThread();
+ const std::string mime = VideoCodecToAndroidMimeType(codec);
+ if (mime.empty())
+ return NULL;
- ScopedJavaLocalRef<jstring> j_mime =
- ConvertUTF8ToJavaString(env, codec_string);
+ scoped_ptr<VideoCodecBridge> bridge(
+ new VideoCodecBridge(mime, is_secure, MEDIA_CODEC_DECODER));
+
+ ScopedJavaLocalRef<jstring> j_mime = ConvertUTF8ToJavaString(env, mime);
ScopedJavaLocalRef<jobject> j_format(
- Java_MediaCodecBridge_createVideoFormat(
+ Java_MediaCodecBridge_createVideoDecoderFormat(
env, j_mime.obj(), size.width(), size.height()));
DCHECK(!j_format.is_null());
- if (!Java_MediaCodecBridge_configureVideo(
- env, media_codec(), j_format.obj(), surface, media_crypto, 0)) {
- return false;
+ if (!Java_MediaCodecBridge_configureVideo(env,
+ bridge->media_codec(),
+ j_format.obj(),
+ surface,
+ media_crypto,
+ 0)) {
+ return NULL;
}
- return StartInternal();
+ return bridge->StartInternal() ? bridge.release() : NULL;
}
-AudioCodecBridge* AudioCodecBridge::Create(const AudioCodec& codec) {
- const std::string mime = AudioCodecToAndroidMimeType(codec);
- return mime.empty() ? NULL : new AudioCodecBridge(mime);
-}
+VideoCodecBridge* VideoCodecBridge::CreateEncoder(const VideoCodec& codec,
+ const gfx::Size& size,
+ int bit_rate,
+ int frame_rate,
+ int i_frame_interval,
+ int color_format) {
+ JNIEnv* env = AttachCurrentThread();
+ const std::string mime = VideoCodecToAndroidMimeType(codec);
+ if (mime.empty())
+ return NULL;
-// static
-bool AudioCodecBridge::IsKnownUnaccelerated(const AudioCodec& codec) {
- return MediaCodecBridge::IsKnownUnaccelerated(
- AudioCodecToAndroidMimeType(codec));
+ scoped_ptr<VideoCodecBridge> bridge(
+ new VideoCodecBridge(mime, false, MEDIA_CODEC_ENCODER));
+
+ ScopedJavaLocalRef<jstring> j_mime = ConvertUTF8ToJavaString(env, mime);
+ ScopedJavaLocalRef<jobject> j_format(
+ Java_MediaCodecBridge_createVideoEncoderFormat(env,
+ j_mime.obj(),
+ size.width(),
+ size.height(),
+ bit_rate,
+ frame_rate,
+ i_frame_interval,
+ color_format));
+ DCHECK(!j_format.is_null());
+ if (!Java_MediaCodecBridge_configureVideo(env,
+ bridge->media_codec(),
+ j_format.obj(),
+ NULL,
+ NULL,
+ kConfigureFlagEncode)) {
+ return NULL;
+ }
+
+ return bridge->StartInternal() ? bridge.release() : NULL;
}
-VideoCodecBridge* VideoCodecBridge::Create(const VideoCodec& codec,
- bool is_secure) {
- const std::string mime = VideoCodecToAndroidMimeType(codec);
- return mime.empty() ? NULL : new VideoCodecBridge(mime, is_secure);
+VideoCodecBridge::VideoCodecBridge(const std::string& mime,
+ bool is_secure,
+ MediaCodecDirection direction)
+ : MediaCodecBridge(mime, is_secure, direction) {}
+
+void VideoCodecBridge::SetVideoBitrate(int bps) {
+ JNIEnv* env = AttachCurrentThread();
+ Java_MediaCodecBridge_setVideoBitrate(env, media_codec(), bps);
}
-// static
-bool VideoCodecBridge::IsKnownUnaccelerated(const VideoCodec& codec) {
- return MediaCodecBridge::IsKnownUnaccelerated(
- VideoCodecToAndroidMimeType(codec));
+void VideoCodecBridge::RequestKeyFrameSoon() {
+ JNIEnv* env = AttachCurrentThread();
+ Java_MediaCodecBridge_requestKeyFrameSoon(env, media_codec());
}
bool MediaCodecBridge::RegisterMediaCodecBridge(JNIEnv* env) {
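A minimal sketch (not part of the CL) of the zero-copy input path the new
buffer accessors above enable. It assumes
#include "media/base/android/media_codec_bridge.h", an already-started
|codec|, and hypothetical |frame|/|frame_size|/|presentation_time| caller
state:

  bool SubmitFrameZeroCopy(media::MediaCodecBridge* codec,
                           const uint8* frame,
                           size_t frame_size,
                           const base::TimeDelta& presentation_time) {
    int input_index = -1;
    if (codec->DequeueInputBuffer(base::TimeDelta::FromMicroseconds(1000),
                                  &input_index) != media::MEDIA_CODEC_OK) {
      return false;
    }
    uint8* dst = NULL;
    size_t capacity = 0;
    codec->GetInputBuffer(input_index, &dst, &capacity);  // Direct pointer.
    if (frame_size > capacity)
      return false;
    memcpy(dst, frame, frame_size);  // Or let the producer write in place.
    // NULL |data| tells QueueInputBuffer the buffer is already populated,
    // skipping the memcpy that FillInputBuffer() would otherwise perform.
    return codec->QueueInputBuffer(input_index, NULL, frame_size,
                                   presentation_time) ==
           media::MEDIA_CODEC_OK;
  }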
diff --git a/media/base/android/media_codec_bridge.h b/media/base/android/media_codec_bridge.h
index cdd883f..3a85019 100644
--- a/media/base/android/media_codec_bridge.h
+++ b/media/base/android/media_codec_bridge.h
@@ -33,17 +33,29 @@ enum MediaCodecStatus {
MEDIA_CODEC_ERROR
};
+// Codec direction. Keep this in sync with MediaCodecBridge.java.
+enum MediaCodecDirection {
+ MEDIA_CODEC_DECODER,
+ MEDIA_CODEC_ENCODER,
+};
+
// This class serves as a bridge for native code to call java functions inside
// Android MediaCodec class. For more information on Android MediaCodec, check
// http://developer.android.com/reference/android/media/MediaCodec.html
// Note: MediaCodec is only available on JB and greater.
// Use AudioCodecBridge or VideoCodecBridge to create an instance of this
// object.
+//
+// TODO(fischman,xhwang): replace this (and the enums that go with it) with
+// chromium's JNI auto-generation hotness.
class MEDIA_EXPORT MediaCodecBridge {
public:
// Returns true if MediaCodec is available on the device.
static bool IsAvailable();
+ // Returns true if MediaCodec.setParameters() is available on the device.
+ static bool SupportsSetParameters();
+
// Returns whether MediaCodecBridge has a decoder that |is_secure| and can
// decode |codec| type.
static bool CanDecode(const std::string& codec, bool is_secure);
@@ -52,12 +64,13 @@ class MEDIA_EXPORT MediaCodecBridge {
 // TODO(qinmin): Currently the codecs string only contains one codec. Do we
 // need more specific codecs separated by commas? (e.g. "vp8" -> "vp8, vp8.0")
struct CodecsInfo {
- std::string codecs;
- std::string name;
+ std::string codecs; // E.g. "vp8" or "avc1".
+ std::string name; // E.g. "OMX.google.vp8.decoder".
+ MediaCodecDirection direction;
};
// Get a list of supported codecs.
- static void GetCodecsInfo(std::vector<CodecsInfo>* codecs_info);
+ static std::vector<CodecsInfo> GetCodecsInfo();
virtual ~MediaCodecBridge();
@@ -80,21 +93,31 @@ class MEDIA_EXPORT MediaCodecBridge {
// returns a format change by returning INFO_OUTPUT_FORMAT_CHANGED
void GetOutputFormat(int* width, int* height);
+ // Returns the number of input buffers used by the codec.
+ int GetInputBuffersCount();
+
// Submits a byte array to the given input buffer. Call this after getting an
- // available buffer from DequeueInputBuffer().
+ // available buffer from DequeueInputBuffer(). If |data| is NULL, assume the
+ // input buffer has already been populated (but still obey
+ // |orig_data_size|).
MediaCodecStatus QueueInputBuffer(int index,
const uint8* data,
- int size,
+ int orig_data_size,
const base::TimeDelta& presentation_time);
// Similar to the above call, but submits a buffer that is encrypted.
- // Note: NULL |subsamples| indicates the whole buffer is encrypted.
+ // Note: NULL |subsamples| indicates the whole buffer is encrypted. If |data|
+ // is NULL, assume the input buffer has already been populated (but still obey
+ // |orig_data_size|).
MediaCodecStatus QueueSecureInputBuffer(
int index,
- const uint8* data, int data_size,
- const uint8* key_id, int key_id_size,
- const uint8* iv, int iv_size,
- const SubsampleEntry* subsamples, int subsamples_size,
+ const uint8* data,
+ int orig_data_size,
+ const uint8* key_id,
+ int key_id_size,
+ const uint8* iv,
+ int iv_size,
+ const SubsampleEntry* subsamples,
+ int subsamples_size,
const base::TimeDelta& presentation_time);
 // Submits an empty buffer with an EOS (END OF STREAM) flag.
@@ -112,7 +135,8 @@ class MEDIA_EXPORT MediaCodecBridge {
// Dequeues an output buffer, block at most timeout_us microseconds.
// Returns the status of this operation. If OK is returned, the output
// parameters should be populated. Otherwise, the values of output parameters
- // should not be used.
+ // should not be used. Output parameters other than index/offset/size are
+ // optional and only set if not NULL.
// Note: Never use infinite timeout as this would block the decoder thread and
// prevent the decoder job from being released.
// TODO(xhwang): Can we drop |end_of_stream| and return
@@ -122,25 +146,42 @@ class MEDIA_EXPORT MediaCodecBridge {
size_t* offset,
size_t* size,
base::TimeDelta* presentation_time,
- bool* end_of_stream);
+ bool* end_of_stream,
+ bool* key_frame);
- // Returns the buffer to the codec. If you previously specified a surface
- // when configuring this video decoder you can optionally render the buffer.
+ // Returns the buffer to the codec. If you previously specified a surface when
+ // configuring this video decoder, you can optionally render the buffer.
void ReleaseOutputBuffer(int index, bool render);
+ // Returns the number of output buffers used by the codec.
+ int GetOutputBuffersCount();
+
+ // Returns the capacity of each output buffer used by the codec.
+ size_t GetOutputBuffersCapacity();
+
// Gets output buffers from media codec and keeps them inside the java class.
// To access them, use DequeueOutputBuffer(). Returns whether output buffers
// were successfully obtained.
bool GetOutputBuffers() WARN_UNUSED_RESULT;
+ // Returns an input buffer's base pointer and capacity.
+ void GetInputBuffer(int input_buffer_index, uint8** data, size_t* capacity);
+
+ // Copies |dst_size| bytes from output buffer |index|, starting at |offset|,
+ // into |dst|. Returns false (and copies nothing) if the buffer holds fewer
+ // than |dst_size| bytes past |offset|.
+ bool CopyFromOutputBuffer(int index, size_t offset, void* dst, int dst_size);
+
static bool RegisterMediaCodecBridge(JNIEnv* env);
protected:
// Returns true if |mime_type| is known to be unaccelerated (i.e. backed by a
// software codec instead of a hardware one).
- static bool IsKnownUnaccelerated(const std::string& mime_type);
+ static bool IsKnownUnaccelerated(const std::string& mime_type,
+ MediaCodecDirection direction);
- MediaCodecBridge(const std::string& mime, bool is_secure);
+ MediaCodecBridge(const std::string& mime,
+ bool is_secure,
+ MediaCodecDirection direction);
// Calls start() against the media codec instance. Used in StartXXX() after
// configuring media codec. Returns whether media codec was successfully
@@ -148,13 +189,14 @@ class MEDIA_EXPORT MediaCodecBridge {
bool StartInternal() WARN_UNUSED_RESULT;
jobject media_codec() { return j_media_codec_.obj(); }
+ MediaCodecDirection direction_;
private:
// Fills a particular input buffer; returns false if |data_size| exceeds the
// input buffer's capacity (and doesn't touch the input buffer in that case).
bool FillInputBuffer(int index,
const uint8* data,
- int data_size) WARN_UNUSED_RESULT;
+ size_t data_size) WARN_UNUSED_RESULT;
// Java MediaCodec instance.
base::android::ScopedJavaGlobalRef<jobject> j_media_codec_;
@@ -193,20 +235,34 @@ class AudioCodecBridge : public MediaCodecBridge {
class MEDIA_EXPORT VideoCodecBridge : public MediaCodecBridge {
public:
- // Returns an VideoCodecBridge instance if |codec| is supported, or a NULL
- // pointer otherwise.
- static VideoCodecBridge* Create(const VideoCodec& codec, bool is_secure);
-
// See MediaCodecBridge::IsKnownUnaccelerated().
- static bool IsKnownUnaccelerated(const VideoCodec& codec);
-
- // Start the video codec bridge.
- // TODO(qinmin): Pass codec specific data if available.
- bool Start(const VideoCodec& codec, const gfx::Size& size, jobject surface,
- jobject media_crypto);
+ static bool IsKnownUnaccelerated(const VideoCodec& codec,
+ MediaCodecDirection direction);
+
+ // Create, start, and return a VideoCodecBridge decoder or NULL on failure.
+ static VideoCodecBridge* CreateDecoder(
+ const VideoCodec& codec, // e.g. media::kCodecVP8
+ bool is_secure,
+ const gfx::Size& size, // Output frame size.
+ jobject surface, // Output surface, optional.
+ jobject media_crypto); // MediaCrypto object, optional.
+
+ // Create, start, and return a VideoCodecBridge encoder or NULL on failure.
+ static VideoCodecBridge* CreateEncoder(
+ const VideoCodec& codec, // e.g. media::kCodecVP8
+      const gfx::Size& size,    // Input frame size.
+      int bit_rate,             // Bits per second.
+      int frame_rate,           // Frames per second.
+      int i_frame_interval,     // Interval between I-frames.
+      int color_format);        // A MediaCodecInfo.CodecCapabilities value.
+
+ void SetVideoBitrate(int bps);
+ void RequestKeyFrameSoon();
private:
- VideoCodecBridge(const std::string& mime, bool is_secure);
+ VideoCodecBridge(const std::string& mime,
+ bool is_secure,
+ MediaCodecDirection direction);
};
} // namespace media
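A hedged sketch of driving the new encoder surface declared above; the VP8
parameter values are illustrative only, and 21 is assumed to be the device's
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:

  void EncoderSketch() {
    scoped_ptr<media::VideoCodecBridge> encoder(
        media::VideoCodecBridge::CreateEncoder(
            media::kCodecVP8, gfx::Size(1280, 720), 1000000 /* bit_rate */,
            30 /* frame_rate */, 30 /* i_frame_interval */,
            21 /* color_format */));
    if (!encoder)
      return;  // No usable HW encoder; fall back to SW encode.
    encoder->SetVideoBitrate(500000);  // E.g. react to congestion.
    encoder->RequestKeyFrameSoon();    // E.g. react to receiver-side loss.

    int index = -1;
    size_t offset = 0;
    size_t size = 0;
    bool key_frame = false;
    // presentation_time and end_of_stream are optional; pass NULL to skip.
    if (encoder->DequeueOutputBuffer(base::TimeDelta::FromMicroseconds(1000),
                                     &index, &offset, &size, NULL, NULL,
                                     &key_frame) == media::MEDIA_CODEC_OK) {
      std::vector<uint8> packet(size);
      if (size > 0 &&
          encoder->CopyFromOutputBuffer(index, offset, &packet[0], size)) {
        // Ship |packet|, flagging |key_frame| for the packetizer.
      }
      encoder->ReleaseOutputBuffer(index, false /* render */);
    }
  }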
diff --git a/media/base/android/media_codec_bridge_unittest.cc b/media/base/android/media_codec_bridge_unittest.cc
index 5a35579..e680251 100644
--- a/media/base/android/media_codec_bridge_unittest.cc
+++ b/media/base/android/media_codec_bridge_unittest.cc
@@ -111,7 +111,8 @@ static inline const base::TimeDelta InfiniteTimeOut() {
void DecodeMediaFrame(
VideoCodecBridge* media_codec, const uint8* data, size_t data_size,
const base::TimeDelta input_presentation_timestamp,
- const base::TimeDelta initial_timestamp_lower_bound) {
+ const base::TimeDelta initial_timestamp_lower_bound,
+ bool expected_key_frame) {
base::TimeDelta input_pts = input_presentation_timestamp;
base::TimeDelta timestamp = initial_timestamp_lower_bound;
base::TimeDelta new_timestamp;
@@ -128,11 +129,19 @@ void DecodeMediaFrame(
size_t size = 0;
bool eos = false;
int output_buf_index = -1;
+ bool key_frame = false;
status = media_codec->DequeueOutputBuffer(InfiniteTimeOut(),
- &output_buf_index, &unused_offset, &size, &new_timestamp, &eos);
-
- if (status == MEDIA_CODEC_OK && output_buf_index > 0)
+ &output_buf_index,
+ &unused_offset,
+ &size,
+ &new_timestamp,
+ &eos,
+ &key_frame);
+
+ if (status == MEDIA_CODEC_OK && output_buf_index > 0) {
media_codec->ReleaseOutputBuffer(output_buf_index, false);
+ ASSERT_EQ(expected_key_frame, key_frame);
+ }
 // The output timestamp should not be smaller than the previous one.
ASSERT_TRUE(new_timestamp >= timestamp);
input_pts += base::TimeDelta::FromMicroseconds(33000);
@@ -144,7 +153,8 @@ TEST(MediaCodecBridgeTest, Initialize) {
SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
scoped_ptr<media::MediaCodecBridge> media_codec;
- media_codec.reset(VideoCodecBridge::Create(kCodecH264, false));
+ media_codec.reset(VideoCodecBridge::CreateDecoder(
+ kCodecH264, false, gfx::Size(640, 480), NULL, NULL));
}
TEST(MediaCodecBridgeTest, DoNormal) {
@@ -162,9 +172,10 @@ TEST(MediaCodecBridgeTest, DoNormal) {
ASSERT_GE(input_buf_index, 0);
int64 input_pts = kPresentationTimeBase;
- media_codec->QueueInputBuffer(
- input_buf_index, test_mp3, sizeof(test_mp3),
- base::TimeDelta::FromMicroseconds(++input_pts));
+ media_codec->QueueInputBuffer(input_buf_index,
+ test_mp3,
+ sizeof(test_mp3),
+ base::TimeDelta::FromMicroseconds(++input_pts));
status = media_codec->DequeueInputBuffer(InfiniteTimeOut(), &input_buf_index);
media_codec->QueueInputBuffer(
@@ -182,7 +193,12 @@ TEST(MediaCodecBridgeTest, DoNormal) {
base::TimeDelta timestamp;
int output_buf_index = -1;
status = media_codec->DequeueOutputBuffer(InfiniteTimeOut(),
- &output_buf_index, &unused_offset, &size, &timestamp, &eos);
+ &output_buf_index,
+ &unused_offset,
+ &size,
+ &timestamp,
+ &eos,
+ NULL);
switch (status) {
case MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
FAIL();
@@ -239,15 +255,17 @@ TEST(MediaCodecBridgeTest, InvalidVorbisHeader) {
TEST(MediaCodecBridgeTest, PresentationTimestampsDoNotDecrease) {
SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
- scoped_ptr<VideoCodecBridge> media_codec;
- media_codec.reset(VideoCodecBridge::Create(kCodecVP8, false));
- EXPECT_TRUE(media_codec->Start(
- kCodecVP8, gfx::Size(320, 240), NULL, NULL));
+ scoped_ptr<VideoCodecBridge> media_codec(VideoCodecBridge::CreateDecoder(
+ kCodecVP8, false, gfx::Size(320, 240), NULL, NULL));
+ EXPECT_TRUE(media_codec.get());
scoped_refptr<DecoderBuffer> buffer =
ReadTestDataFile("vp8-I-frame-320x240");
- DecodeMediaFrame(
- media_codec.get(), buffer->data(), buffer->data_size(),
- base::TimeDelta(), base::TimeDelta());
+ DecodeMediaFrame(media_codec.get(),
+ buffer->data(),
+ buffer->data_size(),
+ base::TimeDelta(),
+ base::TimeDelta(),
+ true);
// Simulate a seek to 10 seconds, and each chunk has 2 I-frames.
std::vector<uint8> chunk(buffer->data(),
@@ -255,20 +273,29 @@ TEST(MediaCodecBridgeTest, PresentationTimestampsDoNotDecrease) {
chunk.insert(chunk.end(), buffer->data(),
buffer->data() + buffer->data_size());
media_codec->Reset();
- DecodeMediaFrame(media_codec.get(), &chunk[0], chunk.size(),
+ DecodeMediaFrame(media_codec.get(),
+ &chunk[0],
+ chunk.size(),
base::TimeDelta::FromMicroseconds(10000000),
- base::TimeDelta::FromMicroseconds(9900000));
+ base::TimeDelta::FromMicroseconds(9900000),
+ true);
// Simulate a seek to 5 seconds.
media_codec->Reset();
- DecodeMediaFrame(media_codec.get(), &chunk[0], chunk.size(),
+ DecodeMediaFrame(media_codec.get(),
+ &chunk[0],
+ chunk.size(),
base::TimeDelta::FromMicroseconds(5000000),
- base::TimeDelta::FromMicroseconds(4900000));
+ base::TimeDelta::FromMicroseconds(4900000),
+ true);
}
TEST(MediaCodecBridgeTest, CreateUnsupportedCodec) {
EXPECT_EQ(NULL, AudioCodecBridge::Create(kUnknownAudioCodec));
- EXPECT_EQ(NULL, VideoCodecBridge::Create(kUnknownVideoCodec, false));
+ EXPECT_EQ(
+ NULL,
+ VideoCodecBridge::CreateDecoder(
+ kUnknownVideoCodec, false, gfx::Size(320, 240), NULL, NULL));
}
} // namespace media
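Beyond what the tests above exercise, GetCodecsInfo() now reports each
codec's direction, which makes platform capability logging straightforward.
A small sketch, assuming the usual media/ includes:

  void LogPlatformEncoders() {
    std::vector<media::MediaCodecBridge::CodecsInfo> infos =
        media::MediaCodecBridge::GetCodecsInfo();
    for (size_t i = 0; i < infos.size(); ++i) {
      if (infos[i].direction != media::MEDIA_CODEC_ENCODER)
        continue;
      DVLOG(1) << "Encoder for \"" << infos[i].codecs << "\": "
               << infos[i].name;
    }
  }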
diff --git a/media/base/android/media_decoder_job.cc b/media/base/android/media_decoder_job.cc
index 74aeecd..5cd2bbb 100644
--- a/media/base/android/media_decoder_job.cc
+++ b/media/base/android/media_decoder_job.cc
@@ -320,9 +320,14 @@ void MediaDecoderJob::DecodeInternal(
base::TimeDelta timeout = base::TimeDelta::FromMilliseconds(
kMediaCodecTimeoutInMilliseconds);
- MediaCodecStatus status = media_codec_bridge_->DequeueOutputBuffer(
- timeout, &buffer_index, &offset, &size, &presentation_timestamp,
- &output_eos_encountered);
+ MediaCodecStatus status =
+ media_codec_bridge_->DequeueOutputBuffer(timeout,
+ &buffer_index,
+ &offset,
+ &size,
+ &presentation_timestamp,
+ &output_eos_encountered,
+ NULL);
if (status != MEDIA_CODEC_OK) {
if (status == MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED &&
diff --git a/media/base/android/video_decoder_job.cc b/media/base/android/video_decoder_job.cc
index 02b41c4..75124e7 100644
--- a/media/base/android/video_decoder_job.cc
+++ b/media/base/android/video_decoder_job.cc
@@ -30,9 +30,9 @@ VideoDecoderJob* VideoDecoderJob::Create(const VideoCodec video_codec,
jobject surface,
jobject media_crypto,
const base::Closure& request_data_cb) {
- scoped_ptr<VideoCodecBridge> codec(
- VideoCodecBridge::Create(video_codec, is_secure));
- if (codec && codec->Start(video_codec, size, surface, media_crypto))
+ scoped_ptr<VideoCodecBridge> codec(VideoCodecBridge::CreateDecoder(
+ video_codec, is_secure, size, surface, media_crypto));
+ if (codec)
return new VideoDecoderJob(codec.Pass(), request_data_cb);
LOG(ERROR) << "Failed to create VideoDecoderJob.";