author    | dalecurtis <dalecurtis@chromium.org> | 2016-01-06 11:48:00 -0800
committer | Commit bot <commit-bot@chromium.org> | 2016-01-06 19:49:08 +0000
commit    | c0baf4af90ffad9436aa0c432d70365e28559219 (patch)
tree      | aeadce6c64ba8cef57a7a3aeb2ac3fd5ac019e7c /media/base/android
parent    | f0107588610d8beec57308c7820ca25b4f9d47b8 (diff)
Replace WebAudio MediaCodec usage with FFmpeg. A ~4x improvement.
Now that FFmpeg is linked in for project spitzer (issue 507834), we
can use it for audio decoding on Android. This gives WebAudio users
decoding that is both more secure (all decoding stays in the renderer)
and faster.
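For context, the renderer-side path this switches to is, at its core, a standard libavformat/libavcodec demux-and-decode loop; in Chromium it sits behind the media library's audio file reading code rather than being open-coded. The sketch below is illustrative only, not the Chromium implementation: the function name is made up for the example, error handling is abbreviated, and it assumes a recent FFmpeg with the send/receive packet API (the 2016-era tree still used the older decode calls).

// Illustrative sketch only: a minimal FFmpeg audio decode loop of the kind
// the renderer-side WebAudio decoder builds on. Not Chromium code.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

#include <cstdio>

// Decodes the first audio stream of |path| and counts the PCM frames produced.
static bool DecodeAudioFile(const char* path) {
  AVFormatContext* format = nullptr;
  if (avformat_open_input(&format, path, nullptr, nullptr) < 0)
    return false;
  if (avformat_find_stream_info(format, nullptr) < 0) {
    avformat_close_input(&format);
    return false;
  }

  // Pick the best audio stream and open a decoder for it.
  int stream = av_find_best_stream(format, AVMEDIA_TYPE_AUDIO, -1, -1, nullptr, 0);
  if (stream < 0) {
    avformat_close_input(&format);
    return false;
  }
  AVCodecParameters* params = format->streams[stream]->codecpar;
  const AVCodec* codec = avcodec_find_decoder(params->codec_id);
  if (!codec) {
    avformat_close_input(&format);
    return false;
  }
  AVCodecContext* ctx = avcodec_alloc_context3(codec);
  avcodec_parameters_to_context(ctx, params);
  if (avcodec_open2(ctx, codec, nullptr) < 0) {
    avcodec_free_context(&ctx);
    avformat_close_input(&format);
    return false;
  }

  AVPacket* packet = av_packet_alloc();
  AVFrame* frame = av_frame_alloc();
  int64_t frames_decoded = 0;

  // Demux packets, feed them to the decoder, and drain decoded frames.
  while (av_read_frame(format, packet) >= 0) {
    if (packet->stream_index == stream && avcodec_send_packet(ctx, packet) == 0) {
      while (avcodec_receive_frame(ctx, frame) == 0)
        frames_decoded += frame->nb_samples;  // PCM is now in frame->data[].
    }
    av_packet_unref(packet);
  }

  std::printf("decoded %lld frames at %d Hz\n",
              static_cast<long long>(frames_decoded), ctx->sample_rate);

  av_frame_free(&frame);
  av_packet_free(&packet);
  avcodec_free_context(&ctx);
  avformat_close_input(&format);
  return frames_decoded > 0;
}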
Using the demo page from the linked bug, the improvements are:
apk size: -2162 bytes
aac decode: 12.9 seconds -> 3.7 seconds (~3.5x speedup)
ogg decode: 15.7 seconds -> 3.8 seconds (~4.1x speedup)
BUG=424174, 570711, 570788
TEST=existing layout tests all pass.
Review URL: https://codereview.chromium.org/1565623002
Cr-Commit-Position: refs/heads/master@{#367881}
Diffstat (limited to 'media/base/android')
-rw-r--r-- | media/base/android/BUILD.gn | 4
-rw-r--r-- | media/base/android/java/src/org/chromium/media/WebAudioMediaCodecBridge.java | 293
-rw-r--r-- | media/base/android/media_jni_registrar.cc | 3
-rw-r--r-- | media/base/android/webaudio_media_codec_bridge.cc | 202
-rw-r--r-- | media/base/android/webaudio_media_codec_bridge.h | 87
-rw-r--r-- | media/base/android/webaudio_media_codec_info.h | 20
6 files changed, 0 insertions, 609 deletions
diff --git a/media/base/android/BUILD.gn b/media/base/android/BUILD.gn
index 75c6717..54d8074 100644
--- a/media/base/android/BUILD.gn
+++ b/media/base/android/BUILD.gn
@@ -64,9 +64,6 @@ source_set("android") {
     "sdk_media_codec_bridge.h",
     "video_decoder_job.cc",
     "video_decoder_job.h",
-    "webaudio_media_codec_bridge.cc",
-    "webaudio_media_codec_bridge.h",
-    "webaudio_media_codec_info.h",
   ]
   configs += [
     "//media:media_config",
@@ -114,7 +111,6 @@ generate_jni("media_jni_headers") {
     "java/src/org/chromium/media/MediaDrmBridge.java",
     "java/src/org/chromium/media/MediaPlayerBridge.java",
     "java/src/org/chromium/media/MediaPlayerListener.java",
-    "java/src/org/chromium/media/WebAudioMediaCodecBridge.java",
   ]
   jni_package = "media"
 }
diff --git a/media/base/android/java/src/org/chromium/media/WebAudioMediaCodecBridge.java b/media/base/android/java/src/org/chromium/media/WebAudioMediaCodecBridge.java
deleted file mode 100644
index abc394c..0000000
--- a/media/base/android/java/src/org/chromium/media/WebAudioMediaCodecBridge.java
+++ /dev/null
@@ -1,293 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package org.chromium.media;
-
-import android.content.Context;
-import android.media.MediaCodec;
-import android.media.MediaCodec.BufferInfo;
-import android.media.MediaExtractor;
-import android.media.MediaFormat;
-import android.os.ParcelFileDescriptor;
-
-import org.chromium.base.Log;
-import org.chromium.base.annotations.CalledByNative;
-import org.chromium.base.annotations.JNINamespace;
-
-import java.io.File;
-import java.nio.ByteBuffer;
-
-@JNINamespace("media")
-class WebAudioMediaCodecBridge {
-    private static final String TAG = "cr.media";
-    // TODO(rtoy): What is the correct timeout value for reading
-    // from a file in memory?
-    static final long TIMEOUT_MICROSECONDS = 500;
-    @CalledByNative
-    private static String createTempFile(Context ctx) throws java.io.IOException {
-        File outputDirectory = ctx.getCacheDir();
-        File outputFile = File.createTempFile("webaudio", ".dat", outputDirectory);
-        return outputFile.getAbsolutePath();
-    }
-
-    @SuppressWarnings("deprecation")
-    @CalledByNative
-    private static boolean decodeAudioFile(Context ctx, long nativeMediaCodecBridge,
-            int inputFD, long dataSize) {
-
-        if (dataSize < 0 || dataSize > 0x7fffffff) return false;
-
-        MediaExtractor extractor = new MediaExtractor();
-
-        ParcelFileDescriptor encodedFD;
-        encodedFD = ParcelFileDescriptor.adoptFd(inputFD);
-        try {
-            extractor.setDataSource(encodedFD.getFileDescriptor(), 0, dataSize);
-        } catch (Exception e) {
-            e.printStackTrace();
-            encodedFD.detachFd();
-            return false;
-        }
-
-        if (extractor.getTrackCount() <= 0) {
-            encodedFD.detachFd();
-            return false;
-        }
-
-        MediaFormat format = extractor.getTrackFormat(0);
-
-        // If we are unable to get the input channel count, the sample
-        // rate or the mime type for any reason, just give up.
-        // Without these, we don't know what to do.
-
-        int inputChannelCount;
-        try {
-            // Number of channels specified in the file
-            inputChannelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
-        } catch (Exception e) {
-            // Give up.
-            Log.w(TAG, "Unable to determine number of channels");
-            encodedFD.detachFd();
-            return false;
-        }
-
-        // Number of channels the decoder will provide. (Not
-        // necessarily the same as inputChannelCount. See
-        // crbug.com/266006.)
-        int outputChannelCount = inputChannelCount;
-
-        int sampleRate;
-        try {
-            sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
-        } catch (Exception e) {
-            // Give up.
-            Log.w(TAG, "Unable to determine sample rate");
-            encodedFD.detachFd();
-            return false;
-        }
-
-        String mime;
-        try {
-            mime = format.getString(MediaFormat.KEY_MIME);
-        } catch (Exception e) {
-            // Give up.
-            Log.w(TAG, "Unable to determine type of encoding used by the file");
-            encodedFD.detachFd();
-            return false;
-        }
-
-        long durationMicroseconds = 0;
-        if (format.containsKey(MediaFormat.KEY_DURATION)) {
-            try {
-                durationMicroseconds = format.getLong(MediaFormat.KEY_DURATION);
-            } catch (Exception e) {
-                Log.d(TAG, "Cannot get duration");
-            }
-        }
-
-        // If the duration is too long, set to 0 to force the caller
-        // not to preallocate space. See crbug.com/326856.
-        // FIXME: What should be the limit? We're arbitrarily using
-        // about 2148 sec (35.8 min).
-        if (durationMicroseconds > 0x7fffffff) {
-            durationMicroseconds = 0;
-        }
-
-        Log.d(TAG, "Initial: Tracks: %d Format: %s", extractor.getTrackCount(), format);
-
-        // Create decoder
-        MediaCodec codec;
-        try {
-            codec = MediaCodec.createDecoderByType(mime);
-        } catch (Exception e) {
-            Log.w(TAG, "Failed to create MediaCodec for mime type: %s", mime);
-            encodedFD.detachFd();
-            return false;
-        }
-
-        try {
-            codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
-        } catch (Exception e) {
-            Log.w(TAG, "Unable to configure codec for format " + format, e);
-            encodedFD.detachFd();
-            return false;
-        }
-        try {
-            codec.start();
-        } catch (Exception e) {
-            Log.w(TAG, "Unable to start()", e);
-            encodedFD.detachFd();
-            return false;
-        }
-
-        ByteBuffer[] codecInputBuffers;
-        try {
-            codecInputBuffers = codec.getInputBuffers();
-        } catch (Exception e) {
-            Log.w(TAG, "getInputBuffers() failed", e);
-            encodedFD.detachFd();
-            return false;
-        }
-        ByteBuffer[] codecOutputBuffers;
-        try {
-            codecOutputBuffers = codec.getOutputBuffers();
-        } catch (Exception e) {
-            Log.w(TAG, "getOutputBuffers() failed", e);
-            encodedFD.detachFd();
-            return false;
-        }
-
-        // A track must be selected and will be used to read samples.
-        extractor.selectTrack(0);
-
-        boolean sawInputEOS = false;
-        boolean sawOutputEOS = false;
-        boolean destinationInitialized = false;
-        boolean decodedSuccessfully = true;
-
-        // Keep processing until the output is done.
-        while (!sawOutputEOS) {
-            if (!sawInputEOS) {
-                // Input side
-                int inputBufIndex;
-                try {
-                    inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_MICROSECONDS);
-                } catch (Exception e) {
-                    Log.w(TAG, "dequeueInputBuffer(%d) failed.", TIMEOUT_MICROSECONDS, e);
-                    decodedSuccessfully = false;
-                    break;
-                }
-
-                if (inputBufIndex >= 0) {
-                    ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
-                    int sampleSize;
-
-                    try {
-                        sampleSize = extractor.readSampleData(dstBuf, 0);
-                    } catch (Exception e) {
-                        Log.w(TAG, "readSampleData failed.");
-                        decodedSuccessfully = false;
-                        break;
-                    }
-
-                    long presentationTimeMicroSec = 0;
-
-                    if (sampleSize < 0) {
-                        sawInputEOS = true;
-                        sampleSize = 0;
-                    } else {
-                        presentationTimeMicroSec = extractor.getSampleTime();
-                    }
-
-                    try {
-                        codec.queueInputBuffer(inputBufIndex,
-                                0, /* offset */
-                                sampleSize,
-                                presentationTimeMicroSec,
-                                sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
-                    } catch (Exception e) {
-                        Log.w(TAG, "queueInputBuffer(%d, 0, %d, %d, %d) failed.",
-                                inputBufIndex, sampleSize, presentationTimeMicroSec,
-                                (sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0), e);
-                        decodedSuccessfully = false;
-                        break;
-                    }
-
-                    if (!sawInputEOS) {
-                        extractor.advance();
-                    }
-                }
-            }
-
-            // Output side
-            MediaCodec.BufferInfo info = new BufferInfo();
-            final int outputBufIndex;
-
-            try {
-                outputBufIndex = codec.dequeueOutputBuffer(info, TIMEOUT_MICROSECONDS);
-            } catch (Exception e) {
-                Log.w(TAG, "dequeueOutputBuffer(%s, %d) failed", info, TIMEOUT_MICROSECONDS);
-                e.printStackTrace();
-                decodedSuccessfully = false;
-                break;
-            }
-
-            if (outputBufIndex >= 0) {
-                ByteBuffer buf = codecOutputBuffers[outputBufIndex];
-
-                if (!destinationInitialized) {
-                    // Initialize the destination as late as possible to
-                    // catch any changes in format. But be sure to
-                    // initialize it BEFORE we send any decoded audio,
-                    // and only initialize once.
-                    Log.d(TAG, "Final: Rate: %d Channels: %d Mime: %s Duration: %d microsec",
-                            sampleRate, inputChannelCount, mime, durationMicroseconds);
-
-                    nativeInitializeDestination(nativeMediaCodecBridge,
-                            inputChannelCount,
-                            sampleRate,
-                            durationMicroseconds);
-                    destinationInitialized = true;
-                }
-
-                if (destinationInitialized && info.size > 0) {
-                    nativeOnChunkDecoded(nativeMediaCodecBridge, buf, info.size,
-                            inputChannelCount, outputChannelCount);
-                }
-
-                buf.clear();
-                codec.releaseOutputBuffer(outputBufIndex, false /* render */);
-
-                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
-                    sawOutputEOS = true;
-                }
-            } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
-                codecOutputBuffers = codec.getOutputBuffers();
-            } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
-                MediaFormat newFormat = codec.getOutputFormat();
-                outputChannelCount = newFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
-                sampleRate = newFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
-                Log.d(TAG, "output format changed to " + newFormat);
-            }
-        }
-
-        encodedFD.detachFd();
-
-        codec.stop();
-        codec.release();
-        codec = null;
-
-        return decodedSuccessfully;
-    }
-
-    private static native void nativeOnChunkDecoded(
-            long nativeWebAudioMediaCodecBridge, ByteBuffer buf, int size,
-            int inputChannelCount, int outputChannelCount);
-
-    private static native void nativeInitializeDestination(
-            long nativeWebAudioMediaCodecBridge,
-            int inputChannelCount,
-            int sampleRate,
-            long durationMicroseconds);
-}
diff --git a/media/base/android/media_jni_registrar.cc b/media/base/android/media_jni_registrar.cc
index 375748e..e230784 100644
--- a/media/base/android/media_jni_registrar.cc
+++ b/media/base/android/media_jni_registrar.cc
@@ -15,7 +15,6 @@
 #include "media/base/android/media_player_bridge.h"
 #include "media/base/android/media_player_listener.h"
 #include "media/base/android/sdk_media_codec_bridge.h"
-#include "media/base/android/webaudio_media_codec_bridge.h"
 #include "media/capture/video/android/video_capture_device_android.h"
 #include "media/capture/video/android/video_capture_device_factory_android.h"
 
@@ -33,8 +32,6 @@ static base::android::RegistrationMethod kMediaRegisteredMethods[] = {
      VideoCaptureDeviceAndroid::RegisterVideoCaptureDevice},
     {"VideoCaptureDeviceFactory",
      VideoCaptureDeviceFactoryAndroid::RegisterVideoCaptureDeviceFactory},
-    {"WebAudioMediaCodecBridge",
-     WebAudioMediaCodecBridge::RegisterWebAudioMediaCodecBridge},
 };
 
 bool RegisterJni(JNIEnv* env) {
diff --git a/media/base/android/webaudio_media_codec_bridge.cc b/media/base/android/webaudio_media_codec_bridge.cc
deleted file mode 100644
index c38ba75..0000000
--- a/media/base/android/webaudio_media_codec_bridge.cc
+++ /dev/null
@@ -1,202 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/base/android/webaudio_media_codec_bridge.h"
-
-#include <errno.h>
-#include <fcntl.h>
-#include <stddef.h>
-#include <string.h>
-#include <sys/stat.h>
-#include <sys/types.h>
-#include <unistd.h>
-#include <vector>
-
-#include "base/android/context_utils.h"
-#include "base/android/jni_android.h"
-#include "base/android/jni_array.h"
-#include "base/android/jni_string.h"
-#include "base/files/scoped_file.h"
-#include "base/logging.h"
-#include "base/posix/eintr_wrapper.h"
-#include "jni/WebAudioMediaCodecBridge_jni.h"
-#include "media/base/android/webaudio_media_codec_info.h"
-
-using base::android::AttachCurrentThread;
-
-namespace media {
-
-void WebAudioMediaCodecBridge::RunWebAudioMediaCodec(
-    base::SharedMemoryHandle encoded_audio_handle,
-    base::FileDescriptor pcm_output,
-    uint32_t data_size,
-    base::Closure on_decode_finished_cb) {
-  WebAudioMediaCodecBridge bridge(
-      encoded_audio_handle, pcm_output, data_size);
-
-  bridge.DecodeInMemoryAudioFile();
-  on_decode_finished_cb.Run();
-}
-
-WebAudioMediaCodecBridge::WebAudioMediaCodecBridge(
-    base::SharedMemoryHandle encoded_audio_handle,
-    base::FileDescriptor pcm_output,
-    uint32_t data_size)
-    : encoded_audio_handle_(encoded_audio_handle),
-      pcm_output_(pcm_output.fd),
-      data_size_(data_size) {
-  DVLOG(1) << "WebAudioMediaCodecBridge start **********************"
-           << " output fd = " << pcm_output.fd;
-}
-
-WebAudioMediaCodecBridge::~WebAudioMediaCodecBridge() {
-  if (close(pcm_output_)) {
-    DVLOG(1) << "Couldn't close output fd " << pcm_output_
-             << ": " << strerror(errno);
-  }
-}
-
-int WebAudioMediaCodecBridge::SaveEncodedAudioToFile(
-    JNIEnv* env,
-    jobject context) {
-  // Create a temporary file where we can save the encoded audio data.
-  std::string temporaryFile =
-      base::android::ConvertJavaStringToUTF8(
-          env,
-          Java_WebAudioMediaCodecBridge_createTempFile(env, context).obj());
-
-  // Open the file and unlink it, so that it will be actually removed
-  // when we close the file.
-  base::ScopedFD fd(open(temporaryFile.c_str(), O_RDWR));
-  if (unlink(temporaryFile.c_str())) {
-    VLOG(0) << "Couldn't unlink temp file " << temporaryFile
-            << ": " << strerror(errno);
-  }
-
-  if (!fd.is_valid()) {
-    return -1;
-  }
-
-  // Create a local mapping of the shared memory containing the
-  // encoded audio data, and save the contents to the temporary file.
-  base::SharedMemory encoded_data(encoded_audio_handle_, true);
-
-  if (!encoded_data.Map(data_size_)) {
-    VLOG(0) << "Unable to map shared memory!";
-    return -1;
-  }
-
-  if (static_cast<uint32_t>(write(fd.get(), encoded_data.memory(), data_size_))
-      != data_size_) {
-    VLOG(0) << "Failed to write all audio data to temp file!";
-    return -1;
-  }
-
-  lseek(fd.get(), 0, SEEK_SET);
-
-  return fd.release();
-}
-
-bool WebAudioMediaCodecBridge::DecodeInMemoryAudioFile() {
-  JNIEnv* env = AttachCurrentThread();
-  CHECK(env);
-
-  jobject context = base::android::GetApplicationContext();
-
-  int sourceFd = SaveEncodedAudioToFile(env, context);
-
-  if (sourceFd < 0)
-    return false;
-
-  jboolean decoded = Java_WebAudioMediaCodecBridge_decodeAudioFile(
-      env,
-      context,
-      reinterpret_cast<intptr_t>(this),
-      sourceFd,
-      data_size_);
-
-  close(sourceFd);
-
-  DVLOG(1) << "decoded = " << (decoded ? "true" : "false");
-
-  return decoded;
-}
-
-void WebAudioMediaCodecBridge::InitializeDestination(
-    JNIEnv* env,
-    const JavaParamRef<jobject>& /*java object*/,
-    jint channel_count,
-    jint sample_rate,
-    jlong duration_microsec) {
-  // Send information about this audio file: number of channels,
-  // sample rate (Hz), and the number of frames.
-  struct WebAudioMediaCodecInfo info = {
-    static_cast<unsigned long>(channel_count),
-    static_cast<unsigned long>(sample_rate),
-    // The number of frames is the duration of the file
-    // (in microseconds) times the sample rate.
-    static_cast<unsigned long>(
-        0.5 + (duration_microsec * 0.000001 *
-               sample_rate))
-  };
-
-  DVLOG(1) << "InitializeDestination:"
-           << " channel count = " << channel_count
-           << " rate = " << sample_rate
-           << " duration = " << duration_microsec << " microsec";
-
-  HANDLE_EINTR(write(pcm_output_, &info, sizeof(info)));
-}
-
-void WebAudioMediaCodecBridge::OnChunkDecoded(
-    JNIEnv* env,
-    const JavaParamRef<jobject>& /*java object*/,
-    const JavaParamRef<jobject>& buf,
-    jint buf_size,
-    jint input_channel_count,
-    jint output_channel_count) {
-  if (buf_size <= 0 || !buf)
-    return;
-
-  int8_t* buffer =
-      static_cast<int8_t*>(env->GetDirectBufferAddress(buf));
-  size_t count = static_cast<size_t>(buf_size);
-  std::vector<int16_t> decoded_data;
-
-  if (input_channel_count == 1 && output_channel_count == 2) {
-    // See crbug.com/266006. The file has one channel, but the
-    // decoder decided to return two channels. To be consistent with
-    // the number of channels in the file, only send one channel (the
-    // first).
-    int16_t* data = static_cast<int16_t*>(env->GetDirectBufferAddress(buf));
-    int frame_count = buf_size / sizeof(*data) / 2;
-
-    decoded_data.resize(frame_count);
-    for (int k = 0; k < frame_count; ++k) {
-      decoded_data[k] = *data;
-      data += 2;
-    }
-    buffer = reinterpret_cast<int8_t*>(decoded_data.data());
-    DCHECK(buffer);
-    count = frame_count * sizeof(*data);
-  }
-
-  // Write out the data to the pipe in small chunks if necessary.
-  while (count > 0) {
-    int bytes_to_write = (count >= PIPE_BUF) ? PIPE_BUF : count;
-    ssize_t bytes_written = HANDLE_EINTR(write(pcm_output_,
-                                               buffer,
-                                               bytes_to_write));
-    if (bytes_written == -1)
-      break;
-    count -= bytes_written;
-    buffer += bytes_written;
-  }
-}
-
-bool WebAudioMediaCodecBridge::RegisterWebAudioMediaCodecBridge(JNIEnv* env) {
-  return RegisterNativesImpl(env);
-}
-
-}  // namespace
diff --git a/media/base/android/webaudio_media_codec_bridge.h b/media/base/android/webaudio_media_codec_bridge.h
deleted file mode 100644
index cd40843..0000000
--- a/media/base/android/webaudio_media_codec_bridge.h
+++ /dev/null
@@ -1,87 +0,0 @@
-// Copyright (c) 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_BASE_ANDROID_WEBAUDIO_MEDIA_CODEC_BRIDGE_H_
-#define MEDIA_BASE_ANDROID_WEBAUDIO_MEDIA_CODEC_BRIDGE_H_
-
-#include <jni.h>
-#include <stdint.h>
-
-#include "base/android/scoped_java_ref.h"
-#include "base/callback.h"
-#include "base/file_descriptor_posix.h"
-#include "base/macros.h"
-#include "base/memory/shared_memory.h"
-#include "media/base/media_export.h"
-
-namespace media {
-
-// This class serves as a bridge for native code to call Java
-// functions in the Android MediaCodec class. See
-// http://developer.android.com/reference/android/media/MediaCodec.html.
-class MEDIA_EXPORT WebAudioMediaCodecBridge {
- public:
-  // Create the bridge with the given file descriptors. We read from
-  // |encoded_audio_handle| to get the encoded audio data. Audio file
-  // information and decoded PCM samples are written to |pcm_output|.
-  // We also take ownership of |pcm_output|.
-  WebAudioMediaCodecBridge(base::SharedMemoryHandle encoded_audio_handle,
-                           base::FileDescriptor pcm_output,
-                           uint32_t data_size);
-  ~WebAudioMediaCodecBridge();
-
-  // Inform JNI about this bridge. Returns true if registration
-  // succeeded.
-  static bool RegisterWebAudioMediaCodecBridge(JNIEnv* env);
-
-  // Start MediaCodec to process the encoded data in
-  // |encoded_audio_handle|. The PCM samples are sent to |pcm_output|.
-  static void RunWebAudioMediaCodec(
-      base::SharedMemoryHandle encoded_audio_handle,
-      base::FileDescriptor pcm_output,
-      uint32_t data_size,
-      base::Closure on_decode_finished_cb);
-
-  void OnChunkDecoded(
-      JNIEnv* env,
-      const base::android::JavaParamRef<jobject>& /*java object*/,
-      const base::android::JavaParamRef<jobject>& buf,
-      jint buf_size,
-      jint input_channel_count,
-      jint output_channel_count);
-
-  void InitializeDestination(
-      JNIEnv* env,
-      const base::android::JavaParamRef<jobject>& /*java object*/,
-      jint channel_count,
-      jint sample_rate,
-      jlong duration_us);
-
- private:
-  // Handles MediaCodec processing of the encoded data in
-  // |encoded_audio_handle_| and sends the pcm data to |pcm_output_|.
-  // Returns true if decoding was successful.
-  bool DecodeInMemoryAudioFile();
-
-  // Save encoded audio data to a temporary file and return the file
-  // descriptor to that file. -1 is returned if the audio data could
-  // not be saved for any reason.
-  int SaveEncodedAudioToFile(JNIEnv*, jobject);
-
-  // The encoded audio data is read from this file descriptor for the
-  // shared memory that holds the encoded data.
-  base::SharedMemoryHandle encoded_audio_handle_;
-
-  // The audio file information and decoded pcm data are written to
-  // this file descriptor. We take ownership of this descriptor.
-  int pcm_output_;
-
-  // The length of the encoded data.
-  uint32_t data_size_;
-
-  DISALLOW_COPY_AND_ASSIGN(WebAudioMediaCodecBridge);
-};
-
-}  // namespace media
-#endif  // MEDIA_BASE_ANDROID_WEBAUDIO_MEDIA_CODEC_BRIDGE_H_
diff --git a/media/base/android/webaudio_media_codec_info.h b/media/base/android/webaudio_media_codec_info.h
deleted file mode 100644
index 423af91..0000000
--- a/media/base/android/webaudio_media_codec_info.h
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_BASE_ANDROID_WEBAUDIO_MEDIA_CODEC_INFO_H_
-#define MEDIA_BASE_ANDROID_WEBAUDIO_MEDIA_CODEC_INFO_H_
-
-namespace media {
-
-// This structure holds the information about the audio file
-// determined by MediaCodec that is needed by the audio decoder to
-// create the necessary destination bus.
-struct WebAudioMediaCodecInfo {
-  unsigned long channel_count;
-  unsigned long sample_rate;
-  unsigned long number_of_frames;
-};
-
-}  // namespace media
-#endif  // MEDIA_BASE_ANDROID_WEBAUDIO_MEDIA_CODEC_INFO_H_