author    qinmin@chromium.org <qinmin@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-05-20 05:40:11 +0000
committer qinmin@chromium.org <qinmin@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-05-20 05:40:11 +0000
commit    c1b61abe9cfb2fd8d4ea632ae7bb3b7461dd3c46 (patch)
tree      0c19227d8ce4690ed747503e5f9213a3417e8e79 /media
parent    f94283f099229c2fdf9960c50848d3ec9e36039e (diff)
Add MSE implementation on Android
Here are the changes included in this CL:
1. Adds a MediaSourcePlayer for Android to play media source extensions. Only embedded mode is working currently.
2. Rewrites the MediaCodecBridge code in Java. The current MediaCodecBridge code is not very suitable for exception handling. This refactoring also adds support for audio playback.

BUG=233420
Review URL: https://chromiumcodereview.appspot.com/15113002

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@201031 0039d316-1c4b-4281-b951-d872f2087c98
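The exception-handling point above is the key reason for the Java rewrite: when MediaCodec is driven directly through generated JNI stubs, a Java-side failure such as IllegalStateException surfaces as a pending JNI exception on the native side, which Chromium's JNI helpers treat as fatal. Moving the calls behind a Java wrapper lets such failures be caught in Java and reported back as error values. This particular patch does not yet add try/catch blocks; the sketch below only illustrates the intended pattern, and the class name, helper method, and -1000 sentinel are hypothetical, not from this CL.

    import android.media.MediaCodec;
    import android.util.Log;

    // Hypothetical sketch (not part of this patch): catch a Java-side MediaCodec
    // failure and return an error code the native caller can check, instead of
    // letting the exception unwind through JNI.
    class MediaCodecCallGuard {
        private static final String TAG = "MediaCodecCallGuard";
        static final int MEDIA_CODEC_ERROR = -1000;  // assumed sentinel value

        static int dequeueInputBufferSafely(MediaCodec codec, long timeoutUs) {
            try {
                return codec.dequeueInputBuffer(timeoutUs);
            } catch (IllegalStateException e) {
                Log.e(TAG, "Failed to dequeue input buffer", e);
                return MEDIA_CODEC_ERROR;
            }
        }
    }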
Diffstat (limited to 'media')
-rw-r--r--media/base/android/java/src/org/chromium/media/MediaCodecBridge.java187
-rw-r--r--media/base/android/media_codec_bridge.cc150
-rw-r--r--media/base/android/media_codec_bridge.h28
-rw-r--r--media/base/android/media_codec_bridge_unittest.cc17
-rw-r--r--media/base/android/media_player_android.cc2
-rw-r--r--media/base/android/media_player_android.h7
-rw-r--r--media/base/android/media_player_bridge.cc20
-rw-r--r--media/base/android/media_player_bridge.h1
-rw-r--r--media/base/android/media_player_manager.h4
-rw-r--r--media/base/android/media_source_player.cc461
-rw-r--r--media/base/android/media_source_player.h222
-rw-r--r--media/media.gyp23
12 files changed, 958 insertions, 164 deletions
diff --git a/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java b/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java
new file mode 100644
index 0000000..9e7894c
--- /dev/null
+++ b/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java
@@ -0,0 +1,187 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.media;
+
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.media.MediaCodec;
+import android.media.MediaCrypto;
+import android.media.MediaFormat;
+import android.view.Surface;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+
+import org.chromium.base.CalledByNative;
+import org.chromium.base.JNINamespace;
+
+/**
+ * A wrapper of the MediaCodec class to facilitate exception capturing and
+ * audio rendering.
+ */
+@JNINamespace("media")
+class MediaCodecBridge {
+
+ private static final String TAG = "MediaCodecBridge";
+
+ private ByteBuffer[] mInputBuffers;
+ private ByteBuffer[] mOutputBuffers;
+
+ private MediaCodec mMediaCodec;
+
+ private AudioTrack mAudioTrack;
+
+ private static class DequeueOutputResult {
+ private final int mIndex;
+ private final int mFlags;
+ private final int mOffset;
+ private final long mPresentationTimeMicroseconds;
+ private final int mNumBytes;
+
+ private DequeueOutputResult(
+ int index, int flags, int offset, long presentationTimeMicroseconds, int numBytes) {
+ mIndex = index;
+ mFlags = flags;
+ mOffset = offset;
+ mPresentationTimeMicroseconds = presentationTimeMicroseconds;
+ mNumBytes = numBytes;
+ }
+
+ @CalledByNative("DequeueOutputResult")
+ private int index() { return mIndex; }
+
+ @CalledByNative("DequeueOutputResult")
+ private int flags() { return mFlags; }
+
+ @CalledByNative("DequeueOutputResult")
+ private int offset() { return mOffset; }
+
+ @CalledByNative("DequeueOutputResult")
+ private long presentationTimeMicroseconds() { return mPresentationTimeMicroseconds; }
+
+ @CalledByNative("DequeueOutputResult")
+ private int numBytes() { return mNumBytes; }
+ }
+
+ private MediaCodecBridge(String mime) {
+ mMediaCodec = MediaCodec.createDecoderByType(mime);
+ }
+
+ @CalledByNative
+ private static MediaCodecBridge create(String mime) {
+ return new MediaCodecBridge(mime);
+ }
+
+ @CalledByNative
+ private void release() {
+ mMediaCodec.release();
+ if (mAudioTrack != null) {
+ mAudioTrack.release();
+ }
+ }
+
+ @CalledByNative
+ private void start() {
+ mMediaCodec.start();
+ mInputBuffers = mMediaCodec.getInputBuffers();
+ }
+
+ @CalledByNative
+ private int dequeueInputBuffer(long timeoutUs) {
+ return mMediaCodec.dequeueInputBuffer(timeoutUs);
+ }
+
+ @CalledByNative
+ private void flush() {
+ mMediaCodec.flush();
+ if (mAudioTrack != null) {
+ mAudioTrack.flush();
+ }
+ }
+
+ @CalledByNative
+ private void stop() {
+ mMediaCodec.stop();
+ if (mAudioTrack != null) {
+ mAudioTrack.pause();
+ }
+ }
+
+ @CalledByNative
+ private MediaFormat getOutputFormat() {
+ return mMediaCodec.getOutputFormat();
+ }
+
+ @CalledByNative
+ private ByteBuffer getInputBuffer(int index) {
+ return mInputBuffers[index];
+ }
+
+ @CalledByNative
+ private ByteBuffer getOutputBuffer(int index) {
+ return mOutputBuffers[index];
+ }
+
+ @CalledByNative
+ private void queueInputBuffer(
+ int index, int offset, int size, long presentationTimeUs, int flags) {
+ mMediaCodec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
+ }
+
+ @CalledByNative
+ private void releaseOutputBuffer(int index, boolean render) {
+ mMediaCodec.releaseOutputBuffer(index, render);
+ }
+
+ @CalledByNative
+ private void getOutputBuffers() {
+ mOutputBuffers = mMediaCodec.getOutputBuffers();
+ }
+
+ @CalledByNative
+ private DequeueOutputResult dequeueOutputBuffer(long timeoutUs) {
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ int index = mMediaCodec.dequeueOutputBuffer(info, timeoutUs);
+ return new DequeueOutputResult(
+ index, info.flags, info.offset, info.presentationTimeUs, info.size);
+ }
+
+ @CalledByNative
+ private void configureVideo(MediaFormat format, Surface surface, MediaCrypto crypto,
+ int flags) {
+ mMediaCodec.configure(format, surface, crypto, flags);
+ }
+
+ @CalledByNative
+ private void configureAudio(MediaFormat format, MediaCrypto crypto, int flags,
+ boolean playAudio) {
+ mMediaCodec.configure(format, null, crypto, flags);
+ if (playAudio) {
+ int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
+ int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+ int channelConfig = (channelCount == 1) ? AudioFormat.CHANNEL_OUT_MONO :
+ AudioFormat.CHANNEL_OUT_STEREO;
+ int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig,
+ AudioFormat.ENCODING_PCM_16BIT);
+ mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
+ AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
+ }
+ }
+
+ @CalledByNative
+ private void playOutputBuffer(byte[] buf) {
+ if (mAudioTrack != null) {
+ if (AudioTrack.PLAYSTATE_PLAYING != mAudioTrack.getPlayState()) {
+ mAudioTrack.play();
+ }
+ int size = mAudioTrack.write(buf, 0, buf.length);
+ if (buf.length != size) {
+ Log.i(TAG, "Failed to send all data to audio output, expected size: " +
+ buf.length + ", actual size: " + size);
+ }
+ }
+ }
+}
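The Java class above deliberately exposes each MediaCodec step (dequeue input, queue input, dequeue output, release output) as a separate @CalledByNative method so that the C++ MediaCodecBridge can drive the loop one step at a time. For reference, a plain-Java version of the same decode step using MediaCodec directly might look like the sketch below; it is not part of this patch, and the helper name and arguments are only illustrative.

    import android.media.MediaCodec;
    import java.nio.ByteBuffer;

    class MediaCodecLoopExample {
        // Feeds one encoded access unit and drains one output buffer, mirroring
        // the sequence of bridge calls the C++ side makes; returns true when the
        // end-of-stream flag is seen on an output buffer.
        static boolean decodeOneUnit(MediaCodec codec, ByteBuffer[] inputBuffers,
                byte[] data, long presentationTimeUs, long timeoutUs) {
            int inputIndex = codec.dequeueInputBuffer(timeoutUs);
            if (inputIndex >= 0) {
                ByteBuffer buffer = inputBuffers[inputIndex];
                buffer.clear();
                buffer.put(data);
                codec.queueInputBuffer(inputIndex, 0, data.length, presentationTimeUs, 0);
            }
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int outputIndex = codec.dequeueOutputBuffer(info, timeoutUs);
            if (outputIndex >= 0) {
                boolean endOfStream =
                        (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                codec.releaseOutputBuffer(outputIndex, true /* render */);
                return endOfStream;
            }
            return false;
        }
    }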
diff --git a/media/base/android/media_codec_bridge.cc b/media/base/android/media_codec_bridge.cc
index bd4c274..37949f4 100644
--- a/media/base/android/media_codec_bridge.cc
+++ b/media/base/android/media_codec_bridge.cc
@@ -13,70 +13,24 @@
#include "base/basictypes.h"
#include "base/lazy_instance.h"
#include "base/logging.h"
+#include "base/safe_numerics.h"
#include "base/stringprintf.h"
-#include "jni/MediaCodec_jni.h"
+#include "jni/MediaCodecBridge_jni.h"
#include "jni/MediaFormat_jni.h"
-
using base::android::AttachCurrentThread;
using base::android::ConvertUTF8ToJavaString;
using base::android::ScopedJavaLocalRef;
namespace {
-static jclass g_MediaCodecBufferInfo_clazz = NULL;
-
-static const char kMediaCodecBufferInfoClassPath[] =
- "android/media/MediaCodec$BufferInfo";
-
-static bool MediaCodecBufferInfo_RegisterNativesImpl(JNIEnv* env) {
- g_MediaCodecBufferInfo_clazz = reinterpret_cast<jclass>(env->NewGlobalRef(
- base::android::GetClass(env, kMediaCodecBufferInfoClassPath).obj()));
- base::android::CheckException(env);
- return true;
-}
-
-static void GetBufferInfo(JNIEnv* env, jobject buffer_info, int* offset,
- int* size, int64* presentation_time, int* flags) {
- static jfieldID offset_id =
- env->GetFieldID(g_MediaCodecBufferInfo_clazz, "offset", "I");
- static jfieldID size_id =
- env->GetFieldID(g_MediaCodecBufferInfo_clazz, "size", "I");
- static jfieldID presentation_time_id =
- env->GetFieldID(g_MediaCodecBufferInfo_clazz, "presentationTimeUs", "J");
- static jfieldID flags_id =
- env->GetFieldID(g_MediaCodecBufferInfo_clazz, "flags", "I");
-
- *offset = env->GetIntField(buffer_info, offset_id);
- *size = env->GetIntField(buffer_info, size_id);
- *presentation_time = env->GetLongField(buffer_info, presentation_time_id);
- *flags = env->GetIntField(buffer_info, flags_id);
-}
-
-static base::subtle::AtomicWord g_MediaCodecBufferInfo_Constructor = 0;
-static ScopedJavaLocalRef<jobject> Java_MediaCodecBufferInfo_Constructor(
- JNIEnv* env) {
- /* Must call RegisterNativesImpl() */
- DCHECK(g_MediaCodecBufferInfo_clazz);
- jmethodID method_id =
- base::android::MethodID::LazyGet<base::android::MethodID::TYPE_INSTANCE>(
- env, g_MediaCodecBufferInfo_clazz,
- "<init>", "()V",
- &g_MediaCodecBufferInfo_Constructor);
-
- jobject ret = env->NewObject(g_MediaCodecBufferInfo_clazz, method_id);
- base::android::CheckException(env);
- return ScopedJavaLocalRef<jobject>(env, ret);
-}
-
class MediaCodecNativeRegisterer {
public:
MediaCodecNativeRegisterer() {
JNIEnv* env = AttachCurrentThread();
jni_initialized_ =
- MediaCodecBufferInfo_RegisterNativesImpl(env) &&
- JNI_MediaCodec::RegisterNativesImpl(env) &&
+ media::RegisterNativesImpl(env) &&
JNI_MediaFormat::RegisterNativesImpl(env);
}
@@ -141,44 +95,37 @@ MediaCodecBridge::MediaCodecBridge(const char* mime) {
CHECK(g_native_registerer.Pointer()->IsRegistered());
DCHECK(mime);
- ScopedJavaLocalRef<jstring> j_type =
- ConvertUTF8ToJavaString(env, mime);
-
- ScopedJavaLocalRef<jobject> tmp(
- JNI_MediaCodec::Java_MediaCodec_createDecoderByType(env, j_type.obj()));
- DCHECK(!tmp.is_null());
- j_media_codec_.Reset(tmp);
+ ScopedJavaLocalRef<jstring> j_type = ConvertUTF8ToJavaString(env, mime);
+ j_media_codec_.Reset(Java_MediaCodecBridge_create(
+ env, j_type.obj()));
}
MediaCodecBridge::~MediaCodecBridge() {
JNIEnv* env = AttachCurrentThread();
CHECK(env);
-
- JNI_MediaCodec::Java_MediaCodec_release(env, j_media_codec_.obj());
+ Java_MediaCodecBridge_release(env, j_media_codec_.obj());
}
void MediaCodecBridge::StartInternal() {
JNIEnv* env = AttachCurrentThread();
- JNI_MediaCodec::Java_MediaCodec_start(env, j_media_codec_.obj());
- GetInputBuffers();
+ Java_MediaCodecBridge_start(env, j_media_codec_.obj());
}
void MediaCodecBridge::Reset() {
JNIEnv* env = AttachCurrentThread();
- JNI_MediaCodec::Java_MediaCodec_flush(env, j_media_codec_.obj());
+ Java_MediaCodecBridge_flush(env, j_media_codec_.obj());
}
void MediaCodecBridge::Stop() {
JNIEnv* env = AttachCurrentThread();
- JNI_MediaCodec::Java_MediaCodec_stop(env, j_media_codec_.obj());
+ Java_MediaCodecBridge_stop(env, j_media_codec_.obj());
}
void MediaCodecBridge::GetOutputFormat(int* width, int* height) {
JNIEnv* env = AttachCurrentThread();
ScopedJavaLocalRef<jobject> j_format(
- JNI_MediaCodec::Java_MediaCodec_getOutputFormat(
- env, j_media_codec_.obj()));
+ Java_MediaCodecBridge_getOutputFormat(env, j_media_codec_.obj()));
if (!j_format.is_null()) {
ScopedJavaLocalRef<jstring> j_key_width =
ConvertUTF8ToJavaString(env, "width");
@@ -198,17 +145,17 @@ size_t MediaCodecBridge::QueueInputBuffer(
JNIEnv* env = AttachCurrentThread();
ScopedJavaLocalRef<jobject> j_buffer(
- env, env->GetObjectArrayElement(j_input_buffers_.obj(), index));
+ Java_MediaCodecBridge_getInputBuffer(env, j_media_codec_.obj(), index));
uint8* direct_buffer =
static_cast<uint8*>(env->GetDirectBufferAddress(j_buffer.obj()));
int64 buffer_capacity = env->GetDirectBufferCapacity(j_buffer.obj());
size_t size_to_copy = (buffer_capacity < size) ? buffer_capacity : size;
+
if (size_to_copy > 0)
memcpy(direct_buffer, data, size_to_copy);
-
- JNI_MediaCodec::Java_MediaCodec_queueInputBuffer(
+ Java_MediaCodecBridge_queueInputBuffer(
env, j_media_codec_.obj(),
index, 0, size_to_copy, presentation_time.InMicroseconds(), 0);
return size_to_copy;
@@ -216,32 +163,36 @@ size_t MediaCodecBridge::QueueInputBuffer(
void MediaCodecBridge::QueueEOS(int input_buffer_index) {
JNIEnv* env = AttachCurrentThread();
- JNI_MediaCodec::Java_MediaCodec_queueInputBuffer(
+ Java_MediaCodecBridge_queueInputBuffer(
env, j_media_codec_.obj(),
input_buffer_index, 0, 0, 0, kBufferFlagEndOfStream);
}
int MediaCodecBridge::DequeueInputBuffer(base::TimeDelta timeout) {
JNIEnv* env = AttachCurrentThread();
- return JNI_MediaCodec::Java_MediaCodec_dequeueInputBuffer(
+ return Java_MediaCodecBridge_dequeueInputBuffer(
env, j_media_codec_.obj(), timeout.InMicroseconds());
}
int MediaCodecBridge::DequeueOutputBuffer(
- base::TimeDelta timeout, int* offset, int* size,
+ base::TimeDelta timeout, size_t* offset, size_t* size,
base::TimeDelta* presentation_time, bool* end_of_stream) {
JNIEnv* env = AttachCurrentThread();
- ScopedJavaLocalRef<jobject> j_info(
- Java_MediaCodecBufferInfo_Constructor(env));
- jint j_buffer = JNI_MediaCodec::Java_MediaCodec_dequeueOutputBuffer(
- env, j_media_codec_.obj(), j_info.obj(), timeout.InMicroseconds());
+ ScopedJavaLocalRef<jobject> result =
+ Java_MediaCodecBridge_dequeueOutputBuffer(env, j_media_codec_.obj(),
+ timeout.InMicroseconds());
+ int j_buffer = Java_DequeueOutputResult_index(env, result.obj());
if (j_buffer >= 0) {
- int64 presentation_time_us;
- int flags;
- GetBufferInfo(
- env, j_info.obj(), offset, size, &presentation_time_us, &flags);
+ int64 presentation_time_us =
+ Java_DequeueOutputResult_presentationTimeMicroseconds(
+ env, result.obj());
+ int flags = Java_DequeueOutputResult_flags(env, result.obj());
+ *offset = base::checked_numeric_cast<size_t>(
+ Java_DequeueOutputResult_offset(env, result.obj()));
+ *size = base::checked_numeric_cast<size_t>(
+ Java_DequeueOutputResult_numBytes(env, result.obj()));
*presentation_time =
base::TimeDelta::FromMicroseconds(presentation_time_us);
*end_of_stream = flags & kBufferFlagEndOfStream;
@@ -253,28 +204,13 @@ void MediaCodecBridge::ReleaseOutputBuffer(int index, bool render) {
JNIEnv* env = AttachCurrentThread();
CHECK(env);
- JNI_MediaCodec::Java_MediaCodec_releaseOutputBuffer(
+ Java_MediaCodecBridge_releaseOutputBuffer(
env, j_media_codec_.obj(), index, render);
}
-int MediaCodecBridge::GetInputBuffers() {
- JNIEnv* env = AttachCurrentThread();
-
- j_input_buffers_.Reset(
- JNI_MediaCodec::Java_MediaCodec_getInputBuffers(
- env, j_media_codec_.obj()));
-
- return env->GetArrayLength(j_input_buffers_.obj());
-}
-
-int MediaCodecBridge::GetOutputBuffers() {
+void MediaCodecBridge::GetOutputBuffers() {
JNIEnv* env = AttachCurrentThread();
-
- j_output_buffers_.Reset(
- JNI_MediaCodec::Java_MediaCodec_getOutputBuffers(
- env, j_media_codec_.obj()));
-
- return env->GetArrayLength(j_output_buffers_.obj());
+ Java_MediaCodecBridge_getOutputBuffers(env, j_media_codec_.obj());
}
AudioCodecBridge::AudioCodecBridge(const AudioCodec codec)
@@ -283,7 +219,7 @@ AudioCodecBridge::AudioCodecBridge(const AudioCodec codec)
bool AudioCodecBridge::Start(
const AudioCodec codec, int sample_rate, int channel_count,
- const uint8* extra_data, size_t extra_data_size) {
+ const uint8* extra_data, size_t extra_data_size, bool play_audio) {
JNIEnv* env = AttachCurrentThread();
DCHECK(AudioCodecToMimeType(codec));
@@ -344,12 +280,26 @@ bool AudioCodecBridge::Start(
env->DeleteLocalRef(identification_header);
}
- JNI_MediaCodec::Java_MediaCodec_configure(
- env, media_codec(), j_format.obj(), NULL, NULL, 0);
+ Java_MediaCodecBridge_configureAudio(
+ env, media_codec(), j_format.obj(), NULL, 0, play_audio);
StartInternal();
return true;
}
+void AudioCodecBridge::PlayOutputBuffer(int index, size_t size) {
+ DCHECK_LE(0, index);
+ int numBytes = base::checked_numeric_cast<int>(size);
+ JNIEnv* env = AttachCurrentThread();
+ ScopedJavaLocalRef<jobject> buf =
+ Java_MediaCodecBridge_getOutputBuffer(env, media_codec(), index);
+ uint8* buffer = static_cast<uint8*>(env->GetDirectBufferAddress(buf.obj()));
+
+ ScopedJavaLocalRef<jbyteArray> byte_array =
+ base::android::ToJavaByteArray(env, buffer, numBytes);
+ Java_MediaCodecBridge_playOutputBuffer(
+ env, media_codec(), byte_array.obj());
+}
+
VideoCodecBridge::VideoCodecBridge(const VideoCodec codec)
: MediaCodecBridge(VideoCodecToMimeType(codec)) {
}
@@ -365,7 +315,7 @@ bool VideoCodecBridge::Start(
JNI_MediaFormat::Java_MediaFormat_createVideoFormat(
env, j_mime.obj(), size.width(), size.height()));
DCHECK(!j_format.is_null());
- JNI_MediaCodec::Java_MediaCodec_configure(
+ Java_MediaCodecBridge_configureVideo(
env, media_codec(), j_format.obj(), surface, NULL, 0);
StartInternal();
return true;
diff --git a/media/base/android/media_codec_bridge.h b/media/base/android/media_codec_bridge.h
index b1f779e..bff24f9 100644
--- a/media/base/android/media_codec_bridge.h
+++ b/media/base/android/media_codec_bridge.h
@@ -22,7 +22,7 @@ namespace media {
// Note: MediaCodec is only available on JB and greater.
// Use AudioCodecBridge or VideoCodecBridge to create an instance of this
// object.
-class MediaCodecBridge {
+class MEDIA_EXPORT MediaCodecBridge {
public:
enum DequeueBufferInfo {
INFO_OUTPUT_BUFFERS_CHANGED = -3,
@@ -74,16 +74,16 @@ class MediaCodecBridge {
// or one of DequeueBufferInfo above.
// Use kTimeOutInfinity for infinite timeout.
int DequeueOutputBuffer(
- base::TimeDelta timeout, int* offset, int* size,
+ base::TimeDelta timeout, size_t* offset, size_t* size,
base::TimeDelta* presentation_time, bool* end_of_stream);
// Returns the buffer to the codec. If you previously specified a surface
// when configuring this video decoder you can optionally render the buffer.
void ReleaseOutputBuffer(int index, bool render);
- // Gets output buffers from media codec and keeps them inside this class.
- // To access them, use DequeueOutputBuffer() and GetFromOutputBuffer().
- int GetOutputBuffers();
+ // Gets output buffers from media codec and keeps them inside the java class.
+ // To access them, use DequeueOutputBuffer().
+ void GetOutputBuffers();
protected:
explicit MediaCodecBridge(const char* mime);
@@ -95,20 +95,9 @@ class MediaCodecBridge {
jobject media_codec() { return j_media_codec_.obj(); }
private:
- // Gets input buffers from media codec and keeps them inside this class.
- // To access them, use DequeueInputBuffer(), PutToInputBuffer() and
- // QueueInputBuffer().
- int GetInputBuffers();
-
// Java MediaCodec instance.
base::android::ScopedJavaGlobalRef<jobject> j_media_codec_;
- // Input buffers used for *InputBuffer() methods.
- base::android::ScopedJavaGlobalRef<jobjectArray> j_input_buffers_;
-
- // Output buffers used for *InputBuffer() methods.
- base::android::ScopedJavaGlobalRef<jobjectArray> j_output_buffers_;
-
DISALLOW_COPY_AND_ASSIGN(MediaCodecBridge);
};
@@ -118,7 +107,12 @@ class AudioCodecBridge : public MediaCodecBridge {
// Start the audio codec bridge.
bool Start(const AudioCodec codec, int sample_rate, int channel_count,
- const uint8* extra_data, size_t extra_data_size);
+ const uint8* extra_data, size_t extra_data_size,
+ bool play_audio);
+
+ // Play the output buffer. This must be called after DequeueOutputBuffer()
+ // and before ReleaseOutputBuffer().
+ void PlayOutputBuffer(int index, size_t size);
};
class VideoCodecBridge : public MediaCodecBridge {
diff --git a/media/base/android/media_codec_bridge_unittest.cc b/media/base/android/media_codec_bridge_unittest.cc
index acf421e..ea95c52 100644
--- a/media/base/android/media_codec_bridge_unittest.cc
+++ b/media/base/android/media_codec_bridge_unittest.cc
@@ -107,9 +107,7 @@ TEST(MediaCodecBridgeTest, DoNormal) {
scoped_ptr<media::AudioCodecBridge> media_codec;
media_codec.reset(new AudioCodecBridge(kCodecMP3));
- media_codec->Start(kCodecMP3, 44100, 2, NULL, 0);
-
- ASSERT_GT(media_codec->GetOutputBuffers(), 0);
+ media_codec->Start(kCodecMP3, 44100, 2, NULL, 0, false);
int input_buf_index = media_codec->DequeueInputBuffer(
MediaCodecBridge::kTimeOutInfinity);
@@ -133,8 +131,8 @@ TEST(MediaCodecBridgeTest, DoNormal) {
input_pts = kPresentationTimeBase;
bool eos = false;
while (!eos) {
- int unused_offset = 0;
- int size = 0;
+ size_t unused_offset = 0;
+ size_t size = 0;
base::TimeDelta timestamp;
int output_buf_index = media_codec->DequeueOutputBuffer(
MediaCodecBridge::kTimeOutInfinity,
@@ -151,7 +149,7 @@ TEST(MediaCodecBridgeTest, DoNormal) {
media_codec->GetOutputBuffers();
continue;
}
- EXPECT_LE(1, size);
+ EXPECT_LE(1u, size);
if (!eos)
EXPECT_EQ(++input_pts, timestamp.InMicroseconds());
ASSERT_LE(input_pts, kPresentationTimeBase + 2);
@@ -169,12 +167,13 @@ TEST(MediaCodecBridgeTest, InvalidVorbisHeader) {
// The first byte of the header is not 0x02.
uint8 invalid_first_byte[] = { 0x00, 0xff, 0xff, 0xff, 0xff };
EXPECT_FALSE(media_codec->Start(
- kCodecVorbis, 44100, 2, invalid_first_byte, sizeof(invalid_first_byte)));
+ kCodecVorbis, 44100, 2, invalid_first_byte, sizeof(invalid_first_byte),
+ false));
// Size of the header does not match with the data we passed in.
uint8 invalid_size[] = { 0x02, 0x01, 0xff, 0x01, 0xff };
EXPECT_FALSE(media_codec->Start(
- kCodecVorbis, 44100, 2, invalid_size, sizeof(invalid_size)));
+ kCodecVorbis, 44100, 2, invalid_size, sizeof(invalid_size), false));
// Size of the header is too large.
size_t large_size = 8 * 1024 * 1024 + 2;
@@ -184,7 +183,7 @@ TEST(MediaCodecBridgeTest, InvalidVorbisHeader) {
very_large_header[i] = 0xff;
very_large_header[large_size - 1] = 0xfe;
EXPECT_FALSE(media_codec->Start(
- kCodecVorbis, 44100, 2, very_large_header, 0x80000000));
+ kCodecVorbis, 44100, 2, very_large_header, 0x80000000, false));
delete[] very_large_header;
}
diff --git a/media/base/android/media_player_android.cc b/media/base/android/media_player_android.cc
index e45c16f..f5f61c0 100644
--- a/media/base/android/media_player_android.cc
+++ b/media/base/android/media_player_android.cc
@@ -62,7 +62,6 @@ void MediaPlayerAndroid::ReleaseMediaResourcesFromManager() {
manager_->ReleaseMediaResources(this);
}
-#if defined(GOOGLE_TV)
void MediaPlayerAndroid::DemuxerReady(
const MediaPlayerHostMsg_DemuxerReady_Params& params) {
NOTREACHED() << "Unexpected ipc received";
@@ -72,6 +71,5 @@ void MediaPlayerAndroid::ReadFromDemuxerAck(
const MediaPlayerHostMsg_ReadFromDemuxerAck_Params& params) {
NOTREACHED() << "Unexpected ipc received";
}
-#endif
} // namespace media
diff --git a/media/base/android/media_player_android.h b/media/base/android/media_player_android.h
index be21581..685797a 100644
--- a/media/base/android/media_player_android.h
+++ b/media/base/android/media_player_android.h
@@ -11,11 +11,8 @@
#include "base/callback.h"
#include "base/time.h"
#include "googleurl/src/gurl.h"
-#include "media/base/media_export.h"
-
-#if defined(GOOGLE_TV)
#include "media/base/android/demuxer_stream_player_params.h"
-#endif
+#include "media/base/media_export.h"
namespace media {
@@ -79,7 +76,6 @@ class MEDIA_EXPORT MediaPlayerAndroid {
virtual bool CanSeekForward() = 0;
virtual bool CanSeekBackward() = 0;
-#if defined(GOOGLE_TV)
// Methods for DemuxerStreamPlayer.
// Informs DemuxerStreamPlayer that the demuxer is ready.
virtual void DemuxerReady(
@@ -87,7 +83,6 @@ class MEDIA_EXPORT MediaPlayerAndroid {
// Called when the requested data is received from the demuxer.
virtual void ReadFromDemuxerAck(
const MediaPlayerHostMsg_ReadFromDemuxerAck_Params& params);
-#endif
int player_id() { return player_id_; }
diff --git a/media/base/android/media_player_bridge.cc b/media/base/android/media_player_bridge.cc
index b87e4dc..6268ded 100644
--- a/media/base/android/media_player_bridge.cc
+++ b/media/base/android/media_player_bridge.cc
@@ -13,6 +13,7 @@
#include "jni/MediaPlayer_jni.h"
#include "media/base/android/media_player_manager.h"
#include "media/base/android/media_resource_getter.h"
+#include "media/base/android/media_source_player.h"
using base::android::ConvertUTF8ToJavaString;
using base::android::ScopedJavaLocalRef;
@@ -38,13 +39,18 @@ MediaPlayerAndroid* MediaPlayerAndroid::Create(
const GURL& first_party_for_cookies,
bool hide_url_log,
MediaPlayerManager* manager) {
- LOG_IF(WARNING, is_media_source) << "MSE is not supported";
- return new MediaPlayerBridge(
- player_id,
- url,
- first_party_for_cookies,
- hide_url_log,
- manager);
+ if (!is_media_source) {
+ return new MediaPlayerBridge(
+ player_id,
+ url,
+ first_party_for_cookies,
+ hide_url_log,
+ manager);
+ } else {
+ return new MediaSourcePlayer(
+ player_id,
+ manager);
+ }
}
#endif
diff --git a/media/base/android/media_player_bridge.h b/media/base/android/media_player_bridge.h
index 84b769d..476a6c9 100644
--- a/media/base/android/media_player_bridge.h
+++ b/media/base/android/media_player_bridge.h
@@ -18,7 +18,6 @@
#include "googleurl/src/gurl.h"
#include "media/base/android/media_player_android.h"
#include "media/base/android/media_player_listener.h"
-#include "media/base/media_export.h"
namespace media {
diff --git a/media/base/android/media_player_manager.h b/media/base/android/media_player_manager.h
index a0b764d..bef25e5 100644
--- a/media/base/android/media_player_manager.h
+++ b/media/base/android/media_player_manager.h
@@ -7,9 +7,7 @@
#include "base/time.h"
#include "media/base/media_export.h"
-#if defined(GOOGLE_TV)
#include "media/base/android/demuxer_stream_player_params.h"
-#endif
namespace media {
@@ -67,11 +65,9 @@ class MEDIA_EXPORT MediaPlayerManager {
// Called when video size has changed. Args: player ID, width, height.
virtual void OnVideoSizeChanged(int player_id, int width, int height) = 0;
-#if defined(GOOGLE_TV)
// Callback when DemuxerStreamPlayer wants to read data from the demuxer.
virtual void OnReadFromDemuxer(
int player_id, media::DemuxerStream::Type type, bool seek_done) = 0;
-#endif
};
} // namespace media
diff --git a/media/base/android/media_source_player.cc b/media/base/android/media_source_player.cc
new file mode 100644
index 0000000..b29d182
--- /dev/null
+++ b/media/base/android/media_source_player.cc
@@ -0,0 +1,461 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/android/media_source_player.h"
+
+#include "base/android/jni_android.h"
+#include "base/android/jni_string.h"
+#include "base/basictypes.h"
+#include "base/bind.h"
+#include "base/logging.h"
+#include "base/message_loop.h"
+#include "base/threading/thread.h"
+#include "media/base/android/media_codec_bridge.h"
+#include "media/base/android/media_player_manager.h"
+
+namespace {
+// Timeout value for media codec operations.
+const int kMediaCodecTimeoutInMicroseconds = 5000;
+}
+
+namespace media {
+
+MediaDecoderJob::MediaDecoderJob(
+ bool is_audio, const scoped_refptr<base::MessageLoopProxy>& message_loop)
+ : message_loop_(message_loop),
+ needs_flush_(false),
+ is_audio_(is_audio),
+ weak_this_(this) {
+}
+
+MediaDecoderJob::~MediaDecoderJob() {}
+
+// Class for managing audio decoding jobs.
+class AudioDecoderJob : public MediaDecoderJob {
+ public:
+ AudioDecoderJob(
+ const scoped_refptr<base::MessageLoopProxy>& message_loop,
+ const AudioCodec audio_codec, int sample_rate, int channel_count,
+ const uint8* extra_data, size_t extra_data_size);
+ virtual ~AudioDecoderJob() {}
+};
+
+// Class for managing video decoding jobs.
+class VideoDecoderJob : public MediaDecoderJob {
+ public:
+ VideoDecoderJob(
+ const scoped_refptr<base::MessageLoopProxy>& message_loop,
+ const VideoCodec video_codec, const gfx::Size& size, jobject surface);
+ virtual ~VideoDecoderJob() {}
+
+ void Configure(
+ const VideoCodec codec, const gfx::Size& size, jobject surface);
+};
+
+void MediaDecoderJob::Decode(
+ const MediaPlayerHostMsg_ReadFromDemuxerAck_Params::AccessUnit& unit,
+ const base::Time& start_wallclock_time,
+ const base::TimeDelta& start_presentation_timestamp,
+ const MediaDecoderJob::DecoderCallback& callback) {
+ if (!thread_->IsRunning())
+ thread_->Start();
+ thread_->message_loop()->PostTask(FROM_HERE, base::Bind(
+ &MediaDecoderJob::DecodeInternal, base::Unretained(this), unit,
+ start_wallclock_time, start_presentation_timestamp, needs_flush_,
+ callback));
+ needs_flush_ = false;
+}
+
+void MediaDecoderJob::DecodeInternal(
+ const MediaPlayerHostMsg_ReadFromDemuxerAck_Params::AccessUnit& unit,
+ const base::Time& start_wallclock_time,
+ const base::TimeDelta& start_presentation_timestamp,
+ bool needs_flush,
+ const MediaDecoderJob::DecoderCallback& callback) {
+ if (needs_flush)
+ media_codec_bridge_->Reset();
+ base::TimeDelta timeout = base::TimeDelta::FromMicroseconds(
+ kMediaCodecTimeoutInMicroseconds);
+ int input_buf_index = media_codec_bridge_->DequeueInputBuffer(timeout);
+ // TODO(qinmin): skip frames if video is falling far behind.
+ if (input_buf_index >= 0) {
+ if (unit.end_of_stream) {
+ media_codec_bridge_->QueueEOS(input_buf_index);
+ } else {
+ media_codec_bridge_->QueueInputBuffer(
+ input_buf_index, &unit.data[0], unit.data.size(), unit.timestamp);
+ }
+ }
+ size_t offset = 0;
+ size_t size = 0;
+ base::TimeDelta presentation_timestamp;
+ bool end_of_stream = false;
+
+ int outputBufferIndex = media_codec_bridge_->DequeueOutputBuffer(
+ timeout, &offset, &size, &presentation_timestamp, &end_of_stream);
+ switch (outputBufferIndex) {
+ case MediaCodecBridge::INFO_OUTPUT_BUFFERS_CHANGED:
+ media_codec_bridge_->GetOutputBuffers();
+ break;
+ case MediaCodecBridge::INFO_OUTPUT_FORMAT_CHANGED:
+ // TODO(qinmin): figure out what we should do if format changes.
+ break;
+ case MediaCodecBridge::INFO_TRY_AGAIN_LATER:
+ break;
+ default:
+ DCHECK_LE(0, outputBufferIndex);
+ if (size == 0 && end_of_stream)
+ break;
+ base::TimeDelta time_to_render;
+ if (!start_wallclock_time.is_null()) {
+ time_to_render = presentation_timestamp - (base::Time::Now() -
+ start_wallclock_time + start_presentation_timestamp);
+ }
+ if (time_to_render >= base::TimeDelta()) {
+ MessageLoop::current()->PostDelayedTask(
+ FROM_HERE,
+ base::Bind(&MediaDecoderJob::ReleaseOutputBuffer,
+ weak_this_.GetWeakPtr(), outputBufferIndex, size,
+ presentation_timestamp, end_of_stream, callback),
+ time_to_render);
+ } else {
+ // TODO(qinmin): The codec is lagging behind, need to recalculate the
+ // |start_presentation_timestamp_| and |start_wallclock_time_|.
+ DVLOG(1) << (is_audio_ ? "audio " : "video ")
+ << "codec is lagging behind :" << time_to_render.InMicroseconds();
+ ReleaseOutputBuffer(outputBufferIndex, size, presentation_timestamp,
+ end_of_stream, callback);
+ }
+ return;
+ }
+ message_loop_->PostTask(FROM_HERE, base::Bind(
+ callback, start_presentation_timestamp, start_wallclock_time,
+ end_of_stream));
+}
+
+void MediaDecoderJob::ReleaseOutputBuffer(
+ int outputBufferIndex, size_t size,
+ const base::TimeDelta& presentation_timestamp,
+ bool end_of_stream, const MediaDecoderJob::DecoderCallback& callback) {
+ // TODO(qinmin): Refactor this function. Maybe AudioDecoderJob should provide
+ // its own ReleaseOutputBuffer().
+ if (is_audio_) {
+ static_cast<AudioCodecBridge*>(media_codec_bridge_.get())->PlayOutputBuffer(
+ outputBufferIndex, size);
+ }
+ media_codec_bridge_->ReleaseOutputBuffer(outputBufferIndex, !is_audio_);
+ message_loop_->PostTask(FROM_HERE, base::Bind(
+ callback, presentation_timestamp, base::Time::Now(), end_of_stream));
+}
+
+void MediaDecoderJob::Flush() {
+ // Do nothing, flush when the next Decode() happens.
+ needs_flush_ = true;
+}
+
+void MediaDecoderJob::Release() {
+ if (thread_->IsRunning() &&
+ thread_->message_loop() != base::MessageLoop::current()) {
+ thread_->message_loop()->DeleteSoon(FROM_HERE, this);
+ } else {
+ delete this;
+ }
+}
+
+VideoDecoderJob::VideoDecoderJob(
+ const scoped_refptr<base::MessageLoopProxy>& message_loop,
+ const VideoCodec video_codec, const gfx::Size& size, jobject surface)
+ : MediaDecoderJob(false, message_loop) {
+ scoped_ptr<VideoCodecBridge> codec(new VideoCodecBridge(video_codec));
+ codec->Start(video_codec, size, surface);
+ media_codec_bridge_.reset(codec.release());
+ thread_.reset(new base::Thread("MediaSource_VideoDecoderThread"));
+}
+
+AudioDecoderJob::AudioDecoderJob(
+ const scoped_refptr<base::MessageLoopProxy>& message_loop,
+ const AudioCodec audio_codec,
+ int sample_rate,
+ int channel_count,
+ const uint8* extra_data,
+ size_t extra_data_size)
+ : MediaDecoderJob(true, message_loop) {
+ scoped_ptr<AudioCodecBridge> codec(new AudioCodecBridge(audio_codec));
+ codec->Start(audio_codec, sample_rate, channel_count, extra_data,
+ extra_data_size, true);
+ media_codec_bridge_.reset(codec.release());
+ thread_.reset(new base::Thread("MediaSource_AudioDecoderThread"));
+}
+
+MediaSourcePlayer::MediaSourcePlayer(
+ int player_id,
+ MediaPlayerManager* manager)
+ : MediaPlayerAndroid(player_id, manager),
+ pending_play_(false),
+ width_(0),
+ height_(0),
+ audio_codec_(kUnknownAudioCodec),
+ video_codec_(kUnknownVideoCodec),
+ num_channels_(0),
+ sampling_rate_(0),
+ seekable_(true),
+ audio_finished_(true),
+ video_finished_(true),
+ playing_(false),
+ audio_access_unit_index_(0),
+ video_access_unit_index_(0),
+ waiting_for_audio_data_(false),
+ waiting_for_video_data_(false),
+ weak_this_(this) {
+ OnMediaMetadataChanged(duration_, width_, height_, false);
+}
+
+MediaSourcePlayer::~MediaSourcePlayer() {
+ Release();
+}
+
+void MediaSourcePlayer::SetVideoSurface(jobject surface) {
+ video_decoder_job_.reset();
+ if (!surface) {
+ return;
+ }
+
+ video_decoder_job_.reset(new VideoDecoderJob(
+ base::MessageLoopProxy::current(), video_codec_,
+ gfx::Size(width_, height_), surface));
+
+ if (pending_play_)
+ StartInternal();
+
+ // Inform the fullscreen view the player is ready.
+ // TODO(qinmin): refactor MediaPlayerBridge so that we have a better way
+ // to inform ContentVideoView.
+ OnMediaMetadataChanged(duration_, width_, height_, true);
+}
+
+void MediaSourcePlayer::Start() {
+ if (HasAudio() && !audio_decoder_job_) {
+ audio_decoder_job_.reset(new AudioDecoderJob(
+ base::MessageLoopProxy::current(), audio_codec_, sampling_rate_,
+ num_channels_, &audio_extra_data_[0], audio_extra_data_.size()));
+ }
+
+ if (HasVideo() && !video_decoder_job_) {
+ // StartInternal() will be delayed until SetVideoSurface() gets called.
+ pending_play_ = true;
+ return;
+ }
+
+ StartInternal();
+}
+
+void MediaSourcePlayer::Pause() {
+ pending_play_ = false;
+ playing_ = false;
+ start_wallclock_time_ = base::Time();
+}
+
+bool MediaSourcePlayer::IsPlaying() {
+ return pending_play_ || playing_;
+}
+
+int MediaSourcePlayer::GetVideoWidth() {
+ return width_;
+}
+
+int MediaSourcePlayer::GetVideoHeight() {
+ return height_;
+}
+
+void MediaSourcePlayer::SeekTo(base::TimeDelta timestamp) {
+ last_presentation_timestamp_ = timestamp;
+ start_wallclock_time_ = base::Time();
+ last_seek_time_ = base::Time::Now();
+ if (audio_decoder_job_)
+ audio_decoder_job_->Flush();
+ if (video_decoder_job_)
+ video_decoder_job_->Flush();
+ received_audio_ = MediaPlayerHostMsg_ReadFromDemuxerAck_Params();
+ received_video_ = MediaPlayerHostMsg_ReadFromDemuxerAck_Params();
+ audio_access_unit_index_ = 0;
+ video_access_unit_index_ = 0;
+ OnSeekComplete();
+}
+
+base::TimeDelta MediaSourcePlayer::GetCurrentTime() {
+ return last_presentation_timestamp_;
+}
+
+base::TimeDelta MediaSourcePlayer::GetDuration() {
+ return duration_;
+}
+
+void MediaSourcePlayer::Release() {
+ audio_decoder_job_.reset();
+ video_decoder_job_.reset();
+ ReleaseMediaResourcesFromManager();
+}
+
+void MediaSourcePlayer::SetVolume(float leftVolume, float rightVolume) {
+}
+
+bool MediaSourcePlayer::CanPause() {
+ return seekable_;
+}
+
+bool MediaSourcePlayer::CanSeekForward() {
+ return seekable_;
+}
+
+bool MediaSourcePlayer::CanSeekBackward() {
+ return seekable_;
+}
+
+bool MediaSourcePlayer::IsPlayerReady() {
+ return audio_decoder_job_ || video_decoder_job_;
+}
+
+void MediaSourcePlayer::StartInternal() {
+ if (playing_)
+ return;
+ playing_ = true;
+
+ if (HasAudio()) {
+ audio_finished_ = false;
+ DecodeMoreAudio();
+ }
+ if (HasVideo()) {
+ video_finished_ = false;
+ DecodeMoreVideo();
+ }
+}
+
+void MediaSourcePlayer::DemuxerReady(
+ const MediaPlayerHostMsg_DemuxerReady_Params& params) {
+ if (params.duration_ms == std::numeric_limits<int>::max())
+ seekable_ = false;
+ duration_ = base::TimeDelta::FromMilliseconds(params.duration_ms);
+ width_ = params.video_size.width();
+ height_ = params.video_size.height();
+ num_channels_ = params.audio_channels;
+ sampling_rate_ = params.audio_sampling_rate;
+ audio_codec_ = params.audio_codec;
+ video_codec_ = params.video_codec;
+ audio_extra_data_ = params.audio_extra_data;
+ OnMediaMetadataChanged(duration_, width_, height_, true);
+}
+
+void MediaSourcePlayer::ReadFromDemuxerAck(
+ const MediaPlayerHostMsg_ReadFromDemuxerAck_Params& params) {
+ if (params.type == DemuxerStream::AUDIO) {
+ DCHECK_EQ(0u, audio_access_unit_index_);
+ received_audio_ = params;
+ waiting_for_audio_data_ = false;
+ DecodeMoreAudio();
+ } else {
+ DCHECK_EQ(0u, video_access_unit_index_);
+ received_video_ = params;
+ waiting_for_video_data_ = false;
+ DecodeMoreVideo();
+ }
+}
+
+void MediaSourcePlayer::UpdateTimestamps(
+ const base::Time& kickoff_time,
+ const base::TimeDelta& presentation_timestamp,
+ const base::Time& wallclock_time) {
+ // If the job was posted after last seek, update the presentation time.
+ // Otherwise, ignore it.
+ if (kickoff_time > last_seek_time_) {
+ last_presentation_timestamp_ = presentation_timestamp;
+ OnTimeUpdated();
+ if (start_wallclock_time_.is_null() && playing_) {
+ start_wallclock_time_ = wallclock_time;
+ start_presentation_timestamp_ = last_presentation_timestamp_;
+ }
+ }
+}
+
+void MediaSourcePlayer::MediaDecoderCallback(
+ bool is_audio, const base::Time& kickoff_time,
+ const base::TimeDelta& presentation_timestamp,
+ const base::Time& wallclock_time, bool end_of_stream) {
+ if (is_audio || !HasAudio())
+ UpdateTimestamps(kickoff_time, presentation_timestamp, wallclock_time);
+
+ if (end_of_stream) {
+ PlaybackCompleted(is_audio);
+ return;
+ }
+
+ if (!playing_)
+ return;
+
+ if (is_audio)
+ DecodeMoreAudio();
+ else
+ DecodeMoreVideo();
+}
+
+void MediaSourcePlayer::DecodeMoreAudio() {
+ if (audio_access_unit_index_ >= received_audio_.access_units.size()) {
+ if (!waiting_for_audio_data_) {
+ manager()->OnReadFromDemuxer(player_id(), DemuxerStream::AUDIO, true);
+ received_audio_ = MediaPlayerHostMsg_ReadFromDemuxerAck_Params();
+ audio_access_unit_index_ = 0;
+ waiting_for_audio_data_ = true;
+ }
+ return;
+ }
+
+ audio_decoder_job_->Decode(
+ received_audio_.access_units[audio_access_unit_index_],
+ start_wallclock_time_, start_presentation_timestamp_,
+ base::Bind(&MediaSourcePlayer::MediaDecoderCallback,
+ weak_this_.GetWeakPtr(), true, base::Time::Now()));
+ ++audio_access_unit_index_;
+}
+
+void MediaSourcePlayer::DecodeMoreVideo() {
+ if (video_access_unit_index_ >= received_video_.access_units.size()) {
+ if (!waiting_for_video_data_) {
+ manager()->OnReadFromDemuxer(player_id(), DemuxerStream::VIDEO, true);
+ received_video_ = MediaPlayerHostMsg_ReadFromDemuxerAck_Params();
+ video_access_unit_index_ = 0;
+ waiting_for_video_data_ = true;
+ }
+ return;
+ }
+
+ video_decoder_job_->Decode(
+ received_video_.access_units[video_access_unit_index_],
+ start_wallclock_time_, start_presentation_timestamp_,
+ base::Bind(&MediaSourcePlayer::MediaDecoderCallback,
+ weak_this_.GetWeakPtr(), false, base::Time::Now()));
+ ++video_access_unit_index_;
+}
+
+
+void MediaSourcePlayer::PlaybackCompleted(bool is_audio) {
+ if (is_audio)
+ audio_finished_ = true;
+ else
+ video_finished_ = true;
+
+ if ((!HasAudio() || audio_finished_) && (!HasVideo() || video_finished_)) {
+ playing_ = false;
+ start_wallclock_time_ = base::Time();
+ OnPlaybackComplete();
+ }
+}
+
+bool MediaSourcePlayer::HasVideo() {
+ return kUnknownVideoCodec != video_codec_;
+}
+
+bool MediaSourcePlayer::HasAudio() {
+ return kUnknownAudioCodec != audio_codec_;
+}
+
+} // namespace media
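MediaDecoderJob::DecodeInternal() above schedules ReleaseOutputBuffer() using time_to_render = presentation_timestamp - (Time::Now() - start_wallclock_time + start_presentation_timestamp); a positive value becomes a delayed task, otherwise the buffer is released (and thus rendered or played) immediately. A minimal restatement of that arithmetic, with java.time types standing in for base::Time/base::TimeDelta (not part of the patch):

    import java.time.Duration;
    import java.time.Instant;

    class RenderDelayExample {
        // How long to wait before releasing a decoded frame. A non-positive
        // result means the codec is already lagging behind the playback clock.
        static Duration timeToRender(Instant now, Instant startWallclockTime,
                Duration startPresentationTimestamp, Duration presentationTimestamp) {
            Duration elapsed = Duration.between(startWallclockTime, now);
            return presentationTimestamp.minus(elapsed.plus(startPresentationTimestamp));
        }
    }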
diff --git a/media/base/android/media_source_player.h b/media/base/android/media_source_player.h
new file mode 100644
index 0000000..c671950
--- /dev/null
+++ b/media/base/android/media_source_player.h
@@ -0,0 +1,222 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_ANDROID_MEDIA_SOURCE_PLAYER_H_
+#define MEDIA_BASE_ANDROID_MEDIA_SOURCE_PLAYER_H_
+
+#include <jni.h>
+#include <map>
+#include <string>
+#include <vector>
+
+#include "base/android/scoped_java_ref.h"
+#include "base/callback.h"
+#include "base/memory/scoped_ptr.h"
+#include "base/memory/weak_ptr.h"
+#include "base/threading/thread.h"
+#include "base/time.h"
+#include "media/base/android/demuxer_stream_player_params.h"
+#include "media/base/android/media_codec_bridge.h"
+#include "media/base/android/media_player_android.h"
+#include "media/base/media_export.h"
+
+namespace base {
+class MessageLoopProxy;
+}
+
+namespace media {
+
+class VideoDecoderJob;
+class AudioDecoderJob;
+
+// Class for managing all the decoding tasks. Each decoding task will be posted
+// onto the same thread. The thread will be stopped once Stop() is called.
+class MediaDecoderJob {
+ public:
+ virtual ~MediaDecoderJob();
+
+ // Callback when a decoder job finishes its work. Args: presentation time,
+ // timestamp when the data is rendered, whether decoder is reaching EOS.
+ typedef base::Callback<void(const base::TimeDelta&, const base::Time&, bool)>
+ DecoderCallback;
+
+ // Called by MediaSourcePlayer to decode some data.
+ void Decode(
+ const MediaPlayerHostMsg_ReadFromDemuxerAck_Params::AccessUnit& unit,
+ const base::Time& start_wallclock_time,
+ const base::TimeDelta& start_presentation_timestamp,
+ const MediaDecoderJob::DecoderCallback& callback);
+
+ // Flush the decoder.
+ void Flush();
+
+ struct Deleter {
+ inline void operator()(MediaDecoderJob* ptr) const { ptr->Release(); }
+ };
+
+ // Causes this instance to be deleted on the thread it is bound to.
+ void Release();
+
+ protected:
+ MediaDecoderJob(
+ bool is_audio, const scoped_refptr<base::MessageLoopProxy>& message_loop);
+
+ // Release the output buffer and render it.
+ void ReleaseOutputBuffer(
+ int outputBufferIndex, size_t size,
+ const base::TimeDelta& presentation_timestamp,
+ bool end_of_stream, const MediaDecoderJob::DecoderCallback& callback);
+
+ // Helper function to decode data on |thread_|. |unit| contains all the data
+ // to be decoded. |start_wallclock_time| and |start_presentation_timestamp|
+ // represent the system time and the presentation timestamp when the first
+ // frame is rendered. We use this information to estimate when the current
+ // frame should be rendered. If |needs_flush| is true, codec needs to be
+ // flushed at the beginning of this call.
+ void DecodeInternal(
+ const MediaPlayerHostMsg_ReadFromDemuxerAck_Params::AccessUnit& unit,
+ const base::Time& start_wallclock_time,
+ const base::TimeDelta& start_presentation_timestamp,
+ bool needs_flush,
+ const MediaDecoderJob::DecoderCallback& callback);
+
+ // The media codec bridge used for decoding.
+ scoped_ptr<MediaCodecBridge> media_codec_bridge_;
+
+ // The message loop where callbacks should be dispatched.
+ scoped_refptr<base::MessageLoopProxy> message_loop_;
+
+ // Thread the decode task runs on.
+ scoped_ptr<base::Thread> thread_;
+
+ // Whether the decoder needs to be flushed.
+ bool needs_flush_;
+
+ // Whether this is an audio decoder.
+ bool is_audio_;
+
+ // Weak pointer passed to media decoder jobs for callbacks.
+ base::WeakPtrFactory<MediaDecoderJob> weak_this_;
+};
+
+typedef scoped_ptr<MediaDecoderJob, MediaDecoderJob::Deleter>
+ ScopedMediaDecoderJob;
+
+// This class handles media source extensions on Android. It uses Android
+// MediaCodec to decode audio and video streams in two separate threads.
+// IPC is being used to send data from the render process to this object.
+// TODO(qinmin): use shared memory to send data between processes.
+class MEDIA_EXPORT MediaSourcePlayer : public MediaPlayerAndroid {
+ public:
+ // Construct a MediaSourcePlayer object with all the needed media player
+ // callbacks.
+ MediaSourcePlayer(int player_id,
+ MediaPlayerManager* manager);
+ virtual ~MediaSourcePlayer();
+
+ // MediaPlayerAndroid implementation.
+ virtual void SetVideoSurface(jobject surface) OVERRIDE;
+ virtual void Start() OVERRIDE;
+ virtual void Pause() OVERRIDE;
+ virtual void SeekTo(base::TimeDelta timestamp) OVERRIDE;
+ virtual void Release() OVERRIDE;
+ virtual void SetVolume(float leftVolume, float rightVolume) OVERRIDE;
+ virtual int GetVideoWidth() OVERRIDE;
+ virtual int GetVideoHeight() OVERRIDE;
+ virtual base::TimeDelta GetCurrentTime() OVERRIDE;
+ virtual base::TimeDelta GetDuration() OVERRIDE;
+ virtual bool IsPlaying() OVERRIDE;
+ virtual bool CanPause() OVERRIDE;
+ virtual bool CanSeekForward() OVERRIDE;
+ virtual bool CanSeekBackward() OVERRIDE;
+ virtual bool IsPlayerReady() OVERRIDE;
+
+ // Called when the demuxer is ready.
+ virtual void DemuxerReady(
+ const MediaPlayerHostMsg_DemuxerReady_Params& params) OVERRIDE;
+
+ // Called when the requested data is received from the demuxer.
+ virtual void ReadFromDemuxerAck(
+ const MediaPlayerHostMsg_ReadFromDemuxerAck_Params& params) OVERRIDE;
+
+ private:
+ // Update the timestamps for A/V sync scheduling. |kickoff_time| keeps
+ // track of when the job is started. We need this to check if a seek is
+ // performed during decoding.
+ void UpdateTimestamps(
+ const base::Time& kickoff_time,
+ const base::TimeDelta& presentation_timestamp,
+ const base::Time& wallclock_time);
+
+ // Helper function for starting media playback.
+ void StartInternal();
+
+ // Playback is completed for one channel.
+ void PlaybackCompleted(bool is_audio);
+
+ // Called when the decoder finishes its task.
+ void MediaDecoderCallback(
+ bool is_audio, const base::Time& kickoff_time,
+ const base::TimeDelta& presentation_timestamp,
+ const base::Time& wallclock_time, bool end_of_stream);
+
+ // Called to decode more data.
+ void DecodeMoreAudio();
+ void DecodeMoreVideo();
+
+ // Functions to check whether audio/video is present.
+ bool HasVideo();
+ bool HasAudio();
+
+ // Pending play event while player is preparing.
+ bool pending_play_;
+
+ // Stats about the media.
+ base::TimeDelta duration_;
+ int width_;
+ int height_;
+ AudioCodec audio_codec_;
+ VideoCodec video_codec_;
+ int num_channels_;
+ int sampling_rate_;
+ bool seekable_;
+ base::TimeDelta last_presentation_timestamp_;
+ std::vector<uint8> audio_extra_data_;
+ bool audio_finished_;
+ bool video_finished_;
+ bool playing_;
+
+ // Timestamps for providing simple A/V sync. When we start decoding an audio
+ // chunk, we record its presentation timestamp and the current system time.
+ // Then we use this information to estimate when the next audio/video frame
+ // should be rendered.
+ // TODO(qinmin): Need to fix the problem if audio/video lags too far behind
+ // due to network or decoding problems.
+ base::Time start_wallclock_time_;
+ base::TimeDelta start_presentation_timestamp_;
+ base::Time last_seek_time_;
+
+ // Decoder jobs
+ ScopedMediaDecoderJob audio_decoder_job_;
+ ScopedMediaDecoderJob video_decoder_job_;
+
+ // These variables keep track of the current decoding data.
+ // TODO(qinmin): remove these variables when we no longer rely on IPC for
+ // data passing.
+ size_t audio_access_unit_index_;
+ size_t video_access_unit_index_;
+ bool waiting_for_audio_data_;
+ bool waiting_for_video_data_;
+ MediaPlayerHostMsg_ReadFromDemuxerAck_Params received_audio_;
+ MediaPlayerHostMsg_ReadFromDemuxerAck_Params received_video_;
+
+ // Weak pointer passed to media decoder jobs for callbacks.
+ base::WeakPtrFactory<MediaSourcePlayer> weak_this_;
+
+ DISALLOW_COPY_AND_ASSIGN(MediaSourcePlayer);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_ANDROID_MEDIA_SOURCE_PLAYER_H_
diff --git a/media/media.gyp b/media/media.gyp
index 4305097..98fdb81 100644
--- a/media/media.gyp
+++ b/media/media.gyp
@@ -198,6 +198,8 @@
'audio/win/wavein_input_win.h',
'audio/win/waveout_output_win.cc',
'audio/win/waveout_output_win.h',
+ 'base/android/demuxer_stream_player_params.cc',
+ 'base/android/demuxer_stream_player_params.h',
'base/android/media_player_manager.cc',
'base/android/media_player_manager.h',
'base/android/media_resource_getter.cc',
@@ -1498,6 +1500,7 @@
],
'sources': [
'base/android/java/src/org/chromium/media/AudioManagerAndroid.java',
+ 'base/android/java/src/org/chromium/media/MediaCodecBridge.java',
'base/android/java/src/org/chromium/media/MediaPlayerBridge.java',
'base/android/java/src/org/chromium/media/MediaPlayerListener.java',
'base/android/java/src/org/chromium/media/WebAudioMediaCodecBridge.java',
@@ -1519,15 +1522,6 @@
'includes': [ '../build/jni_generator.gypi' ],
},
{
- 'target_name': 'media_codec_jni_headers',
- 'type': 'none',
- 'variables': {
- 'jni_gen_package': 'media',
- 'input_java_class': 'android/media/MediaCodec.class',
- },
- 'includes': [ '../build/jar_file_jni_generator.gypi' ],
- },
- {
'target_name': 'media_format_jni_headers',
'type': 'none',
'variables': {
@@ -1550,22 +1544,15 @@
'base/android/media_player_bridge.h',
'base/android/media_player_listener.cc',
'base/android/media_player_listener.h',
+ 'base/android/media_source_player.cc',
+ 'base/android/media_source_player.h',
'base/android/webaudio_media_codec_bridge.cc',
'base/android/webaudio_media_codec_bridge.h',
'base/android/webaudio_media_codec_info.h',
],
- 'conditions': [
- ['google_tv == 1', {
- 'sources': [
- 'base/android/demuxer_stream_player_params.cc',
- 'base/android/demuxer_stream_player_params.h',
- ],
- }],
- ],
'dependencies': [
'../base/base.gyp:base',
'media_android_jni_headers',
- 'media_codec_jni_headers',
'media_format_jni_headers',
],
'include_dirs': [