author     qinmin@chromium.org <qinmin@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2014-04-15 00:41:32 +0000
committer  qinmin@chromium.org <qinmin@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2014-04-15 00:41:32 +0000
commit     60bbbbde53dace07e8dbb383aace4a46c3b8276b (patch)
tree       bfe756e577ba2f059587e3816515301ec6b95abc
parent     174963675bf92d06bf24b251efa46ddee3475b8e (diff)
Fix an issue where audio and video may run out of sync

This CL fixes 3 issues:

1. In ProcessPendingEvents(), if there is a non-seek pending event,
   UpdateTimestamps() will not get called. This is probably the number one
   reason that A/V lose sync.
2. MSP uses its internal clock to estimate the current presentation time.
   However, the hardware may consume data at a different rate. Pass the
   current frame position from AudioTrack to MSP so we get a better
   estimate of the current time.
3. When a config change arrives, we don't need to reset the clock if the
   audio decoder is not recreated.

BUG=351722

Review URL: https://codereview.chromium.org/215783002

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@263754 0039d316-1c4b-4281-b951-d872f2087c98
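For illustration, the following is a minimal C++ sketch of the clock estimation described in item 2 above. The names here (AudioClockEstimate, CurrentTimeUs) are hypothetical and not part of the patch; the real logic lives in AudioDecoderJob::ReleaseOutputBuffer() in the diff below, which uses AudioTimestampHelper and the playback head position returned by MediaCodecBridge.playOutputBuffer().

    // Hypothetical sketch, not the actual Chromium classes: estimate the
    // current presentation time from how many PCM frames were written to the
    // audio sink versus how many the hardware has already consumed.
    #include <cstdint>

    struct AudioClockEstimate {
      int sample_rate_hz;      // e.g. 44100
      int64_t frames_written;  // total PCM frames written to the audio sink

      // |head_position| is the frame count already played by the hardware
      // (AudioTrack.getPlaybackHeadPosition() on Android). Frames written but
      // not yet played are still buffered, so subtract their duration from
      // the timestamp of all written data.
      int64_t CurrentTimeUs(int64_t head_position) const {
        int64_t written_us = frames_written * 1000000 / sample_rate_hz;
        int64_t pending_frames = frames_written - head_position;
        int64_t pending_us = pending_frames * 1000000 / sample_rate_hz;
        return written_us - pending_us;
      }
    };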
-rw-r--r--  media/base/android/audio_decoder_job.cc                               47
-rw-r--r--  media/base/android/audio_decoder_job.h                                15
-rw-r--r--  media/base/android/java/src/org/chromium/media/MediaCodecBridge.java  40
-rw-r--r--  media/base/android/media_codec_bridge.cc                                5
-rw-r--r--  media/base/android/media_codec_bridge.h                                 5
-rw-r--r--  media/base/android/media_decoder_job.cc                               34
-rw-r--r--  media/base/android/media_decoder_job.h                                18
-rw-r--r--  media/base/android/media_source_player.cc                             71
-rw-r--r--  media/base/android/media_source_player.h                              24
-rw-r--r--  media/base/android/media_source_player_unittest.cc                    51
-rw-r--r--  media/base/android/video_decoder_job.cc                                 3
-rw-r--r--  media/base/android/video_decoder_job.h                                  1
12 files changed, 213 insertions, 101 deletions
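The second timestamp introduced by this patch (the max presentation time, i.e. the time if all decoded data were rendered) feeds the player's starvation timeout, as the media_source_player.cc hunk further below shows. A simplified sketch of that decision follows; the helper name and the 20 ms constant mirror the patch, but this is not the actual MediaSourcePlayer code.

    // Simplified sketch of the starvation-timeout computation, not the real
    // Chromium implementation.
    #include <algorithm>
    #include <cstdint>

    constexpr int64_t kMinStarvationTimeoutUs = 20000;  // ~one compressed audio frame

    int64_t StarvationTimeoutUs(bool has_audio,
                                int64_t current_time_us,  // player clock "now"
                                int64_t current_pts_us,   // frame just rendered
                                int64_t max_pts_us) {     // if all decoded data played
      int64_t timeout_us;
      if (has_audio) {
        // Audio drives the clock: we only starve once everything already
        // queued in the audio sink has played out.
        timeout_us = max_pts_us - current_time_us;
      } else {
        // Video only: the gap between consecutive presentation timestamps
        // approximates one frame interval; allow roughly two frames.
        timeout_us = 2 * (current_pts_us - current_time_us);
      }
      return std::max(timeout_us, kMinStarvationTimeoutUs);
    }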
diff --git a/media/base/android/audio_decoder_job.cc b/media/base/android/audio_decoder_job.cc
index d089796..580c63f 100644
--- a/media/base/android/audio_decoder_job.cc
+++ b/media/base/android/audio_decoder_job.cc
@@ -8,6 +8,14 @@
#include "base/lazy_instance.h"
#include "base/threading/thread.h"
#include "media/base/android/media_codec_bridge.h"
+#include "media/base/audio_timestamp_helper.h"
+
+namespace {
+
+// Use 16bit PCM for audio output. Keep this value in sync with the output
+// format we passed to AudioTrack in MediaCodecBridge.
+const int kBytesPerAudioOutputSample = 2;
+}
namespace media {
@@ -35,19 +43,26 @@ AudioDecoderJob* AudioDecoderJob::Create(
scoped_ptr<AudioCodecBridge> codec(AudioCodecBridge::Create(audio_codec));
if (codec && codec->Start(audio_codec, sample_rate, channel_count, extra_data,
extra_data_size, true, media_crypto)) {
- return new AudioDecoderJob(codec.Pass(), request_data_cb);
+ scoped_ptr<AudioTimestampHelper> audio_timestamp_helper(
+ new AudioTimestampHelper(sample_rate));
+ return new AudioDecoderJob(
+ audio_timestamp_helper.Pass(), codec.Pass(),
+ kBytesPerAudioOutputSample * channel_count, request_data_cb);
}
-
LOG(ERROR) << "Failed to create AudioDecoderJob.";
return NULL;
}
AudioDecoderJob::AudioDecoderJob(
+ scoped_ptr<AudioTimestampHelper> audio_timestamp_helper,
scoped_ptr<AudioCodecBridge> audio_codec_bridge,
+ int bytes_per_frame,
const base::Closure& request_data_cb)
: MediaDecoderJob(g_audio_decoder_thread.Pointer()->message_loop_proxy(),
audio_codec_bridge.get(), request_data_cb),
- audio_codec_bridge_(audio_codec_bridge.Pass()) {
+ bytes_per_frame_(bytes_per_frame),
+ audio_codec_bridge_(audio_codec_bridge.Pass()),
+ audio_timestamp_helper_(audio_timestamp_helper.Pass()) {
}
AudioDecoderJob::~AudioDecoderJob() {
@@ -57,17 +72,33 @@ void AudioDecoderJob::SetVolume(double volume) {
audio_codec_bridge_->SetVolume(volume);
}
+void AudioDecoderJob::SetBaseTimestamp(base::TimeDelta base_timestamp) {
+ audio_timestamp_helper_->SetBaseTimestamp(base_timestamp);
+}
+
void AudioDecoderJob::ReleaseOutputBuffer(
int output_buffer_index,
size_t size,
bool render_output,
+ base::TimeDelta current_presentation_timestamp,
const ReleaseOutputCompletionCallback& callback) {
- size_t size_to_render = render_output ? size : 0u;
- if (size_to_render)
- audio_codec_bridge_->PlayOutputBuffer(output_buffer_index, size_to_render);
+ render_output = render_output && (size != 0u);
+ if (render_output) {
+ int64 head_position = audio_codec_bridge_->PlayOutputBuffer(
+ output_buffer_index, size);
+ audio_timestamp_helper_->AddFrames(size / (bytes_per_frame_));
+ int64 frames_to_play =
+ audio_timestamp_helper_->frame_count() - head_position;
+ DCHECK_GE(frames_to_play, 0);
+ current_presentation_timestamp =
+ audio_timestamp_helper_->GetTimestamp() -
+ audio_timestamp_helper_->GetFrameDuration(frames_to_play);
+ } else {
+ current_presentation_timestamp = kNoTimestamp();
+ }
audio_codec_bridge_->ReleaseOutputBuffer(output_buffer_index, false);
-
- callback.Run(size_to_render);
+ callback.Run(current_presentation_timestamp,
+ audio_timestamp_helper_->GetTimestamp());
}
bool AudioDecoderJob::ComputeTimeToRender() const {
diff --git a/media/base/android/audio_decoder_job.h b/media/base/android/audio_decoder_job.h
index 3d1b21f..4a37038 100644
--- a/media/base/android/audio_decoder_job.h
+++ b/media/base/android/audio_decoder_job.h
@@ -12,6 +12,7 @@
namespace media {
class AudioCodecBridge;
+class AudioTimestampHelper;
// Class for managing audio decoding jobs.
class AudioDecoderJob : public MediaDecoderJob {
@@ -34,8 +35,13 @@ class AudioDecoderJob : public MediaDecoderJob {
void SetVolume(double volume);
+ // Sets the base timestamp for |audio_timestamp_helper_|.
+ void SetBaseTimestamp(base::TimeDelta base_timestamp);
+
private:
- AudioDecoderJob(scoped_ptr<AudioCodecBridge> audio_decoder_bridge,
+ AudioDecoderJob(scoped_ptr<AudioTimestampHelper> audio_timestamp_helper,
+ scoped_ptr<AudioCodecBridge> audio_decoder_bridge,
+ int bytes_per_frame,
const base::Closure& request_data_cb);
// MediaDecoderJob implementation.
@@ -43,11 +49,18 @@ class AudioDecoderJob : public MediaDecoderJob {
int output_buffer_index,
size_t size,
bool render_output,
+ base::TimeDelta current_presentation_timestamp,
const ReleaseOutputCompletionCallback& callback) OVERRIDE;
virtual bool ComputeTimeToRender() const OVERRIDE;
+ // Number of bytes per audio frame.
+ int bytes_per_frame_;
+
scoped_ptr<AudioCodecBridge> audio_codec_bridge_;
+
+ // Object to calculate the current audio timestamp for A/V sync.
+ scoped_ptr<AudioTimestampHelper> audio_timestamp_helper_;
};
} // namespace media
diff --git a/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java b/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java
index cfcebb0..5eb7b5b 100644
--- a/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java
+++ b/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java
@@ -285,6 +285,8 @@ class MediaCodecBridge {
try {
mFlushed = true;
if (mAudioTrack != null) {
+ // Need to call pause() here; otherwise flush() is a no-op.
+ mAudioTrack.pause();
mAudioTrack.flush();
}
mMediaCodec.flush();
@@ -525,18 +527,36 @@ class MediaCodecBridge {
return false;
}
+ /**
+ * Play the audio buffer that is passed in.
+ *
+ * @param buf Audio buffer to be rendered.
+ * @return The number of frames that have already been consumed by the
+ * hardware. This number resets to 0 after each flush call.
+ */
@CalledByNative
- private void playOutputBuffer(byte[] buf) {
- if (mAudioTrack != null) {
- if (AudioTrack.PLAYSTATE_PLAYING != mAudioTrack.getPlayState()) {
- mAudioTrack.play();
- }
- int size = mAudioTrack.write(buf, 0, buf.length);
- if (buf.length != size) {
- Log.i(TAG, "Failed to send all data to audio output, expected size: " +
- buf.length + ", actual size: " + size);
- }
+ private long playOutputBuffer(byte[] buf) {
+ if (mAudioTrack == null) {
+ return 0;
+ }
+
+ if (AudioTrack.PLAYSTATE_PLAYING != mAudioTrack.getPlayState()) {
+ mAudioTrack.play();
+ }
+ int size = mAudioTrack.write(buf, 0, buf.length);
+ if (buf.length != size) {
+ Log.i(TAG, "Failed to send all data to audio output, expected size: " +
+ buf.length + ", actual size: " + size);
}
+ // TODO(qinmin): Returning the head position allows us to estimate
+ // the current presentation time in native code. However, it is
+ // better to use AudioTrack.getCurrentTimestamp() to get the last
+ // known time when a frame is played. To do that, we will need to
+ // convert the Java nano time to a C++ timestamp.
+ // If the stream runs too long, getPlaybackHeadPosition() could
+ // overflow. AudioTimestampHelper in MediaSourcePlayer has the same
+ // issue. See http://crbug.com/358801.
+ return mAudioTrack.getPlaybackHeadPosition();
}
@CalledByNative
diff --git a/media/base/android/media_codec_bridge.cc b/media/base/android/media_codec_bridge.cc
index 0178844..3b50949 100644
--- a/media/base/android/media_codec_bridge.cc
+++ b/media/base/android/media_codec_bridge.cc
@@ -593,7 +593,7 @@ bool AudioCodecBridge::ConfigureMediaFormat(jobject j_format,
return true;
}
-void AudioCodecBridge::PlayOutputBuffer(int index, size_t size) {
+int64 AudioCodecBridge::PlayOutputBuffer(int index, size_t size) {
DCHECK_LE(0, index);
int numBytes = base::checked_cast<int>(size);
JNIEnv* env = AttachCurrentThread();
@@ -603,7 +603,8 @@ void AudioCodecBridge::PlayOutputBuffer(int index, size_t size) {
ScopedJavaLocalRef<jbyteArray> byte_array =
base::android::ToJavaByteArray(env, buffer, numBytes);
- Java_MediaCodecBridge_playOutputBuffer(env, media_codec(), byte_array.obj());
+ return Java_MediaCodecBridge_playOutputBuffer(
+ env, media_codec(), byte_array.obj());
}
void AudioCodecBridge::SetVolume(double volume) {
diff --git a/media/base/android/media_codec_bridge.h b/media/base/android/media_codec_bridge.h
index f90edcf..a1a493f 100644
--- a/media/base/android/media_codec_bridge.h
+++ b/media/base/android/media_codec_bridge.h
@@ -222,8 +222,9 @@ class AudioCodecBridge : public MediaCodecBridge {
bool play_audio, jobject media_crypto) WARN_UNUSED_RESULT;
// Play the output buffer. This call must be called after
- // DequeueOutputBuffer() and before ReleaseOutputBuffer.
- void PlayOutputBuffer(int index, size_t size);
+ // DequeueOutputBuffer() and before ReleaseOutputBuffer. Returns the playback
+ // head position expressed in frames.
+ int64 PlayOutputBuffer(int index, size_t size);
// Set the volume of the audio output.
void SetVolume(double volume);
diff --git a/media/base/android/media_decoder_job.cc b/media/base/android/media_decoder_job.cc
index 8b8c9b2..c57da3a 100644
--- a/media/base/android/media_decoder_job.cc
+++ b/media/base/android/media_decoder_job.cc
@@ -80,7 +80,7 @@ void MediaDecoderJob::OnDataReceived(const DemuxerData& data) {
if (stop_decode_pending_) {
DCHECK(is_decoding());
- OnDecodeCompleted(MEDIA_CODEC_STOPPED, kNoTimestamp(), 0);
+ OnDecodeCompleted(MEDIA_CODEC_STOPPED, kNoTimestamp(), kNoTimestamp());
return;
}
@@ -275,8 +275,7 @@ void MediaDecoderJob::DecodeCurrentAccessUnit(
base::Bind(&MediaDecoderJob::OnDecodeCompleted,
base::Unretained(this),
MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER,
- kNoTimestamp(),
- 0));
+ kNoTimestamp(), kNoTimestamp()));
return;
}
@@ -305,7 +304,7 @@ void MediaDecoderJob::DecodeInternal(
output_eos_encountered_ = false;
MediaCodecStatus reset_status = media_codec_bridge_->Reset();
if (MEDIA_CODEC_OK != reset_status) {
- callback.Run(reset_status, kNoTimestamp(), 0);
+ callback.Run(reset_status, kNoTimestamp(), kNoTimestamp());
return;
}
}
@@ -318,7 +317,7 @@ void MediaDecoderJob::DecodeInternal(
// For aborted access unit, just skip it and inform the player.
if (unit.status == DemuxerStream::kAborted) {
// TODO(qinmin): use a new enum instead of MEDIA_CODEC_STOPPED.
- callback.Run(MEDIA_CODEC_STOPPED, kNoTimestamp(), 0);
+ callback.Run(MEDIA_CODEC_STOPPED, kNoTimestamp(), kNoTimestamp());
return;
}
@@ -326,7 +325,8 @@ void MediaDecoderJob::DecodeInternal(
if (unit.end_of_stream || unit.data.empty()) {
input_eos_encountered_ = true;
output_eos_encountered_ = true;
- callback.Run(MEDIA_CODEC_OUTPUT_END_OF_STREAM, kNoTimestamp(), 0);
+ callback.Run(MEDIA_CODEC_OUTPUT_END_OF_STREAM, kNoTimestamp(),
+ kNoTimestamp());
return;
}
@@ -339,7 +339,7 @@ void MediaDecoderJob::DecodeInternal(
if (input_status == MEDIA_CODEC_INPUT_END_OF_STREAM) {
input_eos_encountered_ = true;
} else if (input_status != MEDIA_CODEC_OK) {
- callback.Run(input_status, kNoTimestamp(), 0);
+ callback.Run(input_status, kNoTimestamp(), kNoTimestamp());
return;
}
}
@@ -366,7 +366,7 @@ void MediaDecoderJob::DecodeInternal(
!media_codec_bridge_->GetOutputBuffers()) {
status = MEDIA_CODEC_ERROR;
}
- callback.Run(status, kNoTimestamp(), 0);
+ callback.Run(status, kNoTimestamp(), kNoTimestamp());
return;
}
@@ -393,7 +393,8 @@ void MediaDecoderJob::DecodeInternal(
buffer_index,
size,
render_output,
- base::Bind(callback, status, presentation_timestamp)),
+ presentation_timestamp,
+ base::Bind(callback, status)),
time_to_render);
return;
}
@@ -412,13 +413,14 @@ void MediaDecoderJob::DecodeInternal(
presentation_timestamp = kNoTimestamp();
}
ReleaseOutputCompletionCallback completion_callback = base::Bind(
- callback, status, presentation_timestamp);
- ReleaseOutputBuffer(buffer_index, size, render_output, completion_callback);
+ callback, status);
+ ReleaseOutputBuffer(buffer_index, size, render_output, presentation_timestamp,
+ completion_callback);
}
void MediaDecoderJob::OnDecodeCompleted(
- MediaCodecStatus status, base::TimeDelta presentation_timestamp,
- size_t audio_output_bytes) {
+ MediaCodecStatus status, base::TimeDelta current_presentation_timestamp,
+ base::TimeDelta max_presentation_timestamp) {
DCHECK(ui_task_runner_->BelongsToCurrentThread());
if (destroy_pending_) {
@@ -430,7 +432,7 @@ void MediaDecoderJob::OnDecodeCompleted(
DCHECK(!decode_cb_.is_null());
// If output was queued for rendering, then we have completed prerolling.
- if (presentation_timestamp != kNoTimestamp())
+ if (current_presentation_timestamp != kNoTimestamp())
prerolling_ = false;
switch (status) {
@@ -453,8 +455,8 @@ void MediaDecoderJob::OnDecodeCompleted(
};
stop_decode_pending_ = false;
- base::ResetAndReturn(&decode_cb_).Run(status, presentation_timestamp,
- audio_output_bytes);
+ base::ResetAndReturn(&decode_cb_).Run(
+ status, current_presentation_timestamp, max_presentation_timestamp);
}
const AccessUnit& MediaDecoderJob::CurrentAccessUnit() const {
diff --git a/media/base/android/media_decoder_job.h b/media/base/android/media_decoder_job.h
index 77caa9f..77fe1af 100644
--- a/media/base/android/media_decoder_job.h
+++ b/media/base/android/media_decoder_job.h
@@ -29,15 +29,16 @@ class MediaDecoderJob {
};
// Callback when a decoder job finishes its work. Args: whether decode
- // finished successfully, presentation time, audio output bytes.
- // If the presentation time is equal to kNoTimestamp(), the decoder job
- // skipped rendering of the decoded output and the callback target should
+ // finished successfully, current presentation time, max presentation time.
+ // If the current presentation time is equal to kNoTimestamp(), the decoder
+ // job skipped rendering of the decoded output and the callback target should
// update its clock to avoid introducing extra delays to the next frame.
typedef base::Callback<void(MediaCodecStatus, base::TimeDelta,
- size_t)> DecoderCallback;
+ base::TimeDelta)> DecoderCallback;
// Callback when a decoder job finishes releasing the output buffer.
- // Args: audio output bytes, must be 0 for video.
- typedef base::Callback<void(size_t)> ReleaseOutputCompletionCallback;
+ // Args: current presentation time, max presentation time.
+ typedef base::Callback<void(base::TimeDelta, base::TimeDelta)>
+ ReleaseOutputCompletionCallback;
virtual ~MediaDecoderJob();
@@ -95,6 +96,7 @@ class MediaDecoderJob {
int output_buffer_index,
size_t size,
bool render_output,
+ base::TimeDelta current_presentation_timestamp,
const ReleaseOutputCompletionCallback& callback) = 0;
// Returns true if the "time to render" needs to be computed for frames in
@@ -137,8 +139,8 @@ class MediaDecoderJob {
// Completes any pending job destruction or any pending decode stop. If
// destruction was not pending, passes its arguments to |decode_cb_|.
void OnDecodeCompleted(MediaCodecStatus status,
- base::TimeDelta presentation_timestamp,
- size_t audio_output_bytes);
+ base::TimeDelta current_presentation_timestamp,
+ base::TimeDelta max_presentation_timestamp);
// Helper function to get the current access unit that is being decoded.
const AccessUnit& CurrentAccessUnit() const;
diff --git a/media/base/android/media_source_player.cc b/media/base/android/media_source_player.cc
index e52d0e4..8b19de7 100644
--- a/media/base/android/media_source_player.cc
+++ b/media/base/android/media_source_player.cc
@@ -19,16 +19,8 @@
#include "media/base/android/media_drm_bridge.h"
#include "media/base/android/media_player_manager.h"
#include "media/base/android/video_decoder_job.h"
-#include "media/base/audio_timestamp_helper.h"
#include "media/base/buffers.h"
-namespace {
-
-// Use 16bit PCM for audio output. Keep this value in sync with the output
-// format we passed to AudioTrack in MediaCodecBridge.
-const int kBytesPerAudioOutputSample = 2;
-}
-
namespace media {
MediaSourcePlayer::MediaSourcePlayer(
@@ -117,8 +109,6 @@ void MediaSourcePlayer::ScheduleSeekEventAndStopDecoding(
pending_seek_ = false;
clock_.SetTime(seek_time, seek_time);
- if (audio_timestamp_helper_)
- audio_timestamp_helper_->SetBaseTimestamp(seek_time);
if (audio_decoder_job_ && audio_decoder_job_->is_decoding())
audio_decoder_job_->StopDecode();
@@ -315,14 +305,6 @@ void MediaSourcePlayer::OnDemuxerConfigsAvailable(
sampling_rate_ = configs.audio_sampling_rate;
is_audio_encrypted_ = configs.is_audio_encrypted;
audio_extra_data_ = configs.audio_extra_data;
- if (HasAudio()) {
- DCHECK_GT(num_channels_, 0);
- audio_timestamp_helper_.reset(new AudioTimestampHelper(sampling_rate_));
- audio_timestamp_helper_->SetBaseTimestamp(GetCurrentTime());
- } else {
- audio_timestamp_helper_.reset();
- }
-
video_codec_ = configs.video_codec;
width_ = configs.video_size.width();
height_ = configs.video_size.height();
@@ -444,8 +426,8 @@ void MediaSourcePlayer::OnDemuxerSeekDone(
DVLOG(1) << __FUNCTION__ << " : setting clock to actual browser seek time: "
<< seek_time.InSecondsF();
clock_.SetTime(seek_time, seek_time);
- if (audio_timestamp_helper_)
- audio_timestamp_helper_->SetBaseTimestamp(seek_time);
+ if (audio_decoder_job_)
+ audio_decoder_job_->SetBaseTimestamp(seek_time);
} else {
DCHECK(actual_browser_seek_time == kNoTimestamp());
}
@@ -471,16 +453,10 @@ void MediaSourcePlayer::OnDemuxerSeekDone(
}
void MediaSourcePlayer::UpdateTimestamps(
- base::TimeDelta presentation_timestamp, size_t audio_output_bytes) {
- base::TimeDelta new_max_time = presentation_timestamp;
+ base::TimeDelta current_presentation_timestamp,
+ base::TimeDelta max_presentation_timestamp) {
+ clock_.SetTime(current_presentation_timestamp, max_presentation_timestamp);
- if (audio_output_bytes > 0) {
- audio_timestamp_helper_->AddFrames(
- audio_output_bytes / (kBytesPerAudioOutputSample * num_channels_));
- new_max_time = audio_timestamp_helper_->GetTimestamp();
- }
-
- clock_.SetMaxTime(new_max_time);
manager()->OnTimeUpdate(player_id(), GetCurrentTime());
}
@@ -510,6 +486,8 @@ void MediaSourcePlayer::ProcessPendingEvents() {
if (IsEventPending(SEEK_EVENT_PENDING)) {
DVLOG(1) << __FUNCTION__ << " : Handling SEEK_EVENT";
ClearDecodingData();
+ if (audio_decoder_job_)
+ audio_decoder_job_->SetBaseTimestamp(GetCurrentTime());
demuxer_->RequestDemuxerSeek(GetCurrentTime(), doing_browser_seek_);
return;
}
@@ -581,7 +559,8 @@ void MediaSourcePlayer::ProcessPendingEvents() {
void MediaSourcePlayer::MediaDecoderCallback(
bool is_audio, MediaCodecStatus status,
- base::TimeDelta presentation_timestamp, size_t audio_output_bytes) {
+ base::TimeDelta current_presentation_timestamp,
+ base::TimeDelta max_presentation_timestamp) {
DVLOG(1) << __FUNCTION__ << ": " << is_audio << ", " << status;
// TODO(xhwang): Drop IntToString() when http://crbug.com/303899 is fixed.
@@ -625,6 +604,12 @@ void MediaSourcePlayer::MediaDecoderCallback(
return;
}
+ if (status == MEDIA_CODEC_OK && is_clock_manager &&
+ current_presentation_timestamp != kNoTimestamp()) {
+ UpdateTimestamps(
+ current_presentation_timestamp, max_presentation_timestamp);
+ }
+
if (status == MEDIA_CODEC_OUTPUT_END_OF_STREAM)
PlaybackCompleted(is_audio);
@@ -636,11 +621,6 @@ void MediaSourcePlayer::MediaDecoderCallback(
if (status == MEDIA_CODEC_OUTPUT_END_OF_STREAM)
return;
- if (status == MEDIA_CODEC_OK && is_clock_manager &&
- presentation_timestamp != kNoTimestamp()) {
- UpdateTimestamps(presentation_timestamp, audio_output_bytes);
- }
-
if (!playing_) {
if (is_clock_manager)
clock_.Pause();
@@ -662,8 +642,9 @@ void MediaSourcePlayer::MediaDecoderCallback(
// If we have a valid timestamp, start the starvation callback. Otherwise,
// reset the |start_time_ticks_| so that the next frame will not suffer
// from the decoding delay caused by the current frame.
- if (presentation_timestamp != kNoTimestamp())
- StartStarvationCallback(presentation_timestamp);
+ if (current_presentation_timestamp != kNoTimestamp())
+ StartStarvationCallback(current_presentation_timestamp,
+ max_presentation_timestamp);
else
start_time_ticks_ = base::TimeTicks::Now();
}
@@ -811,6 +792,13 @@ void MediaSourcePlayer::ConfigureAudioDecoderJob() {
if (audio_decoder_job_) {
SetVolumeInternal();
+ // Need to reset the base timestamp in |audio_decoder_job_|.
+ // TODO(qinmin): When reconfiguring the |audio_decoder_job_|, there might
+ // still be some audio frames in the decoder or in AudioTrack. Therefore,
+ // we are losing some time here. http://crbug.com/357726.
+ base::TimeDelta current_time = GetCurrentTime();
+ audio_decoder_job_->SetBaseTimestamp(current_time);
+ clock_.SetTime(current_time, current_time);
audio_decoder_job_->BeginPrerolling(preroll_timestamp_);
reconfig_audio_decoder_ = false;
}
@@ -912,7 +900,8 @@ void MediaSourcePlayer::OnDecoderStarved() {
}
void MediaSourcePlayer::StartStarvationCallback(
- base::TimeDelta presentation_timestamp) {
+ base::TimeDelta current_presentation_timestamp,
+ base::TimeDelta max_presentation_timestamp) {
// 20ms was chosen because it is the typical size of a compressed audio frame.
// Anything smaller than this would likely cause unnecessary cycling in and
// out of the prefetch state.
@@ -922,16 +911,16 @@ void MediaSourcePlayer::StartStarvationCallback(
base::TimeDelta current_timestamp = GetCurrentTime();
base::TimeDelta timeout;
if (HasAudio()) {
- timeout = audio_timestamp_helper_->GetTimestamp() - current_timestamp;
+ timeout = max_presentation_timestamp - current_timestamp;
} else {
- DCHECK(current_timestamp <= presentation_timestamp);
+ DCHECK(current_timestamp <= current_presentation_timestamp);
// For video only streams, fps can be estimated from the difference
// between the previous and current presentation timestamps. The
// previous presentation timestamp is equal to current_timestamp.
// TODO(qinmin): determine whether 2 is a good coefficient for estimating
// video frame timeout.
- timeout = 2 * (presentation_timestamp - current_timestamp);
+ timeout = 2 * (current_presentation_timestamp - current_timestamp);
}
timeout = std::max(timeout, kMinStarvationTimeout);
diff --git a/media/base/android/media_source_player.h b/media/base/android/media_source_player.h
index ed0483c..bd8457e 100644
--- a/media/base/android/media_source_player.h
+++ b/media/base/android/media_source_player.h
@@ -29,7 +29,6 @@
namespace media {
class AudioDecoderJob;
-class AudioTimestampHelper;
class VideoDecoderJob;
// This class handles media source extensions on Android. It uses Android
@@ -77,8 +76,8 @@ class MEDIA_EXPORT MediaSourcePlayer : public MediaPlayerAndroid,
friend class MediaSourcePlayerTest;
// Update the current timestamp.
- void UpdateTimestamps(base::TimeDelta presentation_timestamp,
- size_t audio_output_bytes);
+ void UpdateTimestamps(base::TimeDelta current_presentation_timestamp,
+ base::TimeDelta max_presentation_timestamp);
// Helper function for starting media playback.
void StartInternal();
@@ -89,8 +88,8 @@ class MEDIA_EXPORT MediaSourcePlayer : public MediaPlayerAndroid,
// Called when the decoder finishes its task.
void MediaDecoderCallback(
bool is_audio, MediaCodecStatus status,
- base::TimeDelta presentation_timestamp,
- size_t audio_output_bytes);
+ base::TimeDelta current_presentation_timestamp,
+ base::TimeDelta max_presentation_timestamp);
// Gets MediaCrypto object from |drm_bridge_|.
base::android::ScopedJavaLocalRef<jobject> GetMediaCrypto();
@@ -133,10 +132,14 @@ class MEDIA_EXPORT MediaSourcePlayer : public MediaPlayerAndroid,
void OnDecoderStarved();
// Starts the |decoder_starvation_callback_| task with the timeout value.
- // |presentation_timestamp| - The presentation timestamp used for starvation
- // timeout computations. It represents the timestamp of the last piece of
- // decoded data.
- void StartStarvationCallback(base::TimeDelta presentation_timestamp);
+ // |current_presentation_timestamp| - The presentation timestamp used for
+ // starvation timeout computations. It represents the current timestamp of
+ // rendered data.
+ // |max_presentation_timestamp| - The presentation timestamp if all the
+ // decoded data are rendered.
+ void StartStarvationCallback(
+ base::TimeDelta current_presentation_timestamp,
+ base::TimeDelta max_presentation_timestamp);
// Schedules a seek event in |pending_events_| and calls StopDecode() on all
// the MediaDecoderJobs. Sets clock to |seek_time|, and resets
@@ -266,9 +269,6 @@ class MEDIA_EXPORT MediaSourcePlayer : public MediaPlayerAndroid,
// elapses.
base::CancelableClosure decoder_starvation_callback_;
- // Object to calculate the current audio timestamp for A/V sync.
- scoped_ptr<AudioTimestampHelper> audio_timestamp_helper_;
-
MediaDrmBridge* drm_bridge_;
// No decryption key available to decrypt the encrypted buffer. In this case,
diff --git a/media/base/android/media_source_player_unittest.cc b/media/base/android/media_source_player_unittest.cc
index 9c6a062..7324336 100644
--- a/media/base/android/media_source_player_unittest.cc
+++ b/media/base/android/media_source_player_unittest.cc
@@ -2067,4 +2067,55 @@ TEST_F(MediaSourcePlayerTest, SurfaceChangeClearedEvenIfMediaCryptoAbsent) {
EXPECT_FALSE(GetMediaDecoderJob(false));
}
+TEST_F(MediaSourcePlayerTest, CurrentTimeUpdatedWhileDecoderStarved) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ // Test that current time is updated while decoder is starved.
+ StartAudioDecoderJob(true);
+ for (int i = 0; i < 3; ++i) {
+ player_.OnDemuxerDataAvailable(CreateReadFromDemuxerAckForAudio(i));
+ WaitForAudioDecodeDone();
+ }
+ base::TimeDelta current_time = player_.GetCurrentTime();
+
+ // Trigger starvation while the decoder is decoding.
+ player_.OnDemuxerDataAvailable(CreateReadFromDemuxerAckForAudio(3));
+ TriggerPlayerStarvation();
+ WaitForAudioDecodeDone();
+
+ // Current time should be updated.
+ EXPECT_LT(current_time.InMillisecondsF(),
+ player_.GetCurrentTime().InMillisecondsF());
+}
+
+TEST_F(MediaSourcePlayerTest, CurrentTimeKeepsIncreasingAfterConfigChange) {
+ SKIP_TEST_IF_MEDIA_CODEC_BRIDGE_IS_NOT_AVAILABLE();
+
+ // Test that current time keeps increasing after an audio config change.
+ StartAudioDecoderJob(true);
+
+ for (int i = 0; i < 4; ++i) {
+ player_.OnDemuxerDataAvailable(CreateReadFromDemuxerAckForAudio(i));
+ WaitForAudioDecodeDone();
+ }
+ base::TimeDelta current_time = player_.GetCurrentTime();
+ EXPECT_LT(0.0, current_time.InMillisecondsF());
+
+ DemuxerData data = CreateReadFromDemuxerAckWithConfigChanged(true, 0);
+ player_.OnDemuxerDataAvailable(data);
+ WaitForAudioDecodeDone();
+
+ // Simulate arrival of new configs.
+ player_.OnDemuxerConfigsAvailable(CreateAudioDemuxerConfigs(kCodecVorbis));
+ for (int i = 0; i < 4; ++i) {
+ player_.OnDemuxerDataAvailable(CreateReadFromDemuxerAckForAudio(i));
+ WaitForAudioDecodeDone();
+ base::TimeDelta new_current_time = player_.GetCurrentTime();
+ EXPECT_LE(current_time.InMillisecondsF(),
+ new_current_time.InMillisecondsF());
+ current_time = new_current_time;
+ }
+}
+
} // namespace media
diff --git a/media/base/android/video_decoder_job.cc b/media/base/android/video_decoder_job.cc
index 12ab441..884bc6d 100644
--- a/media/base/android/video_decoder_job.cc
+++ b/media/base/android/video_decoder_job.cc
@@ -63,9 +63,10 @@ void VideoDecoderJob::ReleaseOutputBuffer(
int output_buffer_index,
size_t size,
bool render_output,
+ base::TimeDelta current_presentation_timestamp,
const ReleaseOutputCompletionCallback& callback) {
video_codec_bridge_->ReleaseOutputBuffer(output_buffer_index, render_output);
- callback.Run(0u);
+ callback.Run(current_presentation_timestamp, current_presentation_timestamp);
}
bool VideoDecoderJob::ComputeTimeToRender() const {
diff --git a/media/base/android/video_decoder_job.h b/media/base/android/video_decoder_job.h
index 5c98850..e981ab8 100644
--- a/media/base/android/video_decoder_job.h
+++ b/media/base/android/video_decoder_job.h
@@ -48,6 +48,7 @@ class VideoDecoderJob : public MediaDecoderJob {
int output_buffer_index,
size_t size,
bool render_output,
+ base::TimeDelta current_presentation_timestamp,
const ReleaseOutputCompletionCallback& callback) OVERRIDE;
virtual bool ComputeTimeToRender() const OVERRIDE;