author     qinmin@chromium.org <qinmin@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-07-02 19:55:24 +0000
committer  qinmin@chromium.org <qinmin@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-07-02 19:55:24 +0000
commit     d19c472f0586c0619e5b3240e8b27a25e630fba6 (patch)
tree       e6cce568beef22f0bf6f3c324d5f250edafd8f3a /media
parent     62269731005768be1dedf14968d69fb74faa524d (diff)
Set start timestamps when StartInternal() is called.
The following changes are made in this CL:
1. Change wallclock_time_ to time_ticks_ due to the recent change from base::Time to base::TimeTicks.
2. When sending an IPC for data, if the data does not come back within the given timeout, request both the audio and video decoders to wait for data and start together when the data arrives.
3. When StartInternal() is called, request the audio and video decoder jobs to start together. Previously there was a bug where video could start before audio, and A/V would lose sync afterwards.

BUG=233420

Review URL: https://chromiumcodereview.appspot.com/18080003

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@209758 0039d316-1c4b-4281-b951-d872f2087c98
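For reference, the A/V clock introduced by this CL is driven by the number of PCM bytes the audio decoder has output: the byte count is converted back into a media timestamp using the 16-bit sample size, channel count and sampling rate (kBytesPerAudioOutputSample * num_channels_ and sampling_rate_ in the diff below). A minimal standalone C++ sketch of that conversion, using a hypothetical AudioByteClock helper rather than the real media::AudioTimestampHelper and media::Clock classes:

// Sketch only: converts rendered PCM byte counts into a media timestamp,
// mirroring how the CL sizes AudioTimestampHelper with
// kBytesPerAudioOutputSample * num_channels_ and sampling_rate_.
#include <chrono>
#include <cstdint>
#include <iostream>

class AudioByteClock {
 public:
  AudioByteClock(int bytes_per_sample, int channels, int sample_rate)
      : bytes_per_frame_(bytes_per_sample * channels),
        sample_rate_(sample_rate) {}

  // Reset on seek, like SetBaseTimestamp() in the CL.
  void SetBaseTimestamp(std::chrono::microseconds base) {
    base_ = base;
    total_bytes_ = 0;
  }

  // Called after each successful audio decode with the output size in bytes.
  void AddBytes(int64_t bytes) { total_bytes_ += bytes; }

  std::chrono::microseconds GetTimestamp() const {
    const int64_t frames = total_bytes_ / bytes_per_frame_;
    return base_ + std::chrono::microseconds(frames * 1000000 / sample_rate_);
  }

 private:
  const int bytes_per_frame_;
  const int sample_rate_;
  int64_t total_bytes_ = 0;
  std::chrono::microseconds base_{0};
};

int main() {
  // 16-bit PCM (2 bytes per sample), stereo, 44100 Hz -- the values used by
  // MediaCodecBridge/MediaSourcePlayer in this CL.
  AudioByteClock clock(2, 2, 44100);
  clock.SetBaseTimestamp(std::chrono::microseconds(0));
  clock.AddBytes(44100 * 2 * 2);  // One second worth of decoded audio output.
  std::cout << clock.GetTimestamp().count() << " us\n";  // Prints 1000000 us.
  return 0;
}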
Diffstat (limited to 'media')
-rw-r--r--  media/base/android/java/src/org/chromium/media/MediaCodecBridge.java    2
-rw-r--r--  media/base/android/media_source_player.cc                             236
-rw-r--r--  media/base/android/media_source_player.h                               71
-rw-r--r--  media/base/android/media_source_player_unittest.cc                     89
4 files changed, 316 insertions, 82 deletions
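The data-starvation handling described in point 2 of the CL description amounts to arming a cancelable timeout sized to roughly how long the already-buffered output should last; if the next decode does not finish before the timeout fires, OnDecoderStarved() sets sync_decoder_jobs_ so both decoder jobs resynchronize and start together once data arrives. A simplified single-threaded sketch of that pattern, with a hypothetical StarvationTimer standing in for base::CancelableClosure plus PostDelayedTask:

// Sketch only: models the starvation timeout with an explicit Poll() call
// instead of a real message loop; StarvationTimer is not a Chromium class.
#include <chrono>
#include <functional>
#include <iostream>
#include <optional>

class StarvationTimer {
 public:
  using Clock = std::chrono::steady_clock;

  // Arms the timeout, like StartStarvationCallback() in the CL.
  void Start(Clock::duration timeout, std::function<void()> on_starved) {
    deadline_ = Clock::now() + timeout;
    on_starved_ = std::move(on_starved);
  }

  // Called when data arrives in time, like decoder_starvation_callback_.Cancel().
  void Cancel() { deadline_.reset(); }

  // Stand-in for the delayed task running on the message loop.
  void Poll() {
    if (deadline_ && Clock::now() >= *deadline_) {
      deadline_.reset();
      on_starved_();
    }
  }

 private:
  std::optional<Clock::time_point> deadline_;
  std::function<void()> on_starved_;
};

int main() {
  bool sync_decoder_jobs = false;  // Mirrors MediaSourcePlayer::sync_decoder_jobs_.
  StarvationTimer timer;
  timer.Start(std::chrono::milliseconds(0),
              [&] { sync_decoder_jobs = true; });  // OnDecoderStarved().
  timer.Poll();  // Timeout elapsed without new data: request a resync.
  std::cout << std::boolalpha << sync_decoder_jobs << "\n";  // Prints true.
  return 0;
}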
diff --git a/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java b/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java
index 86464b7..18bc529 100644
--- a/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java
+++ b/media/base/android/java/src/org/chromium/media/MediaCodecBridge.java
@@ -218,6 +218,8 @@ class MediaCodecBridge {
int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
int channelConfig = (channelCount == 1) ? AudioFormat.CHANNEL_OUT_MONO :
AudioFormat.CHANNEL_OUT_STEREO;
+ // Using 16bit PCM for output. Keep this value in sync with
+ // kBytesPerAudioOutputSample in media_codec_bridge.cc.
int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig,
AudioFormat.ENCODING_PCM_16BIT);
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
diff --git a/media/base/android/media_source_player.cc b/media/base/android/media_source_player.cc
index 96d89bc..f76b4da 100644
--- a/media/base/android/media_source_player.cc
+++ b/media/base/android/media_source_player.cc
@@ -15,6 +15,7 @@
#include "media/base/android/media_codec_bridge.h"
#include "media/base/android/media_drm_bridge.h"
#include "media/base/android/media_player_manager.h"
+#include "media/base/audio_timestamp_helper.h"
namespace {
@@ -23,6 +24,10 @@ namespace {
// here. See b/9357571.
const int kMediaCodecTimeoutInMicroseconds = 250000;
+// Use 16bit PCM for audio output. Keep this value in sync with the output
+// format we passed to AudioTrack in MediaCodecBridge.
+const int kBytesPerAudioOutputSample = 2;
+
class DecoderThread : public base::Thread {
public:
virtual ~DecoderThread() {}
@@ -96,7 +101,7 @@ class VideoDecoderJob : public MediaDecoderJob {
void MediaDecoderJob::Decode(
const MediaPlayerHostMsg_ReadFromDemuxerAck_Params::AccessUnit& unit,
- const base::TimeTicks& start_wallclock_time,
+ const base::TimeTicks& start_time_ticks,
const base::TimeDelta& start_presentation_timestamp,
const MediaDecoderJob::DecoderCallback& callback) {
DCHECK(!is_decoding_);
@@ -104,14 +109,14 @@ void MediaDecoderJob::Decode(
is_decoding_ = true;
decoder_loop_->PostTask(FROM_HERE, base::Bind(
&MediaDecoderJob::DecodeInternal, base::Unretained(this), unit,
- start_wallclock_time, start_presentation_timestamp, needs_flush_,
+ start_time_ticks, start_presentation_timestamp, needs_flush_,
callback));
needs_flush_ = false;
}
void MediaDecoderJob::DecodeInternal(
const MediaPlayerHostMsg_ReadFromDemuxerAck_Params::AccessUnit& unit,
- const base::TimeTicks& start_wallclock_time,
+ const base::TimeTicks& start_time_ticks,
const base::TimeDelta& start_presentation_timestamp,
bool needs_flush,
const MediaDecoderJob::DecoderCallback& callback) {
@@ -122,8 +127,7 @@ void MediaDecoderJob::DecodeInternal(
int input_buf_index = media_codec_bridge_->DequeueInputBuffer(timeout);
if (input_buf_index == MediaCodecBridge::INFO_MEDIA_CODEC_ERROR) {
ui_loop_->PostTask(FROM_HERE, base::Bind(
- callback, DECODE_FAILED, start_presentation_timestamp,
- start_wallclock_time, false));
+ callback, DECODE_FAILED, start_presentation_timestamp, 0, false));
return;
}
// TODO(qinmin): skip frames if video is falling far behind.
@@ -162,9 +166,10 @@ void MediaDecoderJob::DecodeInternal(
if (size == 0 && end_of_stream)
break;
base::TimeDelta time_to_render;
- if (!start_wallclock_time.is_null()) {
+ DCHECK(!start_time_ticks.is_null());
+ if (!is_audio_) {
time_to_render = presentation_timestamp - (base::TimeTicks::Now() -
- start_wallclock_time + start_presentation_timestamp);
+ start_time_ticks + start_presentation_timestamp);
}
if (time_to_render >= base::TimeDelta()) {
base::MessageLoop::current()->PostDelayedTask(
@@ -175,7 +180,7 @@ void MediaDecoderJob::DecodeInternal(
time_to_render);
} else {
// TODO(qinmin): The codec is lagging behind, need to recalculate the
- // |start_presentation_timestamp_| and |start_wallclock_time_|.
+ // |start_presentation_timestamp_| and |start_time_ticks_|.
DVLOG(1) << (is_audio_ ? "audio " : "video ")
<< "codec is lagging behind :" << time_to_render.InMicroseconds();
ReleaseOutputBuffer(outputBufferIndex, size, presentation_timestamp,
@@ -184,8 +189,7 @@ void MediaDecoderJob::DecodeInternal(
return;
}
ui_loop_->PostTask(FROM_HERE, base::Bind(
- callback, decode_status, start_presentation_timestamp,
- start_wallclock_time, end_of_stream));
+ callback, decode_status, start_presentation_timestamp, 0, end_of_stream));
}
void MediaDecoderJob::ReleaseOutputBuffer(
@@ -200,8 +204,8 @@ void MediaDecoderJob::ReleaseOutputBuffer(
}
media_codec_bridge_->ReleaseOutputBuffer(outputBufferIndex, !is_audio_);
ui_loop_->PostTask(FROM_HERE, base::Bind(
- callback, DECODE_SUCCEEDED, presentation_timestamp,
- base::TimeTicks::Now(), end_of_stream));
+ callback, DECODE_SUCCEEDED, presentation_timestamp, is_audio_ ? size : 0,
+ end_of_stream));
}
void MediaDecoderJob::OnDecodeCompleted() {
@@ -286,12 +290,14 @@ MediaSourcePlayer::MediaSourcePlayer(
playing_(false),
is_audio_encrypted_(false),
is_video_encrypted_(false),
+ clock_(&default_tick_clock_),
reconfig_audio_decoder_(false),
reconfig_video_decoder_(false),
audio_access_unit_index_(0),
video_access_unit_index_(0),
waiting_for_audio_data_(false),
waiting_for_video_data_(false),
+ sync_decoder_jobs_(true),
weak_this_(this),
drm_bridge_(NULL) {
}
@@ -336,7 +342,7 @@ void MediaSourcePlayer::Pause() {
// MediaDecoderCallback(). In that case, decoding will continue when
// MediaDecoderCallback() is called.
playing_ = false;
- start_wallclock_time_ = base::TimeTicks();
+ start_time_ticks_ = base::TimeTicks();
}
bool MediaSourcePlayer::IsPlaying() {
@@ -352,13 +358,15 @@ int MediaSourcePlayer::GetVideoHeight() {
}
void MediaSourcePlayer::SeekTo(base::TimeDelta timestamp) {
- last_presentation_timestamp_ = timestamp;
+ clock_.SetTime(timestamp, timestamp);
+ if (audio_timestamp_helper_)
+ audio_timestamp_helper_->SetBaseTimestamp(timestamp);
pending_event_ |= SEEK_EVENT_PENDING;
ProcessPendingEvents();
}
base::TimeDelta MediaSourcePlayer::GetCurrentTime() {
- return last_presentation_timestamp_;
+ return clock_.Elapsed();
}
base::TimeDelta MediaSourcePlayer::GetDuration() {
@@ -412,14 +420,10 @@ void MediaSourcePlayer::StartInternal() {
return;
}
- if (HasAudio() && !audio_decoder_job_->is_decoding()) {
- audio_finished_ = false;
- DecodeMoreAudio();
- }
- if (HasVideo() && !video_decoder_job_->is_decoding()) {
- video_finished_ = false;
- DecodeMoreVideo();
- }
+ audio_finished_ = false;
+ video_finished_ = false;
+ sync_decoder_jobs_ = true;
+ SyncAndStartDecoderJobs();
}
void MediaSourcePlayer::DemuxerReady(
@@ -434,7 +438,10 @@ void MediaSourcePlayer::DemuxerReady(
audio_extra_data_ = params.audio_extra_data;
is_audio_encrypted_ = params.is_audio_encrypted;
is_video_encrypted_ = params.is_video_encrypted;
-
+ clock_.SetDuration(duration_);
+ audio_timestamp_helper_.reset(new AudioTimestampHelper(
+ kBytesPerAudioOutputSample * num_channels_, sampling_rate_));
+ audio_timestamp_helper_->SetBaseTimestamp(GetCurrentTime());
OnMediaMetadataChanged(duration_, width_, height_, true);
if (pending_event_ & CONFIG_CHANGE_EVENT_PENDING) {
if (reconfig_audio_decoder_)
@@ -454,6 +461,7 @@ void MediaSourcePlayer::DemuxerReady(
void MediaSourcePlayer::ReadFromDemuxerAck(
const MediaPlayerHostMsg_ReadFromDemuxerAck_Params& params) {
+ DCHECK_LT(0u, params.access_units.size());
if (params.type == DemuxerStream::AUDIO)
waiting_for_audio_data_ = false;
else
@@ -467,18 +475,28 @@ void MediaSourcePlayer::ReadFromDemuxerAck(
if (params.type == DemuxerStream::AUDIO) {
DCHECK_EQ(0u, audio_access_unit_index_);
received_audio_ = params;
- if (!pending_event_)
- DecodeMoreAudio();
} else {
DCHECK_EQ(0u, video_access_unit_index_);
received_video_ = params;
- if (!pending_event_)
- DecodeMoreVideo();
}
+
+ if (pending_event_ != NO_EVENT_PENDING || !playing_)
+ return;
+
+ if (sync_decoder_jobs_) {
+ SyncAndStartDecoderJobs();
+ return;
+ }
+
+ if (params.type == DemuxerStream::AUDIO)
+ DecodeMoreAudio();
+ else
+ DecodeMoreVideo();
}
void MediaSourcePlayer::DurationChanged(const base::TimeDelta& duration) {
duration_ = duration;
+ clock_.SetDuration(duration_);
}
void MediaSourcePlayer::SetDrmBridge(MediaDrmBridge* drm_bridge) {
@@ -510,14 +528,15 @@ void MediaSourcePlayer::OnSeekRequestAck(unsigned seek_request_id) {
}
void MediaSourcePlayer::UpdateTimestamps(
- const base::TimeDelta& presentation_timestamp,
- const base::TimeTicks& wallclock_time) {
- last_presentation_timestamp_ = presentation_timestamp;
- OnTimeUpdated();
- if (start_wallclock_time_.is_null() && playing_) {
- start_wallclock_time_ = wallclock_time;
- start_presentation_timestamp_ = last_presentation_timestamp_;
+ const base::TimeDelta& presentation_timestamp, size_t audio_output_bytes) {
+ if (audio_output_bytes > 0) {
+ audio_timestamp_helper_->AddBytes(audio_output_bytes);
+ clock_.SetMaxTime(audio_timestamp_helper_->GetTimestamp());
+ } else {
+ clock_.SetMaxTime(presentation_timestamp);
}
+
+ OnTimeUpdated();
}
void MediaSourcePlayer::ProcessPendingEvents() {
@@ -530,11 +549,11 @@ void MediaSourcePlayer::ProcessPendingEvents() {
if (pending_event_ & SEEK_EVENT_PENDING) {
ClearDecodingData();
manager()->OnMediaSeekRequest(
- player_id(), last_presentation_timestamp_, ++seek_request_id_);
+ player_id(), GetCurrentTime(), ++seek_request_id_);
return;
}
- start_wallclock_time_ = base::TimeTicks();
+ start_time_ticks_ = base::TimeTicks();
if (pending_event_ & CONFIG_CHANGE_EVENT_PENDING) {
DCHECK(reconfig_audio_decoder_ || reconfig_video_decoder_);
manager()->OnMediaConfigRequest(player_id());
@@ -553,13 +572,16 @@ void MediaSourcePlayer::ProcessPendingEvents() {
void MediaSourcePlayer::MediaDecoderCallback(
bool is_audio, MediaDecoderJob::DecodeStatus decode_status,
- const base::TimeDelta& presentation_timestamp,
- const base::TimeTicks& wallclock_time, bool end_of_stream) {
+ const base::TimeDelta& presentation_timestamp, size_t audio_output_bytes,
+ bool end_of_stream) {
if (is_audio && audio_decoder_job_)
audio_decoder_job_->OnDecodeCompleted();
if (!is_audio && video_decoder_job_)
video_decoder_job_->OnDecodeCompleted();
+ if (is_audio)
+ decoder_starvation_callback_.Cancel();
+
if (decode_status == MediaDecoderJob::DECODE_FAILED) {
Release();
OnMediaError(MEDIA_ERROR_DECODE);
@@ -577,32 +599,54 @@ void MediaSourcePlayer::MediaDecoderCallback(
}
if (is_audio || !HasAudio())
- UpdateTimestamps(presentation_timestamp, wallclock_time);
+ UpdateTimestamps(presentation_timestamp, audio_output_bytes);
if (end_of_stream) {
PlaybackCompleted(is_audio);
return;
}
- if (!playing_)
+ if (!playing_) {
+ if (is_audio || !HasAudio())
+ clock_.Pause();
return;
+ }
- if (is_audio)
- DecodeMoreAudio();
+ if (sync_decoder_jobs_) {
+ SyncAndStartDecoderJobs();
+ return;
+ }
+
+ base::TimeDelta current_timestamp = GetCurrentTime();
+ if (is_audio) {
+ base::TimeDelta timeout =
+ audio_timestamp_helper_->GetTimestamp() - current_timestamp;
+ StartStarvationCallback(timeout);
+ if (!HasAudioData())
+ RequestAudioData();
+ else
+ DecodeMoreAudio();
+ return;
+ }
+
+ if (!HasAudio()) {
+ DCHECK(current_timestamp <= presentation_timestamp);
+ // For video only streams, fps can be estimated from the difference
+ // between the previous and current presentation timestamps. The
+ // previous presentation timestamp is equal to current_timestamp.
+ // TODO(qinmin): determine whether 2 is a good coefficient for estimating
+ // video frame timeout.
+ StartStarvationCallback(2 * (presentation_timestamp - current_timestamp));
+ }
+ if (!HasVideoData())
+ RequestVideoData();
else
DecodeMoreVideo();
}
void MediaSourcePlayer::DecodeMoreAudio() {
- if (audio_access_unit_index_ >= received_audio_.access_units.size()) {
- if (!waiting_for_audio_data_) {
- manager()->OnReadFromDemuxer(player_id(), DemuxerStream::AUDIO);
- received_audio_ = MediaPlayerHostMsg_ReadFromDemuxerAck_Params();
- audio_access_unit_index_ = 0;
- waiting_for_audio_data_ = true;
- }
- return;
- }
+ DCHECK(!audio_decoder_job_->is_decoding());
+ DCHECK(HasAudioData());
if (DemuxerStream::kConfigChanged ==
received_audio_.access_units[audio_access_unit_index_].status) {
@@ -617,21 +661,14 @@ void MediaSourcePlayer::DecodeMoreAudio() {
audio_decoder_job_->Decode(
received_audio_.access_units[audio_access_unit_index_],
- start_wallclock_time_, start_presentation_timestamp_,
+ start_time_ticks_, start_presentation_timestamp_,
base::Bind(&MediaSourcePlayer::MediaDecoderCallback,
weak_this_.GetWeakPtr(), true));
}
void MediaSourcePlayer::DecodeMoreVideo() {
- if (video_access_unit_index_ >= received_video_.access_units.size()) {
- if (!waiting_for_video_data_) {
- manager()->OnReadFromDemuxer(player_id(), DemuxerStream::VIDEO);
- received_video_ = MediaPlayerHostMsg_ReadFromDemuxerAck_Params();
- video_access_unit_index_ = 0;
- waiting_for_video_data_ = true;
- }
- return;
- }
+ DCHECK(!video_decoder_job_->is_decoding());
+ DCHECK(HasVideoData());
if (DemuxerStream::kConfigChanged ==
received_video_.access_units[video_access_unit_index_].status) {
@@ -646,7 +683,7 @@ void MediaSourcePlayer::DecodeMoreVideo() {
video_decoder_job_->Decode(
received_video_.access_units[video_access_unit_index_],
- start_wallclock_time_, start_presentation_timestamp_,
+ start_time_ticks_, start_presentation_timestamp_,
base::Bind(&MediaSourcePlayer::MediaDecoderCallback,
weak_this_.GetWeakPtr(), false));
}
@@ -659,7 +696,8 @@ void MediaSourcePlayer::PlaybackCompleted(bool is_audio) {
if ((!HasAudio() || audio_finished_) && (!HasVideo() || video_finished_)) {
playing_ = false;
- start_wallclock_time_ = base::TimeTicks();
+ clock_.Pause();
+ start_time_ticks_ = base::TimeTicks();
OnPlaybackComplete();
}
}
@@ -669,7 +707,7 @@ void MediaSourcePlayer::ClearDecodingData() {
audio_decoder_job_->Flush();
if (video_decoder_job_)
video_decoder_job_->Flush();
- start_wallclock_time_ = base::TimeTicks();
+ start_time_ticks_ = base::TimeTicks();
received_audio_ = MediaPlayerHostMsg_ReadFromDemuxerAck_Params();
received_video_ = MediaPlayerHostMsg_ReadFromDemuxerAck_Params();
audio_access_unit_index_ = 0;
@@ -759,4 +797,72 @@ void MediaSourcePlayer::ConfigureVideoDecoderJob() {
OnMediaMetadataChanged(duration_, width_, height_, true);
}
+void MediaSourcePlayer::OnDecoderStarved() {
+ sync_decoder_jobs_ = true;
+}
+
+void MediaSourcePlayer::StartStarvationCallback(
+ const base::TimeDelta& timeout) {
+ decoder_starvation_callback_.Reset(
+ base::Bind(&MediaSourcePlayer::OnDecoderStarved,
+ weak_this_.GetWeakPtr()));
+ base::MessageLoop::current()->PostDelayedTask(
+ FROM_HERE, decoder_starvation_callback_.callback(), timeout);
+}
+
+void MediaSourcePlayer::SyncAndStartDecoderJobs() {
+ // For streams with both audio and video, send the request for video too.
+ // However, don't wait for the response so that we won't have lots of
+ // noticeable pauses in the audio. Video will sync with audio by itself.
+ if (HasVideo() && !HasVideoData()) {
+ RequestVideoData();
+ if (!HasAudio())
+ return;
+ }
+ if (HasAudio() && !HasAudioData()) {
+ RequestAudioData();
+ return;
+ }
+ start_time_ticks_ = base::TimeTicks::Now();
+ start_presentation_timestamp_ = GetCurrentTime();
+ if (!clock_.IsPlaying())
+ clock_.Play();
+ if (HasAudioData() && !audio_decoder_job_->is_decoding())
+ DecodeMoreAudio();
+ if (HasVideoData() && !video_decoder_job_->is_decoding())
+ DecodeMoreVideo();
+ sync_decoder_jobs_ = false;
+}
+
+void MediaSourcePlayer::RequestAudioData() {
+ DCHECK(HasAudio());
+
+ if (waiting_for_audio_data_)
+ return;
+
+ manager()->OnReadFromDemuxer(player_id(), DemuxerStream::AUDIO);
+ received_audio_ = MediaPlayerHostMsg_ReadFromDemuxerAck_Params();
+ audio_access_unit_index_ = 0;
+ waiting_for_audio_data_ = true;
+}
+
+void MediaSourcePlayer::RequestVideoData() {
+ DCHECK(HasVideo());
+ if (waiting_for_video_data_)
+ return;
+
+ manager()->OnReadFromDemuxer(player_id(), DemuxerStream::VIDEO);
+ received_video_ = MediaPlayerHostMsg_ReadFromDemuxerAck_Params();
+ video_access_unit_index_ = 0;
+ waiting_for_video_data_ = true;
+}
+
+bool MediaSourcePlayer::HasAudioData() const {
+ return audio_access_unit_index_ < received_audio_.access_units.size();
+}
+
+bool MediaSourcePlayer::HasVideoData() const {
+ return video_access_unit_index_ < received_video_.access_units.size();
+}
+
} // namespace media
diff --git a/media/base/android/media_source_player.h b/media/base/android/media_source_player.h
index 5448e109..b3be398 100644
--- a/media/base/android/media_source_player.h
+++ b/media/base/android/media_source_player.h
@@ -12,13 +12,16 @@
#include "base/android/scoped_java_ref.h"
#include "base/callback.h"
+#include "base/cancelable_callback.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/weak_ptr.h"
#include "base/threading/thread.h"
+#include "base/time/default_tick_clock.h"
#include "base/time/time.h"
#include "media/base/android/demuxer_stream_player_params.h"
#include "media/base/android/media_codec_bridge.h"
#include "media/base/android/media_player_android.h"
+#include "media/base/clock.h"
#include "media/base/media_export.h"
namespace base {
@@ -27,8 +30,9 @@ class MessageLoopProxy;
namespace media {
-class VideoDecoderJob;
class AudioDecoderJob;
+class AudioTimestampHelper;
+class VideoDecoderJob;
// Class for managing all the decoding tasks. Each decoding task will be posted
// onto the same thread. The thread will be stopped once Stop() is called.
@@ -43,15 +47,15 @@ class MediaDecoderJob {
virtual ~MediaDecoderJob();
// Callback when a decoder job finishes its work. Args: whether decode
- // finished successfully, presentation time, timestamp when the data is
- // rendered, whether decoder is reaching EOS.
+ // finished successfully, presentation time, audio output bytes, whether
+ // decoder is reaching EOS.
typedef base::Callback<void(DecodeStatus, const base::TimeDelta&,
- const base::TimeTicks&, bool)> DecoderCallback;
+ size_t, bool)> DecoderCallback;
// Called by MediaSourcePlayer to decode some data.
void Decode(
const MediaPlayerHostMsg_ReadFromDemuxerAck_Params::AccessUnit& unit,
- const base::TimeTicks& start_wallclock_time,
+ const base::TimeTicks& start_time_ticks,
const base::TimeDelta& start_presentation_timestamp,
const MediaDecoderJob::DecoderCallback& callback);
@@ -82,14 +86,14 @@ class MediaDecoderJob {
bool end_of_stream, const MediaDecoderJob::DecoderCallback& callback);
// Helper function to decode data on |thread_|. |unit| contains all the data
- // to be decoded. |start_wallclock_time| and |start_presentation_timestamp|
+ // to be decoded. |start_time_ticks| and |start_presentation_timestamp|
// represent the system time and the presentation timestamp when the first
// frame is rendered. We use this information to estimate when the current
// frame should be rendered. If |needs_flush| is true, codec needs to be
// flushed at the beginning of this call.
void DecodeInternal(
const MediaPlayerHostMsg_ReadFromDemuxerAck_Params::AccessUnit& unit,
- const base::TimeTicks& start_wallclock_time,
+ const base::TimeTicks& start_time_ticks,
const base::TimeDelta& start_presentation_timestamp,
bool needs_flush,
const MediaDecoderJob::DecoderCallback& callback);
@@ -128,8 +132,7 @@ class MEDIA_EXPORT MediaSourcePlayer : public MediaPlayerAndroid {
public:
// Construct a MediaSourcePlayer object with all the needed media player
// callbacks.
- MediaSourcePlayer(int player_id,
- MediaPlayerManager* manager);
+ MediaSourcePlayer(int player_id, MediaPlayerManager* manager);
virtual ~MediaSourcePlayer();
// MediaPlayerAndroid implementation.
@@ -157,10 +160,9 @@ class MEDIA_EXPORT MediaSourcePlayer : public MediaPlayerAndroid {
virtual void SetDrmBridge(MediaDrmBridge* drm_bridge) OVERRIDE;
private:
- // Update the timestamps for A/V sync scheduling.
- void UpdateTimestamps(
- const base::TimeDelta& presentation_timestamp,
- const base::TimeTicks& wallclock_time);
+ // Update the current timestamp.
+ void UpdateTimestamps(const base::TimeDelta& presentation_timestamp,
+ size_t audio_output_bytes);
// Helper function for starting media playback.
void StartInternal();
@@ -172,7 +174,7 @@ class MEDIA_EXPORT MediaSourcePlayer : public MediaPlayerAndroid {
void MediaDecoderCallback(
bool is_audio, MediaDecoderJob::DecodeStatus decode_status,
const base::TimeDelta& presentation_timestamp,
- const base::TimeTicks& wallclock_time, bool end_of_stream);
+ size_t audio_output_bytes, bool end_of_stream);
// Handle pending events when all the decoder jobs finished.
void ProcessPendingEvents();
@@ -195,6 +197,27 @@ class MEDIA_EXPORT MediaSourcePlayer : public MediaPlayerAndroid {
// Determine seekability based on duration.
bool Seekable();
+ // Called when the |decoder_starvation_callback_| times out.
+ void OnDecoderStarved();
+
+ // Starts the |decoder_starvation_callback_| task with the timeout value.
+ void StartStarvationCallback(const base::TimeDelta& timeout);
+
+ // Called to sync decoder jobs. This call requests data from chunk demuxer
+ // first. Then it updates |start_time_ticks_| and
+ // |start_presentation_timestamp_| so that video can resync with audio.
+ void SyncAndStartDecoderJobs();
+
+ // Functions that send IPC requests to the renderer process for more
+ // audio/video data. No new request is sent if a previous request is still
+ // pending; the corresponding waiting_for_*_data_ flag is set while waiting.
+ void RequestAudioData();
+ void RequestVideoData();
+
+ // Check whether audio or video data is available for decoders to consume.
+ bool HasAudioData() const;
+ bool HasVideoData() const;
+
enum PendingEventFlags {
NO_EVENT_PENDING = 0,
SEEK_EVENT_PENDING = 1 << 0,
@@ -215,7 +238,6 @@ class MEDIA_EXPORT MediaSourcePlayer : public MediaPlayerAndroid {
VideoCodec video_codec_;
int num_channels_;
int sampling_rate_;
- base::TimeDelta last_presentation_timestamp_;
std::vector<uint8> audio_extra_data_;
bool audio_finished_;
bool video_finished_;
@@ -223,13 +245,19 @@ class MEDIA_EXPORT MediaSourcePlayer : public MediaPlayerAndroid {
bool is_audio_encrypted_;
bool is_video_encrypted_;
+ // base::TickClock used by |clock_|.
+ base::DefaultTickClock default_tick_clock_;
+
+ // Reference clock. Keeps track of current playback time.
+ Clock clock_;
+
// Timestamps for providing simple A/V sync. When we start decoding an audio
// chunk, we record its presentation timestamp and the current system time.
// Then we use this information to estimate when the next audio/video frame
// should be rendered.
// TODO(qinmin): Need to fix the problem if audio/video lagged too far behind
// due to network or decoding problem.
- base::TimeTicks start_wallclock_time_;
+ base::TimeTicks start_time_ticks_;
base::TimeDelta start_presentation_timestamp_;
// The surface object currently owned by the player.
@@ -252,6 +280,17 @@ class MEDIA_EXPORT MediaSourcePlayer : public MediaPlayerAndroid {
MediaPlayerHostMsg_ReadFromDemuxerAck_Params received_audio_;
MediaPlayerHostMsg_ReadFromDemuxerAck_Params received_video_;
+ // A cancelable task that is posted when the audio decoder starts requesting
+ // new data. This callback runs if no data arrives before the timeout period
+ // elapses.
+ base::CancelableClosure decoder_starvation_callback_;
+
+ // Whether the audio and video decoder jobs should resync with each other.
+ bool sync_decoder_jobs_;
+
+ // Object to calculate the current audio timestamp for A/V sync.
+ scoped_ptr<AudioTimestampHelper> audio_timestamp_helper_;
+
// Weak pointer passed to media decoder jobs for callbacks.
base::WeakPtrFactory<MediaSourcePlayer> weak_this_;
diff --git a/media/base/android/media_source_player_unittest.cc b/media/base/android/media_source_player_unittest.cc
index b1a1d01..4d70fa8 100644
--- a/media/base/android/media_source_player_unittest.cc
+++ b/media/base/android/media_source_player_unittest.cc
@@ -141,6 +141,23 @@ class MediaSourcePlayerTest : public testing::Test {
return ack_params;
}
+ MediaPlayerHostMsg_ReadFromDemuxerAck_Params
+ CreateReadFromDemuxerAckForVideo() {
+ MediaPlayerHostMsg_ReadFromDemuxerAck_Params ack_params;
+ ack_params.type = DemuxerStream::VIDEO;
+ ack_params.access_units.resize(1);
+ ack_params.access_units[0].status = DemuxerStream::kOk;
+ scoped_refptr<DecoderBuffer> buffer =
+ ReadTestDataFile("vp8-I-frame-320x240");
+ ack_params.access_units[0].data = std::vector<uint8>(
+ buffer->GetData(), buffer->GetData() + buffer->GetDataSize());
+ return ack_params;
+ }
+
+ base::TimeTicks StartTimeTicks() {
+ return player_->start_time_ticks_;
+ }
+
protected:
scoped_ptr<MockMediaPlayerManager> manager_;
scoped_ptr<MediaSourcePlayer> player_;
@@ -267,7 +284,6 @@ TEST_F(MediaSourcePlayerTest, StartAfterSeekFinish) {
return;
// Test that the decoder job will not start until all pending seek events are handled.
-
MediaPlayerHostMsg_DemuxerReady_Params params;
params.audio_codec = kCodecVorbis;
params.audio_channels = 2;
@@ -325,4 +341,75 @@ TEST_F(MediaSourcePlayerTest, StartImmediatelyAfterPause) {
EXPECT_FALSE(GetMediaDecoderJob(true)->is_decoding());
}
+TEST_F(MediaSourcePlayerTest, DecoderJobsCannotStartWithoutAudio) {
+ if (!MediaCodecBridge::IsAvailable())
+ return;
+
+ // Test that when Start() is called, the video decoder job will wait for the
+ // audio decoder job before it starts decoding data.
+ MediaPlayerHostMsg_DemuxerReady_Params params;
+ params.audio_codec = kCodecVorbis;
+ params.audio_channels = 2;
+ params.audio_sampling_rate = 44100;
+ params.is_audio_encrypted = false;
+ scoped_refptr<DecoderBuffer> buffer = ReadTestDataFile("vorbis-extradata");
+ params.audio_extra_data = std::vector<uint8>(
+ buffer->GetData(),
+ buffer->GetData() + buffer->GetDataSize());
+ params.video_codec = kCodecVP8;
+ params.video_size = gfx::Size(320, 240);
+ params.is_video_encrypted = false;
+ Start(params);
+ EXPECT_EQ(0, manager_->num_requests());
+
+ scoped_refptr<gfx::SurfaceTextureBridge> surface_texture(
+ new gfx::SurfaceTextureBridge(0));
+ gfx::ScopedJavaSurface surface(surface_texture.get());
+ player_->SetVideoSurface(surface.Pass());
+ EXPECT_EQ(1u, manager_->last_seek_request_id());
+ player_->OnSeekRequestAck(manager_->last_seek_request_id());
+
+ MediaDecoderJob* audio_decoder_job = GetMediaDecoderJob(true);
+ MediaDecoderJob* video_decoder_job = GetMediaDecoderJob(false);
+ EXPECT_EQ(2, manager_->num_requests());
+ EXPECT_FALSE(audio_decoder_job->is_decoding());
+ EXPECT_FALSE(video_decoder_job->is_decoding());
+
+ // Sending video data to the player; the video decoder should not start.
+ player_->ReadFromDemuxerAck(CreateReadFromDemuxerAckForVideo());
+ EXPECT_FALSE(video_decoder_job->is_decoding());
+
+ // Sending audio data to the player; both decoders should start now.
+ player_->ReadFromDemuxerAck(CreateReadFromDemuxerAckForAudio());
+ EXPECT_TRUE(audio_decoder_job->is_decoding());
+ EXPECT_TRUE(video_decoder_job->is_decoding());
+}
+
+TEST_F(MediaSourcePlayerTest, StartTimeTicksResetAfterDecoderUnderruns) {
+ if (!MediaCodecBridge::IsAvailable())
+ return;
+
+ // Test that the start time ticks are reset after the decoder job underruns.
+ StartAudioDecoderJob();
+ EXPECT_TRUE(NULL != GetMediaDecoderJob(true));
+ EXPECT_EQ(1, manager_->num_requests());
+ player_->ReadFromDemuxerAck(CreateReadFromDemuxerAckForAudio());
+ EXPECT_TRUE(GetMediaDecoderJob(true)->is_decoding());
+
+ manager_->message_loop()->Run();
+ // The decoder job should finish and a new request will be sent.
+ EXPECT_EQ(2, manager_->num_requests());
+ EXPECT_FALSE(GetMediaDecoderJob(true)->is_decoding());
+ base::TimeTicks previous = StartTimeTicks();
+
+ // Let the decoder time out and execute the OnDecoderStarved() callback.
+ base::PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(100));
+ manager_->message_loop()->RunUntilIdle();
+
+ // Send new data to the decoder. This should reset the start time ticks.
+ player_->ReadFromDemuxerAck(CreateReadFromDemuxerAckForAudio());
+ base::TimeTicks current = StartTimeTicks();
+ EXPECT_LE(100.0, (current - previous).InMillisecondsF());
+}
+
} // namespace media