diff options
Diffstat (limited to 'media/libstagefright')
139 files changed, 6275 insertions, 4931 deletions
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 9e1c62a..f11791c 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -36,6 +36,7 @@ #include <media/hardware/HardwareAPI.h> #include <OMX_AudioExt.h> +#include <OMX_VideoExt.h> #include <OMX_Component.h> #include <OMX_IndexExt.h> @@ -365,7 +366,7 @@ ACodec::ACodec() mIsEncoder(false), mUseMetadataOnEncoderOutput(false), mShutdownInProgress(false), - mIsConfiguredForAdaptivePlayback(false), + mExplicitShutdown(false), mEncoderDelay(0), mEncoderPadding(0), mChannelMaskPresent(false), @@ -374,7 +375,10 @@ ACodec::ACodec() mStoreMetaDataInOutputBuffers(false), mMetaDataBuffersToSubmit(0), mRepeatFrameDelayUs(-1ll), - mMaxPtsGapUs(-1l) { + mMaxPtsGapUs(-1ll), + mTimePerCaptureUs(-1ll), + mTimePerFrameUs(-1ll), + mCreateInputBuffersSuspended(false) { mUninitializedState = new UninitializedState(this); mLoadedState = new LoadedState(this); mLoadedToIdleState = new LoadedToIdleState(this); @@ -640,18 +644,34 @@ status_t ACodec::configureOutputBuffersFromNativeWindow( return err; } - // XXX: Is this the right logic to use? It's not clear to me what the OMX - // buffer counts refer to - how do they account for the renderer holding on - // to buffers? - if (def.nBufferCountActual < def.nBufferCountMin + *minUndequeuedBuffers) { - OMX_U32 newBufferCount = def.nBufferCountMin + *minUndequeuedBuffers; + // FIXME: assume that surface is controlled by app (native window + // returns the number for the case when surface is not controlled by app) + // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported + // For now, try to allocate 1 more buffer, but don't fail if unsuccessful + + // Use conservative allocation while also trying to reduce starvation + // + // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the + // minimum needed for the consumer to be able to work + // 2. 
try to allocate two (2) additional buffers to reduce starvation from + // the consumer + // plus an extra buffer to account for incorrect minUndequeuedBufs + for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) { + OMX_U32 newBufferCount = + def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers; def.nBufferCountActual = newBufferCount; err = mOMX->setParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - if (err != OK) { - ALOGE("[%s] setting nBufferCountActual to %lu failed: %d", - mComponentName.c_str(), newBufferCount, err); + if (err == OK) { + *minUndequeuedBuffers += extraBuffers; + break; + } + + ALOGW("[%s] setting nBufferCountActual to %lu failed: %d", + mComponentName.c_str(), newBufferCount, err); + /* exit condition */ + if (extraBuffers == 0) { return err; } } @@ -676,6 +696,7 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() { &bufferCount, &bufferSize, &minUndequeuedBuffers); if (err != 0) return err; + mNumUndequeuedBuffers = minUndequeuedBuffers; ALOGV("[%s] Allocating %lu buffers from a native window of size %lu on " "output port", @@ -741,6 +762,7 @@ status_t ACodec::allocateOutputMetaDataBuffers() { &bufferCount, &bufferSize, &minUndequeuedBuffers); if (err != 0) return err; + mNumUndequeuedBuffers = minUndequeuedBuffers; ALOGV("[%s] Allocating %lu meta buffers on output port", mComponentName.c_str(), bufferCount); @@ -961,6 +983,8 @@ status_t ACodec::setComponentRole( "audio_decoder.aac", "audio_encoder.aac" }, { MEDIA_MIMETYPE_AUDIO_VORBIS, "audio_decoder.vorbis", "audio_encoder.vorbis" }, + { MEDIA_MIMETYPE_AUDIO_OPUS, + "audio_decoder.opus", "audio_encoder.opus" }, { MEDIA_MIMETYPE_AUDIO_G711_MLAW, "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" }, { MEDIA_MIMETYPE_AUDIO_G711_ALAW, @@ -1036,6 +1060,9 @@ status_t ACodec::configureCodec( encoder = false; } + sp<AMessage> inputFormat = new AMessage(); + sp<AMessage> outputFormat = new AMessage(); + mIsEncoder = encoder; status_t err = 
setComponentRole(encoder /* isEncoder */, mime); @@ -1118,7 +1145,17 @@ status_t ACodec::configureCodec( } if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { - mMaxPtsGapUs = -1l; + mMaxPtsGapUs = -1ll; + } + + if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { + mTimePerCaptureUs = -1ll; + } + + if (!msg->findInt32( + "create-input-buffers-suspended", + (int32_t*)&mCreateInputBuffersSuspended)) { + mCreateInputBuffersSuspended = false; } } @@ -1127,7 +1164,9 @@ status_t ACodec::configureCodec( int32_t haveNativeWindow = msg->findObject("native-window", &obj) && obj != NULL; mStoreMetaDataInOutputBuffers = false; - mIsConfiguredForAdaptivePlayback = false; + if (video && !encoder) { + inputFormat->setInt32("adaptive-playback", false); + } if (!encoder && video && haveNativeWindow) { err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_TRUE); if (err != OK) { @@ -1172,14 +1211,19 @@ status_t ACodec::configureCodec( ALOGW_IF(err != OK, "[%s] prepareForAdaptivePlayback failed w/ err %d", mComponentName.c_str(), err); - mIsConfiguredForAdaptivePlayback = (err == OK); + + if (err == OK) { + inputFormat->setInt32("max-width", maxWidth); + inputFormat->setInt32("max-height", maxHeight); + inputFormat->setInt32("adaptive-playback", true); + } } // allow failure err = OK; } else { ALOGV("[%s] storeMetaDataInBuffers succeeded", mComponentName.c_str()); mStoreMetaDataInOutputBuffers = true; - mIsConfiguredForAdaptivePlayback = true; + inputFormat->setInt32("adaptive-playback", true); } int32_t push; @@ -1319,6 +1363,11 @@ status_t ACodec::configureCodec( err = setMinBufferSize(kPortIndexInput, 8192); // XXX } + CHECK_EQ(getPortFormat(kPortIndexInput, inputFormat), (status_t)OK); + CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK); + mInputFormat = inputFormat; + mOutputFormat = outputFormat; + return err; } @@ -1909,6 +1958,7 @@ status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) { return 
INVALID_OPERATION; } frameRate = (float)tmp; + mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); } video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); @@ -2321,12 +2371,81 @@ status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { int32_t bitrate; + int32_t iFrameInterval = 0; + size_t tsLayers = 0; + OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = + OMX_VIDEO_VPXTemporalLayerPatternNone; + static const uint32_t kVp8LayerRateAlloction + [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] + [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { + {100, 100, 100}, // 1 layer + { 60, 100, 100}, // 2 layers {60%, 40%} + { 40, 60, 100}, // 3 layers {40%, 20%, 40%} + }; if (!msg->findInt32("bitrate", &bitrate)) { return INVALID_OPERATION; } + msg->findInt32("i-frame-interval", &iFrameInterval); OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); + float frameRate; + if (!msg->findFloat("frame-rate", &frameRate)) { + int32_t tmp; + if (!msg->findInt32("frame-rate", &tmp)) { + return INVALID_OPERATION; + } + frameRate = (float)tmp; + } + + AString tsSchema; + if (msg->findString("ts-schema", &tsSchema)) { + if (tsSchema == "webrtc.vp8.1-layer") { + pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; + tsLayers = 1; + } else if (tsSchema == "webrtc.vp8.2-layer") { + pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; + tsLayers = 2; + } else if (tsSchema == "webrtc.vp8.3-layer") { + pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; + tsLayers = 3; + } else { + ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str()); + } + } + + OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; + InitOMXParams(&vp8type); + vp8type.nPortIndex = kPortIndexOutput; + status_t err = mOMX->getParameter( + mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, + &vp8type, sizeof(vp8type)); + + if (err == OK) { + if (iFrameInterval > 0) { + vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, 
frameRate); + } + vp8type.eTemporalPattern = pattern; + vp8type.nTemporalLayerCount = tsLayers; + if (tsLayers > 0) { + for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { + vp8type.nTemporalLayerBitrateRatio[i] = + kVp8LayerRateAlloction[tsLayers - 1][i]; + } + } + if (bitrateMode == OMX_Video_ControlRateConstant) { + vp8type.nMinQuantizer = 2; + vp8type.nMaxQuantizer = 63; + } + + err = mOMX->setParameter( + mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, + &vp8type, sizeof(vp8type)); + if (err != OK) { + ALOGW("Extended VP8 parameters set failed: %d", err); + } + } + return configureBitrate(bitrate, bitrateMode); } @@ -2482,19 +2601,7 @@ void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { return; } - int minUndequeuedBufs = 0; - status_t err = mNativeWindow->query( - mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, - &minUndequeuedBufs); - - if (err != OK) { - ALOGE("[%s] NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", - mComponentName.c_str(), strerror(-err), -err); - - minUndequeuedBufs = 0; - } - - while (countBuffersOwnedByNativeWindow() > (size_t)minUndequeuedBufs + while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers && dequeueBufferFromNativeWindow() != NULL) { // these buffers will be submitted as regular buffers; account for this if (mStoreMetaDataInOutputBuffers && mMetaDataBuffersToSubmit > 0) { @@ -2540,79 +2647,78 @@ void ACodec::processDeferredMessages() { } } -void ACodec::sendFormatChange(const sp<AMessage> &reply) { - sp<AMessage> notify = mNotify->dup(); - notify->setInt32("what", kWhatOutputFormatChanged); - +status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { + // TODO: catch errors an return them instead of using CHECK OMX_PARAM_PORTDEFINITIONTYPE def; InitOMXParams(&def); - def.nPortIndex = kPortIndexOutput; + def.nPortIndex = portIndex; CHECK_EQ(mOMX->getParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)), (status_t)OK); - 
CHECK_EQ((int)def.eDir, (int)OMX_DirOutput); + CHECK_EQ((int)def.eDir, + (int)(portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)); switch (def.eDomain) { case OMX_PortDomainVideo: { OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; + switch ((int)videoDef->eCompressionFormat) { + case OMX_VIDEO_CodingUnused: + { + CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); + notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); + + notify->setInt32("stride", videoDef->nStride); + notify->setInt32("slice-height", videoDef->nSliceHeight); + notify->setInt32("color-format", videoDef->eColorFormat); + + OMX_CONFIG_RECTTYPE rect; + InitOMXParams(&rect); + rect.nPortIndex = kPortIndexOutput; + + if (mOMX->getConfig( + mNode, OMX_IndexConfigCommonOutputCrop, + &rect, sizeof(rect)) != OK) { + rect.nLeft = 0; + rect.nTop = 0; + rect.nWidth = videoDef->nFrameWidth; + rect.nHeight = videoDef->nFrameHeight; + } - AString mime; - if (!mIsEncoder) { - notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); - } else if (GetMimeTypeForVideoCoding( - videoDef->eCompressionFormat, &mime) != OK) { - notify->setString("mime", "application/octet-stream"); - } else { - notify->setString("mime", mime.c_str()); - } - - notify->setInt32("width", videoDef->nFrameWidth); - notify->setInt32("height", videoDef->nFrameHeight); - - if (!mIsEncoder) { - notify->setInt32("stride", videoDef->nStride); - notify->setInt32("slice-height", videoDef->nSliceHeight); - notify->setInt32("color-format", videoDef->eColorFormat); - - OMX_CONFIG_RECTTYPE rect; - InitOMXParams(&rect); - rect.nPortIndex = kPortIndexOutput; - - if (mOMX->getConfig( - mNode, OMX_IndexConfigCommonOutputCrop, - &rect, sizeof(rect)) != OK) { - rect.nLeft = 0; - rect.nTop = 0; - rect.nWidth = videoDef->nFrameWidth; - rect.nHeight = videoDef->nFrameHeight; - } + CHECK_GE(rect.nLeft, 0); + CHECK_GE(rect.nTop, 0); + CHECK_GE(rect.nWidth, 0u); + CHECK_GE(rect.nHeight, 0u); + CHECK_LE(rect.nLeft + rect.nWidth - 1, 
videoDef->nFrameWidth); + CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight); - CHECK_GE(rect.nLeft, 0); - CHECK_GE(rect.nTop, 0); - CHECK_GE(rect.nWidth, 0u); - CHECK_GE(rect.nHeight, 0u); - CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth); - CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight); - - notify->setRect( - "crop", - rect.nLeft, - rect.nTop, - rect.nLeft + rect.nWidth - 1, - rect.nTop + rect.nHeight - 1); - - if (mNativeWindow != NULL) { - reply->setRect( + notify->setRect( "crop", rect.nLeft, rect.nTop, - rect.nLeft + rect.nWidth, - rect.nTop + rect.nHeight); + rect.nLeft + rect.nWidth - 1, + rect.nTop + rect.nHeight - 1); + + break; + } + default: + { + CHECK(mIsEncoder ^ (portIndex == kPortIndexInput)); + AString mime; + if (GetMimeTypeForVideoCoding( + videoDef->eCompressionFormat, &mime) != OK) { + notify->setString("mime", "application/octet-stream"); + } else { + notify->setString("mime", mime.c_str()); + } + break; } } + + notify->setInt32("width", videoDef->nFrameWidth); + notify->setInt32("height", videoDef->nFrameHeight); break; } @@ -2625,7 +2731,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) { { OMX_AUDIO_PARAM_PCMMODETYPE params; InitOMXParams(¶ms); - params.nPortIndex = kPortIndexOutput; + params.nPortIndex = portIndex; CHECK_EQ(mOMX->getParameter( mNode, OMX_IndexParamAudioPcm, @@ -2645,20 +2751,6 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) { notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); notify->setInt32("channel-count", params.nChannels); notify->setInt32("sample-rate", params.nSamplingRate); - if (mEncoderDelay + mEncoderPadding) { - size_t frameSize = params.nChannels * sizeof(int16_t); - if (mSkipCutBuffer != NULL) { - size_t prevbufsize = mSkipCutBuffer->size(); - if (prevbufsize != 0) { - ALOGW("Replacing SkipCutBuffer holding %d " - "bytes", - prevbufsize); - } - } - mSkipCutBuffer = new SkipCutBuffer( - mEncoderDelay * frameSize, - mEncoderPadding * 
frameSize); - } if (mChannelMaskPresent) { notify->setInt32("channel-mask", mChannelMask); @@ -2670,7 +2762,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) { { OMX_AUDIO_PARAM_AACPROFILETYPE params; InitOMXParams(¶ms); - params.nPortIndex = kPortIndexOutput; + params.nPortIndex = portIndex; CHECK_EQ(mOMX->getParameter( mNode, OMX_IndexParamAudioAac, @@ -2687,7 +2779,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) { { OMX_AUDIO_PARAM_AMRTYPE params; InitOMXParams(¶ms); - params.nPortIndex = kPortIndexOutput; + params.nPortIndex = portIndex; CHECK_EQ(mOMX->getParameter( mNode, OMX_IndexParamAudioAmr, @@ -2713,7 +2805,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) { { OMX_AUDIO_PARAM_FLACTYPE params; InitOMXParams(¶ms); - params.nPortIndex = kPortIndexOutput; + params.nPortIndex = portIndex; CHECK_EQ(mOMX->getParameter( mNode, OMX_IndexParamAudioFlac, @@ -2726,11 +2818,45 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) { break; } + case OMX_AUDIO_CodingMP3: + { + OMX_AUDIO_PARAM_MP3TYPE params; + InitOMXParams(¶ms); + params.nPortIndex = portIndex; + + CHECK_EQ(mOMX->getParameter( + mNode, OMX_IndexParamAudioMp3, + ¶ms, sizeof(params)), + (status_t)OK); + + notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); + notify->setInt32("channel-count", params.nChannels); + notify->setInt32("sample-rate", params.nSampleRate); + break; + } + + case OMX_AUDIO_CodingVORBIS: + { + OMX_AUDIO_PARAM_VORBISTYPE params; + InitOMXParams(¶ms); + params.nPortIndex = portIndex; + + CHECK_EQ(mOMX->getParameter( + mNode, OMX_IndexParamAudioVorbis, + ¶ms, sizeof(params)), + (status_t)OK); + + notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); + notify->setInt32("channel-count", params.nChannels); + notify->setInt32("sample-rate", params.nSampleRate); + break; + } + case OMX_AUDIO_CodingAndroidAC3: { OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; InitOMXParams(¶ms); - params.nPortIndex = kPortIndexOutput; + params.nPortIndex = portIndex; 
CHECK_EQ((status_t)OK, mOMX->getParameter( mNode, @@ -2745,6 +2871,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) { } default: + ALOGE("UNKNOWN AUDIO CODING: %d\n", audioDef->eEncoding); TRESPASS(); } break; @@ -2754,6 +2881,43 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) { TRESPASS(); } + return OK; +} + +void ACodec::sendFormatChange(const sp<AMessage> &reply) { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatOutputFormatChanged); + + CHECK_EQ(getPortFormat(kPortIndexOutput, notify), (status_t)OK); + + AString mime; + CHECK(notify->findString("mime", &mime)); + + int32_t left, top, right, bottom; + if (mime == MEDIA_MIMETYPE_VIDEO_RAW && + mNativeWindow != NULL && + notify->findRect("crop", &left, &top, &right, &bottom)) { + // notify renderer of the crop change + // NOTE: native window uses extended right-bottom coordinate + reply->setRect("crop", left, top, right + 1, bottom + 1); + } else if (mime == MEDIA_MIMETYPE_AUDIO_RAW && + (mEncoderDelay || mEncoderPadding)) { + int32_t channelCount; + CHECK(notify->findInt32("channel-count", &channelCount)); + size_t frameSize = channelCount * sizeof(int16_t); + if (mSkipCutBuffer != NULL) { + size_t prevbufsize = mSkipCutBuffer->size(); + if (prevbufsize != 0) { + ALOGW("Replacing SkipCutBuffer holding %d " + "bytes", + prevbufsize); + } + } + mSkipCutBuffer = new SkipCutBuffer( + mEncoderDelay * frameSize, + mEncoderPadding * frameSize); + } + notify->post(); mSentFormat = true; @@ -2960,7 +3124,8 @@ ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) mCodec(codec) { } -ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(OMX_U32 portIndex) { +ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( + OMX_U32 /* portIndex */) { return KEEP_BUFFERS; } @@ -3009,6 +3174,14 @@ bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { int32_t type; CHECK(msg->findInt32("type", &type)); + // there is a possibility that this is an 
outstanding message for a + // codec that we have already destroyed + if (mCodec->mNode == NULL) { + ALOGI("ignoring message as already freed component: %s", + msg->debugString().c_str()); + return true; + } + IOMX::node_id nodeID; CHECK(msg->findPointer("node", &nodeID)); CHECK_EQ(nodeID, mCodec->mNode); @@ -3369,8 +3542,8 @@ bool ACodec::BaseState::onOMXFillBufferDone( size_t rangeOffset, size_t rangeLength, OMX_U32 flags, int64_t timeUs, - void *platformPrivate, - void *dataPtr) { + void * /* platformPrivate */, + void * /* dataPtr */) { ALOGV("[%s] onOMXFillBufferDone %p time %lld us, flags = 0x%08lx", mCodec->mComponentName.c_str(), bufferID, timeUs, flags); @@ -3422,7 +3595,7 @@ bool ACodec::BaseState::onOMXFillBufferDone( sp<AMessage> reply = new AMessage(kWhatOutputBufferDrained, mCodec->id()); - if (!mCodec->mSentFormat) { + if (!mCodec->mSentFormat && rangeLength > 0) { mCodec->sendFormatChange(reply); } @@ -3620,7 +3793,8 @@ bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { int32_t keepComponentAllocated; CHECK(msg->findInt32( "keepComponentAllocated", &keepComponentAllocated)); - CHECK(!keepComponentAllocated); + ALOGW_IF(keepComponentAllocated, + "cannot keep component allocated on shutdown in Uninitialized state"); sp<AMessage> notify = mCodec->mNotify->dup(); notify->setInt32("what", ACodec::kWhatShutdownCompleted); @@ -3782,7 +3956,8 @@ void ACodec::LoadedState::stateEntered() { mCodec->mDequeueCounter = 0; mCodec->mMetaDataBuffersToSubmit = 0; mCodec->mRepeatFrameDelayUs = -1ll; - mCodec->mIsConfiguredForAdaptivePlayback = false; + mCodec->mInputFormat.clear(); + mCodec->mOutputFormat.clear(); if (mCodec->mShutdownInProgress) { bool keepComponentAllocated = mCodec->mKeepComponentAllocated; @@ -3792,6 +3967,7 @@ void ACodec::LoadedState::stateEntered() { onShutdown(keepComponentAllocated); } + mCodec->mExplicitShutdown = false; } void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { @@ -3801,9 +3977,12 @@ 
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { mCodec->changeState(mCodec->mUninitializedState); } - sp<AMessage> notify = mCodec->mNotify->dup(); - notify->setInt32("what", ACodec::kWhatShutdownCompleted); - notify->post(); + if (mCodec->mExplicitShutdown) { + sp<AMessage> notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatShutdownCompleted); + notify->post(); + mCodec->mExplicitShutdown = false; + } } bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { @@ -3837,6 +4016,7 @@ bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { CHECK(msg->findInt32( "keepComponentAllocated", &keepComponentAllocated)); + mCodec->mExplicitShutdown = true; onShutdown(keepComponentAllocated); handled = true; @@ -3896,6 +4076,8 @@ bool ACodec::LoadedState::onConfigureComponent( { sp<AMessage> notify = mCodec->mNotify->dup(); notify->setInt32("what", ACodec::kWhatComponentConfigured); + notify->setMessage("input-format", mCodec->mInputFormat); + notify->setMessage("output-format", mCodec->mOutputFormat); notify->post(); } @@ -3903,7 +4085,7 @@ bool ACodec::LoadedState::onConfigureComponent( } void ACodec::LoadedState::onCreateInputSurface( - const sp<AMessage> &msg) { + const sp<AMessage> & /* msg */) { ALOGV("onCreateInputSurface"); sp<AMessage> notify = mCodec->mNotify->dup(); @@ -3931,7 +4113,7 @@ void ACodec::LoadedState::onCreateInputSurface( } } - if (err == OK && mCodec->mMaxPtsGapUs > 0l) { + if (err == OK && mCodec->mMaxPtsGapUs > 0ll) { err = mCodec->mOMX->setInternalOption( mCodec->mNode, kPortIndexInput, @@ -3941,6 +4123,41 @@ void ACodec::LoadedState::onCreateInputSurface( if (err != OK) { ALOGE("[%s] Unable to configure max timestamp gap (err %d)", + mCodec->mComponentName.c_str(), + err); + } + } + + if (err == OK && mCodec->mTimePerCaptureUs > 0ll + && mCodec->mTimePerFrameUs > 0ll) { + int64_t timeLapse[2]; + timeLapse[0] = mCodec->mTimePerFrameUs; + timeLapse[1] = mCodec->mTimePerCaptureUs; 
+ err = mCodec->mOMX->setInternalOption( + mCodec->mNode, + kPortIndexInput, + IOMX::INTERNAL_OPTION_TIME_LAPSE, + &timeLapse[0], + sizeof(timeLapse)); + + if (err != OK) { + ALOGE("[%s] Unable to configure time lapse (err %d)", + mCodec->mComponentName.c_str(), + err); + } + } + + if (err == OK && mCodec->mCreateInputBuffersSuspended) { + bool suspend = true; + err = mCodec->mOMX->setInternalOption( + mCodec->mNode, + kPortIndexInput, + IOMX::INTERNAL_OPTION_SUSPEND, + &suspend, + sizeof(suspend)); + + if (err != OK) { + ALOGE("[%s] Unable to configure option to suspend (err %d)", mCodec->mComponentName.c_str(), err); } @@ -4003,6 +4220,7 @@ status_t ACodec::LoadedToIdleState::allocateBuffers() { bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { switch (msg->what()) { + case kWhatSetParameters: case kWhatShutdown: { mCodec->deferMessage(msg); @@ -4069,6 +4287,7 @@ void ACodec::IdleToExecutingState::stateEntered() { bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { switch (msg->what()) { + case kWhatSetParameters: case kWhatShutdown: { mCodec->deferMessage(msg); @@ -4129,7 +4348,7 @@ ACodec::ExecutingState::ExecutingState(ACodec *codec) } ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( - OMX_U32 portIndex) { + OMX_U32 /* portIndex */) { return RESUBMIT_BUFFERS; } @@ -4217,6 +4436,7 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { "keepComponentAllocated", &keepComponentAllocated)); mCodec->mShutdownInProgress = true; + mCodec->mExplicitShutdown = true; mCodec->mKeepComponentAllocated = keepComponentAllocated; mActive = false; @@ -4338,6 +4558,22 @@ status_t ACodec::setParameters(const sp<AMessage> ¶ms) { } } + int64_t skipFramesBeforeUs; + if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) { + status_t err = + mOMX->setInternalOption( + mNode, + kPortIndexInput, + IOMX::INTERNAL_OPTION_START_TIME, + &skipFramesBeforeUs, + sizeof(skipFramesBeforeUs)); 
+ + if (err != OK) { + ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err); + return err; + } + } + int32_t dropInputFrames; if (params->findInt32("drop-input-frames", &dropInputFrames)) { bool suspend = dropInputFrames != 0; diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index 6a2a696..714b5e0 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -15,6 +15,7 @@ LOCAL_SRC_FILES:= \ CameraSource.cpp \ CameraSourceTimeLapse.cpp \ DataSource.cpp \ + DataURISource.cpp \ DRMExtractor.cpp \ ESDS.cpp \ FileSource.cpp \ @@ -30,8 +31,10 @@ LOCAL_SRC_FILES:= \ MediaBufferGroup.cpp \ MediaCodec.cpp \ MediaCodecList.cpp \ + MediaCodecSource.cpp \ MediaDefs.cpp \ MediaExtractor.cpp \ + http/MediaHTTP.cpp \ MediaMuxer.cpp \ MediaSource.cpp \ MetaData.cpp \ @@ -55,8 +58,6 @@ LOCAL_SRC_FILES:= \ WVMExtractor.cpp \ XINGSeeker.cpp \ avc_utils.cpp \ - mp4/FragmentedMP4Parser.cpp \ - mp4/TrackFragment.cpp \ LOCAL_C_INCLUDES:= \ $(TOP)/frameworks/av/include/media/stagefright/timedtext \ @@ -80,6 +81,7 @@ LOCAL_SHARED_LIBRARIES := \ libicuuc \ liblog \ libmedia \ + libopus \ libsonivox \ libssl \ libstagefright_omx \ @@ -95,6 +97,7 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_color_conversion \ libstagefright_aacenc \ libstagefright_matroska \ + libstagefright_webm \ libstagefright_timedtext \ libvpx \ libwebm \ @@ -103,13 +106,6 @@ LOCAL_STATIC_LIBRARIES := \ libFLAC \ libmedia_helper -LOCAL_SRC_FILES += \ - chromium_http_stub.cpp -LOCAL_CPPFLAGS += -DCHROMIUM_AVAILABLE=1 - -LOCAL_SHARED_LIBRARIES += libstlport -include external/stlport/libstlport.mk - LOCAL_SHARED_LIBRARIES += \ libstagefright_enc_common \ libstagefright_avc_common \ diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp index 8623100..2669849 100644 --- a/media/libstagefright/AudioPlayer.cpp +++ b/media/libstagefright/AudioPlayer.cpp @@ -221,7 +221,8 @@ status_t AudioPlayer::start(bool 
sourceAlreadyStarted) { mAudioTrack = new AudioTrack( AUDIO_STREAM_MUSIC, mSampleRate, AUDIO_FORMAT_PCM_16_BIT, audioMask, - 0, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this, 0); + 0 /*frameCount*/, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this, + 0 /*notificationFrames*/); if ((err = mAudioTrack->initCheck()) != OK) { mAudioTrack.clear(); diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp index e68a710..d0e0e8e 100644 --- a/media/libstagefright/AudioSource.cpp +++ b/media/libstagefright/AudioSource.cpp @@ -65,7 +65,7 @@ AudioSource::AudioSource( if (status == OK) { // make sure that the AudioRecord callback never returns more than the maximum // buffer size - int frameCount = kMaxBufferSize / sizeof(int16_t) / channelCount; + uint32_t frameCount = kMaxBufferSize / sizeof(int16_t) / channelCount; // make sure that the AudioRecord total buffer size is large enough size_t bufCount = 2; @@ -76,10 +76,10 @@ AudioSource::AudioSource( mRecord = new AudioRecord( inputSource, sampleRate, AUDIO_FORMAT_PCM_16_BIT, audio_channel_in_mask_from_count(channelCount), - bufCount * frameCount, + (size_t) (bufCount * frameCount), AudioRecordCallbackFunction, this, - frameCount); + frameCount /*notificationFrames*/); mInitCheck = mRecord->initCheck(); } else { mInitCheck = status; @@ -278,7 +278,7 @@ status_t AudioSource::dataCallback(const AudioRecord::Buffer& audioBuffer) { // Drop retrieved and previously lost audio data. 
if (mNumFramesReceived == 0 && timeUs < mStartTimeUs) { - mRecord->getInputFramesLost(); + (void) mRecord->getInputFramesLost(); ALOGV("Drop audio data at %lld/%lld us", timeUs, mStartTimeUs); return OK; } diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 0dd867c..6e5003f 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -35,6 +35,8 @@ #include <binder/IPCThreadState.h> #include <binder/IServiceManager.h> +#include <media/IMediaHTTPConnection.h> +#include <media/IMediaHTTPService.h> #include <media/IMediaPlayerService.h> #include <media/stagefright/foundation/hexdump.h> #include <media/stagefright/foundation/ADebug.h> @@ -45,6 +47,7 @@ #include <media/stagefright/MediaBuffer.h> #include <media/stagefright/MediaDefs.h> #include <media/stagefright/MediaExtractor.h> +#include <media/stagefright/MediaHTTP.h> #include <media/stagefright/MediaSource.h> #include <media/stagefright/MetaData.h> #include <media/stagefright/OMXCodec.h> @@ -277,15 +280,20 @@ void AwesomePlayer::setUID(uid_t uid) { } status_t AwesomePlayer::setDataSource( - const char *uri, const KeyedVector<String8, String8> *headers) { + const sp<IMediaHTTPService> &httpService, + const char *uri, + const KeyedVector<String8, String8> *headers) { Mutex::Autolock autoLock(mLock); - return setDataSource_l(uri, headers); + return setDataSource_l(httpService, uri, headers); } status_t AwesomePlayer::setDataSource_l( - const char *uri, const KeyedVector<String8, String8> *headers) { + const sp<IMediaHTTPService> &httpService, + const char *uri, + const KeyedVector<String8, String8> *headers) { reset_l(); + mHTTPService = httpService; mUri = uri; if (headers) { @@ -302,7 +310,7 @@ status_t AwesomePlayer::setDataSource_l( } } - ALOGI("setDataSource_l(URL suppressed)"); + ALOGI("setDataSource_l(%s)", uriDebugString(mUri, mFlags & INCOGNITO).c_str()); // The actual work will be done during preparation in the call to // 
::finishSetDataSource_l to avoid blocking the calling thread in @@ -582,6 +590,7 @@ void AwesomePlayer::reset_l() { mSeekNotificationSent = true; mSeekTimeUs = 0; + mHTTPService.clear(); mUri.setTo(""); mUriHeaders.clear(); @@ -709,11 +718,9 @@ void AwesomePlayer::onBufferingUpdate() { finishAsyncPrepare_l(); } } else { - int64_t bitrate; - if (getBitrate(&bitrate)) { - size_t cachedSize = mCachedSource->cachedSize(); - int64_t cachedDurationUs = cachedSize * 8000000ll / bitrate; - + bool eos2; + int64_t cachedDurationUs; + if (getCachedDuration_l(&cachedDurationUs, &eos2) && mDurationUs > 0) { int percentage = 100.0 * (double)cachedDurationUs / mDurationUs; if (percentage > 100) { percentage = 100; @@ -721,7 +728,7 @@ void AwesomePlayer::onBufferingUpdate() { notifyListener_l(MEDIA_BUFFERING_UPDATE, percentage); } else { - // We don't know the bitrate of the stream, use absolute size + // We don't know the bitrate/duration of the stream, use absolute size // limits to maintain the cache. if ((mFlags & PLAYING) && !eos @@ -1483,7 +1490,7 @@ void AwesomePlayer::addTextSource_l(size_t trackIndex, const sp<MediaSource>& so CHECK(source != NULL); if (mTextDriver == NULL) { - mTextDriver = new TimedTextDriver(mListener); + mTextDriver = new TimedTextDriver(mListener, mHTTPService); } mTextDriver->addInBandTextSource(trackIndex, source); @@ -2193,15 +2200,14 @@ status_t AwesomePlayer::finishSetDataSource_l() { if (!strncasecmp("http://", mUri.string(), 7) || !strncasecmp("https://", mUri.string(), 8) || isWidevineStreaming) { - mConnectingDataSource = HTTPBase::Create( - (mFlags & INCOGNITO) - ? 
HTTPBase::kFlagIncognito - : 0); - - if (mUIDValid) { - mConnectingDataSource->setUID(mUID); + if (mHTTPService == NULL) { + ALOGE("Attempt to play media from http URI without HTTP service."); + return UNKNOWN_ERROR; } + sp<IMediaHTTPConnection> conn = mHTTPService->makeHTTPConnection(); + mConnectingDataSource = new MediaHTTP(conn); + String8 cacheConfig; bool disconnectAtHighwatermark; NuCachedSource2::RemoveCacheSpecificHeaders( @@ -2209,6 +2215,10 @@ status_t AwesomePlayer::finishSetDataSource_l() { mLock.unlock(); status_t err = mConnectingDataSource->connect(mUri, &mUriHeaders); + // force connection at this point, to avoid a race condition between getMIMEType and the + // caching datasource constructed below, which could result in multiple requests to the + // server, and/or failed connections. + String8 contentType = mConnectingDataSource->getMIMEType(); mLock.lock(); if (err != OK) { @@ -2239,8 +2249,6 @@ status_t AwesomePlayer::finishSetDataSource_l() { mConnectingDataSource.clear(); - String8 contentType = dataSource->getMIMEType(); - if (strncasecmp(contentType.string(), "audio/", 6)) { // We're not doing this for streams that appear to be audio-only // streams to ensure that even low bandwidth streams start @@ -2317,7 +2325,8 @@ status_t AwesomePlayer::finishSetDataSource_l() { } } } else { - dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders); + dataSource = DataSource::CreateFromURI( + mHTTPService, mUri.string(), &mUriHeaders); } if (dataSource == NULL) { @@ -2759,7 +2768,7 @@ status_t AwesomePlayer::invoke(const Parcel &request, Parcel *reply) { { Mutex::Autolock autoLock(mLock); if (mTextDriver == NULL) { - mTextDriver = new TimedTextDriver(mListener); + mTextDriver = new TimedTextDriver(mListener, mHTTPService); } // String values written in Parcel are UTF-16 values. 
String8 uri(request.readString16()); @@ -2771,7 +2780,7 @@ status_t AwesomePlayer::invoke(const Parcel &request, Parcel *reply) { { Mutex::Autolock autoLock(mLock); if (mTextDriver == NULL) { - mTextDriver = new TimedTextDriver(mListener); + mTextDriver = new TimedTextDriver(mListener, mHTTPService); } int fd = request.readFileDescriptor(); off64_t offset = request.readInt64(); @@ -2812,7 +2821,7 @@ status_t AwesomePlayer::dump( fprintf(out, " AwesomePlayer\n"); if (mStats.mFd < 0) { - fprintf(out, " URI(suppressed)"); + fprintf(out, " URI(%s)", uriDebugString(mUri, mFlags & INCOGNITO).c_str()); } else { fprintf(out, " fd(%d)", mStats.mFd); } @@ -2901,6 +2910,8 @@ void AwesomePlayer::onAudioTearDownEvent() { // get current position so we can start recreated stream from here getPosition(&mAudioTearDownPosition); + sp<IMediaHTTPService> savedHTTPService = mHTTPService; + // Reset and recreate reset_l(); @@ -2910,7 +2921,7 @@ void AwesomePlayer::onAudioTearDownEvent() { mFileSource = fileSource; err = setDataSource_l(fileSource); } else { - err = setDataSource_l(uri, &uriHeaders); + err = setDataSource_l(savedHTTPService, uri, &uriHeaders); } mFlags |= PREPARING; diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp index 5b41f30..b31e9e8 100644 --- a/media/libstagefright/CameraSource.cpp +++ b/media/libstagefright/CameraSource.cpp @@ -31,6 +31,12 @@ #include <utils/String8.h> #include <cutils/properties.h> +#if LOG_NDEBUG +#define UNUSED_UNLESS_VERBOSE(x) (void)(x) +#else +#define UNUSED_UNLESS_VERBOSE(x) +#endif + namespace android { static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL; @@ -63,6 +69,9 @@ CameraSourceListener::~CameraSourceListener() { } void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) { + UNUSED_UNLESS_VERBOSE(msgType); + UNUSED_UNLESS_VERBOSE(ext1); + UNUSED_UNLESS_VERBOSE(ext2); ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2); } @@ -577,14 +586,15 @@ 
CameraSource::~CameraSource() { } } -void CameraSource::startCameraRecording() { +status_t CameraSource::startCameraRecording() { ALOGV("startCameraRecording"); // Reset the identity to the current thread because media server owns the // camera and recording is started by the applications. The applications // will connect to the camera in ICameraRecordingProxy::startRecording. int64_t token = IPCThreadState::self()->clearCallingIdentity(); + status_t err; if (mNumInputBuffers > 0) { - status_t err = mCamera->sendCommand( + err = mCamera->sendCommand( CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0); // This could happen for CameraHAL1 clients; thus the failure is @@ -595,17 +605,25 @@ void CameraSource::startCameraRecording() { } } + err = OK; if (mCameraFlags & FLAGS_HOT_CAMERA) { mCamera->unlock(); mCamera.clear(); - CHECK_EQ((status_t)OK, - mCameraRecordingProxy->startRecording(new ProxyListener(this))); + if ((err = mCameraRecordingProxy->startRecording( + new ProxyListener(this))) != OK) { + ALOGE("Failed to start recording, received error: %s (%d)", + strerror(-err), err); + } } else { mCamera->setListener(new CameraSourceListener(this)); mCamera->startRecording(); - CHECK(mCamera->recordingEnabled()); + if (!mCamera->recordingEnabled()) { + err = -EINVAL; + ALOGE("Failed to start recording"); + } } IPCThreadState::self()->restoreCallingIdentity(token); + return err; } status_t CameraSource::start(MetaData *meta) { @@ -637,10 +655,12 @@ status_t CameraSource::start(MetaData *meta) { } } - startCameraRecording(); + status_t err; + if ((err = startCameraRecording()) == OK) { + mStarted = true; + } - mStarted = true; - return OK; + return err; } void CameraSource::stopCameraRecording() { diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp index 591daac..15ba967 100644 --- a/media/libstagefright/CameraSourceTimeLapse.cpp +++ b/media/libstagefright/CameraSourceTimeLapse.cpp @@ -85,7 +85,8 @@ 
CameraSourceTimeLapse::CameraSourceTimeLapse( mVideoWidth = videoSize.width; mVideoHeight = videoSize.height; - if (!trySettingVideoSize(videoSize.width, videoSize.height)) { + if (OK == mInitCheck && !trySettingVideoSize(videoSize.width, videoSize.height)) { + releaseCamera(); mInitCheck = NO_INIT; } diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp index 97987e2..6e0f37a 100644 --- a/media/libstagefright/DataSource.cpp +++ b/media/libstagefright/DataSource.cpp @@ -16,10 +16,6 @@ #include "include/AMRExtractor.h" -#if CHROMIUM_AVAILABLE -#include "include/chromium_http_stub.h" -#endif - #include "include/AACExtractor.h" #include "include/DRMExtractor.h" #include "include/FLACExtractor.h" @@ -35,10 +31,14 @@ #include "matroska/MatroskaExtractor.h" +#include <media/IMediaHTTPConnection.h> +#include <media/IMediaHTTPService.h> #include <media/stagefright/foundation/AMessage.h> #include <media/stagefright/DataSource.h> +#include <media/stagefright/DataURISource.h> #include <media/stagefright/FileSource.h> #include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MediaHTTP.h> #include <utils/String8.h> #include <cutils/properties.h> @@ -180,7 +180,9 @@ void DataSource::RegisterDefaultSniffers() { // static sp<DataSource> DataSource::CreateFromURI( - const char *uri, const KeyedVector<String8, String8> *headers) { + const sp<IMediaHTTPService> &httpService, + const char *uri, + const KeyedVector<String8, String8> *headers) { bool isWidevine = !strncasecmp("widevine://", uri, 11); sp<DataSource> source; @@ -189,7 +191,7 @@ sp<DataSource> DataSource::CreateFromURI( } else if (!strncasecmp("http://", uri, 7) || !strncasecmp("https://", uri, 8) || isWidevine) { - sp<HTTPBase> httpSource = HTTPBase::Create(); + sp<HTTPBase> httpSource = new MediaHTTP(httpService->makeHTTPConnection()); String8 tmp; if (isWidevine) { @@ -220,11 +222,8 @@ sp<DataSource> DataSource::CreateFromURI( // in the widevine:// case. 
source = httpSource; } - -# if CHROMIUM_AVAILABLE } else if (!strncasecmp("data:", uri, 5)) { - source = createDataUriSource(uri); -#endif + source = DataURISource::Create(uri); } else { // Assume it's a filename. source = new FileSource(uri); diff --git a/media/libstagefright/DataURISource.cpp b/media/libstagefright/DataURISource.cpp new file mode 100644 index 0000000..377bc85 --- /dev/null +++ b/media/libstagefright/DataURISource.cpp @@ -0,0 +1,109 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <media/stagefright/DataURISource.h> + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/AString.h> +#include <media/stagefright/foundation/base64.h> + +namespace android { + +// static +sp<DataURISource> DataURISource::Create(const char *uri) { + if (strncasecmp("data:", uri, 5)) { + return NULL; + } + + char *commaPos = strrchr(uri, ','); + + if (commaPos == NULL) { + return NULL; + } + + sp<ABuffer> buffer; + + AString tmp(&uri[5], commaPos - &uri[5]); + + if (tmp.endsWith(";base64")) { + AString encoded(commaPos + 1); + + // Strip CR and LF... 
+ for (size_t i = encoded.size(); i-- > 0;) { + if (encoded.c_str()[i] == '\r' || encoded.c_str()[i] == '\n') { + encoded.erase(i, 1); + } + } + + buffer = decodeBase64(encoded); + + if (buffer == NULL) { + ALOGE("Malformed base64 encoded content found."); + return NULL; + } + } else { +#if 0 + size_t dataLen = strlen(uri) - tmp.size() - 6; + buffer = new ABuffer(dataLen); + memcpy(buffer->data(), commaPos + 1, dataLen); + + // unescape +#else + // MediaPlayer doesn't care for this right now as we don't + // play any text-based media. + return NULL; +#endif + } + + // We don't really care about charset or mime type. + + return new DataURISource(buffer); +} + +DataURISource::DataURISource(const sp<ABuffer> &buffer) + : mBuffer(buffer) { +} + +DataURISource::~DataURISource() { +} + +status_t DataURISource::initCheck() const { + return OK; +} + +ssize_t DataURISource::readAt(off64_t offset, void *data, size_t size) { + if (offset >= mBuffer->size()) { + return 0; + } + + size_t copy = mBuffer->size() - offset; + if (copy > size) { + copy = size; + } + + memcpy(data, mBuffer->data() + offset, copy); + + return copy; +} + +status_t DataURISource::getSize(off64_t *size) { + *size = mBuffer->size(); + + return OK; +} + +} // namespace android + diff --git a/media/libstagefright/HTTPBase.cpp b/media/libstagefright/HTTPBase.cpp index 5fa4b6f..ca68c3d 100644 --- a/media/libstagefright/HTTPBase.cpp +++ b/media/libstagefright/HTTPBase.cpp @@ -20,10 +20,6 @@ #include "include/HTTPBase.h" -#if CHROMIUM_AVAILABLE -#include "include/chromium_http_stub.h" -#endif - #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/ALooper.h> @@ -40,34 +36,7 @@ HTTPBase::HTTPBase() mTotalTransferBytes(0), mPrevBandwidthMeasureTimeUs(0), mPrevEstimatedBandWidthKbps(0), - mBandWidthCollectFreqMs(5000), - mUIDValid(false), - mUID(0) { -} - -// static -sp<HTTPBase> HTTPBase::Create(uint32_t flags) { -#if CHROMIUM_AVAILABLE - HTTPBase *dataSource = 
createChromiumHTTPDataSource(flags); - if (dataSource) { - return dataSource; - } -#endif - { - TRESPASS(); - - return NULL; - } -} - -// static -status_t HTTPBase::UpdateProxyConfig( - const char *host, int32_t port, const char *exclusionList) { -#if CHROMIUM_AVAILABLE - return UpdateChromiumHTTPDataSourceProxyConfig(host, port, exclusionList); -#else - return INVALID_OPERATION; -#endif + mBandWidthCollectFreqMs(5000) { } void HTTPBase::addBandwidthMeasurement( @@ -135,21 +104,6 @@ status_t HTTPBase::setBandwidthStatCollectFreq(int32_t freqMs) { return OK; } -void HTTPBase::setUID(uid_t uid) { - mUIDValid = true; - mUID = uid; -} - -bool HTTPBase::getUID(uid_t *uid) const { - if (!mUIDValid) { - return false; - } - - *uid = mUID; - - return true; -} - // static void HTTPBase::RegisterSocketUserTag(int sockfd, uid_t uid, uint32_t kTag) { int res = qtaguid_tagSocket(sockfd, kTag, uid); diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index 362cd6b..2a3fa04 100644 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -488,12 +488,12 @@ status_t MPEG4Extractor::readMetaData() { break; } uint32_t chunk_type = ntohl(hdr[1]); - if (chunk_type == FOURCC('s', 'i', 'd', 'x')) { - // parse the sidx box too - continue; - } else if (chunk_type == FOURCC('m', 'o', 'o', 'f')) { + if (chunk_type == FOURCC('m', 'o', 'o', 'f')) { // store the offset of the first segment mMoofOffset = offset; + } else if (chunk_type != FOURCC('m', 'd', 'a', 't')) { + // keep parsing until we get to the data + continue; } break; } @@ -913,6 +913,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('e', 'l', 's', 't'): { + *offset += chunk_size; + // See 14496-12 8.6.6 uint8_t version; if (mDataSource->readAt(data_offset, &version, 1) < 1) { @@ -975,12 +977,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->meta->setInt32(kKeyEncoderPadding, 
paddingsamples); } } - *offset += chunk_size; break; } case FOURCC('f', 'r', 'm', 'a'): { + *offset += chunk_size; + uint32_t original_fourcc; if (mDataSource->readAt(data_offset, &original_fourcc, 4) < 4) { return ERROR_IO; @@ -994,12 +997,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->meta->setInt32(kKeyChannelCount, num_channels); mLastTrack->meta->setInt32(kKeySampleRate, sample_rate); } - *offset += chunk_size; break; } case FOURCC('t', 'e', 'n', 'c'): { + *offset += chunk_size; + if (chunk_size < 32) { return ERROR_MALFORMED; } @@ -1044,23 +1048,25 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->meta->setInt32(kKeyCryptoMode, defaultAlgorithmId); mLastTrack->meta->setInt32(kKeyCryptoDefaultIVSize, defaultIVSize); mLastTrack->meta->setData(kKeyCryptoKey, 'tenc', defaultKeyId, 16); - *offset += chunk_size; break; } case FOURCC('t', 'k', 'h', 'd'): { + *offset += chunk_size; + status_t err; if ((err = parseTrackHeader(data_offset, chunk_data_size)) != OK) { return err; } - *offset += chunk_size; break; } case FOURCC('p', 's', 's', 'h'): { + *offset += chunk_size; + PsshInfo pssh; if (mDataSource->readAt(data_offset + 4, &pssh.uuid, 16) < 16) { @@ -1086,12 +1092,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { } mPssh.push_back(pssh); - *offset += chunk_size; break; } case FOURCC('m', 'd', 'h', 'd'): { + *offset += chunk_size; + if (chunk_data_size < 4) { return ERROR_MALFORMED; } @@ -1172,7 +1179,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->meta->setCString( kKeyMediaLanguage, lang_code); - *offset += chunk_size; break; } @@ -1339,11 +1345,12 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->sampleTable->setChunkOffsetParams( chunk_type, data_offset, chunk_data_size); + *offset += chunk_size; + if (err != OK) { return err; } - *offset += chunk_size; break; } @@ -1353,11 +1360,12 @@ status_t 
MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->sampleTable->setSampleToChunkParams( data_offset, chunk_data_size); + *offset += chunk_size; + if (err != OK) { return err; } - *offset += chunk_size; break; } @@ -1368,6 +1376,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->sampleTable->setSampleSizeParams( chunk_type, data_offset, chunk_data_size); + *offset += chunk_size; + if (err != OK) { return err; } @@ -1408,7 +1418,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { } mLastTrack->meta->setInt32(kKeyMaxInputSize, max_size); } - *offset += chunk_size; // NOTE: setting another piece of metadata invalidates any pointers (such as the // mimetype) previously obtained, so don't cache them. @@ -1432,6 +1441,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('s', 't', 't', 's'): { + *offset += chunk_size; + status_t err = mLastTrack->sampleTable->setTimeToSampleParams( data_offset, chunk_data_size); @@ -1440,12 +1451,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { return err; } - *offset += chunk_size; break; } case FOURCC('c', 't', 't', 's'): { + *offset += chunk_size; + status_t err = mLastTrack->sampleTable->setCompositionTimeToSampleParams( data_offset, chunk_data_size); @@ -1454,12 +1466,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { return err; } - *offset += chunk_size; break; } case FOURCC('s', 't', 's', 's'): { + *offset += chunk_size; + status_t err = mLastTrack->sampleTable->setSyncSampleParams( data_offset, chunk_data_size); @@ -1468,13 +1481,14 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { return err; } - *offset += chunk_size; break; } // @xyz case FOURCC('\xA9', 'x', 'y', 'z'): { + *offset += chunk_size; + // Best case the total data length inside "@xyz" box // would be 8, for instance "@xyz" + "\x00\x04\x15\xc7" + "0+0/", // where "\x00\x04" is the text string length with 
value = 4, @@ -1503,12 +1517,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { buffer[location_length] = '\0'; mFileMetaData->setCString(kKeyLocation, buffer); - *offset += chunk_size; break; } case FOURCC('e', 's', 'd', 's'): { + *offset += chunk_size; + if (chunk_data_size < 4) { return ERROR_MALFORMED; } @@ -1546,12 +1561,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { } } - *offset += chunk_size; break; } case FOURCC('a', 'v', 'c', 'C'): { + *offset += chunk_size; + sp<ABuffer> buffer = new ABuffer(chunk_data_size); if (mDataSource->readAt( @@ -1562,12 +1578,12 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->meta->setData( kKeyAVCC, kTypeAVCC, buffer->data(), chunk_data_size); - *offset += chunk_size; break; } case FOURCC('d', '2', '6', '3'): { + *offset += chunk_size; /* * d263 contains a fixed 7 bytes part: * vendor - 4 bytes @@ -1593,7 +1609,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->meta->setData(kKeyD263, kTypeD263, buffer, chunk_data_size); - *offset += chunk_size; break; } @@ -1601,11 +1616,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { { uint8_t buffer[4]; if (chunk_data_size < (off64_t)sizeof(buffer)) { + *offset += chunk_size; return ERROR_MALFORMED; } if (mDataSource->readAt( data_offset, buffer, 4) < 4) { + *offset += chunk_size; return ERROR_IO; } @@ -1639,6 +1656,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('n', 'a', 'm', 'e'): case FOURCC('d', 'a', 't', 'a'): { + *offset += chunk_size; + if (mPath.size() == 6 && underMetaDataPath(mPath)) { status_t err = parseITunesMetaData(data_offset, chunk_data_size); @@ -1647,12 +1666,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { } } - *offset += chunk_size; break; } case FOURCC('m', 'v', 'h', 'd'): { + *offset += chunk_size; + if (chunk_data_size < 24) { return ERROR_MALFORMED; } @@ -1680,7 +1700,6 @@ 
status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mFileMetaData->setCString(kKeyDate, s.string()); - *offset += chunk_size; break; } @@ -1701,6 +1720,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('h', 'd', 'l', 'r'): { + *offset += chunk_size; + uint32_t buffer; if (mDataSource->readAt( data_offset + 8, &buffer, 4) < 4) { @@ -1715,7 +1736,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_TEXT_3GPP); } - *offset += chunk_size; break; } @@ -1740,6 +1760,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { delete[] buffer; buffer = NULL; + // advance read pointer so we don't end up reading this again + *offset += chunk_size; return ERROR_IO; } @@ -1754,6 +1776,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('c', 'o', 'v', 'r'): { + *offset += chunk_size; + if (mFileMetaData != NULL) { ALOGV("chunk_data_size = %lld and data_offset = %lld", chunk_data_size, data_offset); @@ -1768,7 +1792,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { buffer->data() + kSkipBytesOfDataBox, chunk_data_size - kSkipBytesOfDataBox); } - *offset += chunk_size; break; } @@ -1779,25 +1802,27 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('a', 'l', 'b', 'm'): case FOURCC('y', 'r', 'r', 'c'): { + *offset += chunk_size; + status_t err = parse3GPPMetaData(data_offset, chunk_data_size, depth); if (err != OK) { return err; } - *offset += chunk_size; break; } case FOURCC('I', 'D', '3', '2'): { + *offset += chunk_size; + if (chunk_data_size < 6) { return ERROR_MALFORMED; } parseID3v2MetaData(data_offset + 6); - *offset += chunk_size; break; } @@ -1921,9 +1946,10 @@ status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) { ALOGW("sub-sidx boxes not supported yet"); } bool sap = d3 & 0x80000000; - bool saptype = d3 >> 28; - if (!sap || saptype > 2) { 
- ALOGW("not a stream access point, or unsupported type"); + uint32_t saptype = (d3 >> 28) & 7; + if (!sap || (saptype != 1 && saptype != 2)) { + // type 1 and 2 are sync samples + ALOGW("not a stream access point, or unsupported type: %08x", d3); } total_duration += d2; offset += 12; @@ -2442,6 +2468,58 @@ status_t MPEG4Extractor::verifyTrack(Track *track) { return OK; } +typedef enum { + //AOT_NONE = -1, + //AOT_NULL_OBJECT = 0, + //AOT_AAC_MAIN = 1, /**< Main profile */ + AOT_AAC_LC = 2, /**< Low Complexity object */ + //AOT_AAC_SSR = 3, + //AOT_AAC_LTP = 4, + AOT_SBR = 5, + //AOT_AAC_SCAL = 6, + //AOT_TWIN_VQ = 7, + //AOT_CELP = 8, + //AOT_HVXC = 9, + //AOT_RSVD_10 = 10, /**< (reserved) */ + //AOT_RSVD_11 = 11, /**< (reserved) */ + //AOT_TTSI = 12, /**< TTSI Object */ + //AOT_MAIN_SYNTH = 13, /**< Main Synthetic object */ + //AOT_WAV_TAB_SYNTH = 14, /**< Wavetable Synthesis object */ + //AOT_GEN_MIDI = 15, /**< General MIDI object */ + //AOT_ALG_SYNTH_AUD_FX = 16, /**< Algorithmic Synthesis and Audio FX object */ + AOT_ER_AAC_LC = 17, /**< Error Resilient(ER) AAC Low Complexity */ + //AOT_RSVD_18 = 18, /**< (reserved) */ + //AOT_ER_AAC_LTP = 19, /**< Error Resilient(ER) AAC LTP object */ + AOT_ER_AAC_SCAL = 20, /**< Error Resilient(ER) AAC Scalable object */ + //AOT_ER_TWIN_VQ = 21, /**< Error Resilient(ER) TwinVQ object */ + AOT_ER_BSAC = 22, /**< Error Resilient(ER) BSAC object */ + AOT_ER_AAC_LD = 23, /**< Error Resilient(ER) AAC LowDelay object */ + //AOT_ER_CELP = 24, /**< Error Resilient(ER) CELP object */ + //AOT_ER_HVXC = 25, /**< Error Resilient(ER) HVXC object */ + //AOT_ER_HILN = 26, /**< Error Resilient(ER) HILN object */ + //AOT_ER_PARA = 27, /**< Error Resilient(ER) Parametric object */ + //AOT_RSVD_28 = 28, /**< might become SSC */ + AOT_PS = 29, /**< PS, Parametric Stereo (includes SBR) */ + //AOT_MPEGS = 30, /**< MPEG Surround */ + + AOT_ESCAPE = 31, /**< Signal AOT uses more than 5 bits */ + + //AOT_MP3ONMP4_L1 = 32, /**< MPEG-Layer1 in mp4 */ 
+ //AOT_MP3ONMP4_L2 = 33, /**< MPEG-Layer2 in mp4 */ + //AOT_MP3ONMP4_L3 = 34, /**< MPEG-Layer3 in mp4 */ + //AOT_RSVD_35 = 35, /**< might become DST */ + //AOT_RSVD_36 = 36, /**< might become ALS */ + //AOT_AAC_SLS = 37, /**< AAC + SLS */ + //AOT_SLS = 38, /**< SLS */ + //AOT_ER_AAC_ELD = 39, /**< AAC Enhanced Low Delay */ + + //AOT_USAC = 42, /**< USAC */ + //AOT_SAOC = 43, /**< SAOC */ + //AOT_LD_MPEGS = 44, /**< Low Delay MPEG Surround */ + + //AOT_RSVD50 = 50, /**< Interim AOT for Rsvd50 */ +} AUDIO_OBJECT_TYPE; + status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio( const void *esds_data, size_t esds_size) { ESDS esds(esds_data, esds_size); @@ -2524,7 +2602,7 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio( sampleRate = kSamplingRate[freqIndex]; } - if (objectType == 5 || objectType == 29) { // SBR specific config per 14496-3 table 1.13 + if (objectType == AOT_SBR || objectType == AOT_PS) {//SBR specific config per 14496-3 table 1.13 uint32_t extFreqIndex = br.getBits(4); int32_t extSampleRate; if (extFreqIndex == 15) { @@ -2542,6 +2620,111 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio( // mLastTrack->meta->setInt32(kKeyExtSampleRate, extSampleRate); } + switch (numChannels) { + // values defined in 14496-3_2009 amendment-4 Table 1.19 - Channel Configuration + case 0: + case 1:// FC + case 2:// FL FR + case 3:// FC, FL FR + case 4:// FC, FL FR, RC + case 5:// FC, FL FR, SL SR + case 6:// FC, FL FR, SL SR, LFE + //numChannels already contains the right value + break; + case 11:// FC, FL FR, SL SR, RC, LFE + numChannels = 7; + break; + case 7: // FC, FCL FCR, FL FR, SL SR, LFE + case 12:// FC, FL FR, SL SR, RL RR, LFE + case 14:// FC, FL FR, SL SR, LFE, FHL FHR + numChannels = 8; + break; + default: + return ERROR_UNSUPPORTED; + } + + { + if (objectType == AOT_SBR || objectType == AOT_PS) { + const int32_t extensionSamplingFrequency = br.getBits(4); + objectType = br.getBits(5); + + if (objectType == 
AOT_ESCAPE) { + objectType = 32 + br.getBits(6); + } + } + if (objectType == AOT_AAC_LC || objectType == AOT_ER_AAC_LC || + objectType == AOT_ER_AAC_LD || objectType == AOT_ER_AAC_SCAL || + objectType == AOT_ER_BSAC) { + const int32_t frameLengthFlag = br.getBits(1); + + const int32_t dependsOnCoreCoder = br.getBits(1); + + if (dependsOnCoreCoder ) { + const int32_t coreCoderDelay = br.getBits(14); + } + + const int32_t extensionFlag = br.getBits(1); + + if (numChannels == 0 ) { + int32_t channelsEffectiveNum = 0; + int32_t channelsNum = 0; + const int32_t ElementInstanceTag = br.getBits(4); + const int32_t Profile = br.getBits(2); + const int32_t SamplingFrequencyIndex = br.getBits(4); + const int32_t NumFrontChannelElements = br.getBits(4); + const int32_t NumSideChannelElements = br.getBits(4); + const int32_t NumBackChannelElements = br.getBits(4); + const int32_t NumLfeChannelElements = br.getBits(2); + const int32_t NumAssocDataElements = br.getBits(3); + const int32_t NumValidCcElements = br.getBits(4); + + const int32_t MonoMixdownPresent = br.getBits(1); + if (MonoMixdownPresent != 0) { + const int32_t MonoMixdownElementNumber = br.getBits(4); + } + + const int32_t StereoMixdownPresent = br.getBits(1); + if (StereoMixdownPresent != 0) { + const int32_t StereoMixdownElementNumber = br.getBits(4); + } + + const int32_t MatrixMixdownIndexPresent = br.getBits(1); + if (MatrixMixdownIndexPresent != 0) { + const int32_t MatrixMixdownIndex = br.getBits(2); + const int32_t PseudoSurroundEnable = br.getBits(1); + } + + int i; + for (i=0; i < NumFrontChannelElements; i++) { + const int32_t FrontElementIsCpe = br.getBits(1); + const int32_t FrontElementTagSelect = br.getBits(4); + channelsNum += FrontElementIsCpe ? 2 : 1; + } + + for (i=0; i < NumSideChannelElements; i++) { + const int32_t SideElementIsCpe = br.getBits(1); + const int32_t SideElementTagSelect = br.getBits(4); + channelsNum += SideElementIsCpe ? 
2 : 1; + } + + for (i=0; i < NumBackChannelElements; i++) { + const int32_t BackElementIsCpe = br.getBits(1); + const int32_t BackElementTagSelect = br.getBits(4); + channelsNum += BackElementIsCpe ? 2 : 1; + } + channelsEffectiveNum = channelsNum; + + for (i=0; i < NumLfeChannelElements; i++) { + const int32_t LfeElementTagSelect = br.getBits(4); + channelsNum += 1; + } + ALOGV("mpeg4 audio channelsNum = %d", channelsNum); + ALOGV("mpeg4 audio channelsEffectiveNum = %d", channelsEffectiveNum); + numChannels = channelsNum; + } + } + } + if (numChannels == 0) { return ERROR_UNSUPPORTED; } @@ -2742,9 +2925,20 @@ status_t MPEG4Source::parseChunk(off64_t *offset) { } } if (chunk_type == FOURCC('m', 'o', 'o', 'f')) { - // *offset points to the mdat box following this moof - parseChunk(offset); // doesn't actually parse it, just updates offset - mNextMoofOffset = *offset; + // *offset points to the box following this moof. Find the next moof from there. + + while (true) { + if (mDataSource->readAt(*offset, hdr, 8) < 8) { + return ERROR_END_OF_STREAM; + } + chunk_size = ntohl(hdr[0]); + chunk_type = ntohl(hdr[1]); + if (chunk_type == FOURCC('m', 'o', 'o', 'f')) { + mNextMoofOffset = *offset; + break; + } + *offset += chunk_size; + } } break; } @@ -3549,7 +3743,7 @@ status_t MPEG4Source::fragmentedRead( const SidxEntry *se = &mSegments[i]; if (totalTime + se->mDurationUs > seekTimeUs) { // The requested time is somewhere in this segment - if ((mode == ReadOptions::SEEK_NEXT_SYNC) || + if ((mode == ReadOptions::SEEK_NEXT_SYNC && seekTimeUs > totalTime) || (mode == ReadOptions::SEEK_CLOSEST_SYNC && (seekTimeUs - totalTime) > (totalTime + se->mDurationUs - seekTimeUs))) { // requested next sync, or closest sync and it was closer to the end of @@ -3562,11 +3756,19 @@ status_t MPEG4Source::fragmentedRead( totalTime += se->mDurationUs; totalOffset += se->mSize; } - mCurrentMoofOffset = totalOffset; - mCurrentSamples.clear(); - mCurrentSampleIndex = 0; - parseChunk(&totalOffset); 
- mCurrentTime = totalTime * mTimescale / 1000000ll; + mCurrentMoofOffset = totalOffset; + mCurrentSamples.clear(); + mCurrentSampleIndex = 0; + parseChunk(&totalOffset); + mCurrentTime = totalTime * mTimescale / 1000000ll; + } else { + // without sidx boxes, we can only seek to 0 + mCurrentMoofOffset = mFirstMoofOffset; + mCurrentSamples.clear(); + mCurrentSampleIndex = 0; + off64_t tmp = mCurrentMoofOffset; + parseChunk(&tmp); + mCurrentTime = 0; } if (mBuffer != NULL) { @@ -3578,7 +3780,7 @@ status_t MPEG4Source::fragmentedRead( } off64_t offset = 0; - size_t size; + size_t size = 0; uint32_t cts = 0; bool isSyncSample = false; bool newBuffer = false; @@ -3586,16 +3788,18 @@ status_t MPEG4Source::fragmentedRead( newBuffer = true; if (mCurrentSampleIndex >= mCurrentSamples.size()) { - // move to next fragment - Sample lastSample = mCurrentSamples[mCurrentSamples.size() - 1]; - off64_t nextMoof = mNextMoofOffset; // lastSample.offset + lastSample.size; + // move to next fragment if there is one + if (mNextMoofOffset <= mCurrentMoofOffset) { + return ERROR_END_OF_STREAM; + } + off64_t nextMoof = mNextMoofOffset; mCurrentMoofOffset = nextMoof; mCurrentSamples.clear(); mCurrentSampleIndex = 0; parseChunk(&nextMoof); - if (mCurrentSampleIndex >= mCurrentSamples.size()) { - return ERROR_END_OF_STREAM; - } + if (mCurrentSampleIndex >= mCurrentSamples.size()) { + return ERROR_END_OF_STREAM; + } } const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex]; diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp index 58a4487..24e53b3 100644 --- a/media/libstagefright/MPEG4Writer.cpp +++ b/media/libstagefright/MPEG4Writer.cpp @@ -41,6 +41,12 @@ #include "include/ESDS.h" +#define WARN_UNLESS(condition, message, ...) \ +( (CONDITION(condition)) ? 
false : ({ \ + ALOGW("Condition %s failed " message, #condition, ##__VA_ARGS__); \ + true; \ +})) + namespace android { static const int64_t kMinStreamableFileSizeInBytes = 5 * 1024 * 1024; @@ -975,13 +981,16 @@ void MPEG4Writer::writeFtypBox(MetaData *param) { if (param && param->findInt32(kKeyFileType, &fileType) && fileType != OUTPUT_FORMAT_MPEG_4) { writeFourcc("3gp4"); + writeInt32(0); + writeFourcc("isom"); + writeFourcc("3gp4"); } else { + writeFourcc("mp42"); + writeInt32(0); writeFourcc("isom"); + writeFourcc("mp42"); } - writeInt32(0); - writeFourcc("isom"); - writeFourcc("3gp4"); endBox(); } @@ -1763,7 +1772,7 @@ status_t MPEG4Writer::Track::pause() { } status_t MPEG4Writer::Track::stop() { - ALOGD("Stopping %s track", mIsAudio? "Audio": "Video"); + ALOGD("%s track stopping", mIsAudio? "Audio": "Video"); if (!mStarted) { ALOGE("Stop() called but track is not started"); return ERROR_END_OF_STREAM; @@ -1774,19 +1783,14 @@ status_t MPEG4Writer::Track::stop() { } mDone = true; + ALOGD("%s track source stopping", mIsAudio? "Audio": "Video"); + mSource->stop(); + ALOGD("%s track source stopped", mIsAudio? "Audio": "Video"); + void *dummy; pthread_join(mThread, &dummy); - status_t err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy)); - ALOGD("Stopping %s track source", mIsAudio? "Audio": "Video"); - { - status_t status = mSource->stop(); - if (err == OK && status != OK && status != ERROR_END_OF_STREAM) { - err = status; - } - } - ALOGD("%s track stopped", mIsAudio? "Audio": "Video"); return err; } @@ -2100,6 +2104,7 @@ status_t MPEG4Writer::Track::threadEntry() { status_t err = OK; MediaBuffer *buffer; + const char *trackName = mIsAudio ? 
"Audio" : "Video"; while (!mDone && (err = mSource->read(&buffer)) == OK) { if (buffer->range_length() == 0) { buffer->release(); @@ -2195,15 +2200,27 @@ status_t MPEG4Writer::Track::threadEntry() { if (mResumed) { int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs; - CHECK_GE(durExcludingEarlierPausesUs, 0ll); + if (WARN_UNLESS(durExcludingEarlierPausesUs >= 0ll, "for %s track", trackName)) { + copy->release(); + return ERROR_MALFORMED; + } + int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs; - CHECK_GE(pausedDurationUs, lastDurationUs); + if (WARN_UNLESS(pausedDurationUs >= lastDurationUs, "for %s track", trackName)) { + copy->release(); + return ERROR_MALFORMED; + } + previousPausedDurationUs += pausedDurationUs - lastDurationUs; mResumed = false; } timestampUs -= previousPausedDurationUs; - CHECK_GE(timestampUs, 0ll); + if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) { + copy->release(); + return ERROR_MALFORMED; + } + if (!mIsAudio) { /* * Composition time: timestampUs @@ -2215,7 +2232,11 @@ status_t MPEG4Writer::Track::threadEntry() { decodingTimeUs -= previousPausedDurationUs; cttsOffsetTimeUs = timestampUs + kMaxCttsOffsetTimeUs - decodingTimeUs; - CHECK_GE(cttsOffsetTimeUs, 0ll); + if (WARN_UNLESS(cttsOffsetTimeUs >= 0ll, "for %s track", trackName)) { + copy->release(); + return ERROR_MALFORMED; + } + timestampUs = decodingTimeUs; ALOGV("decoding time: %lld and ctts offset time: %lld", timestampUs, cttsOffsetTimeUs); @@ -2223,7 +2244,11 @@ status_t MPEG4Writer::Track::threadEntry() { // Update ctts box table if necessary currCttsOffsetTimeTicks = (cttsOffsetTimeUs * mTimeScale + 500000LL) / 1000000LL; - CHECK_LE(currCttsOffsetTimeTicks, 0x0FFFFFFFFLL); + if (WARN_UNLESS(currCttsOffsetTimeTicks <= 0x0FFFFFFFFLL, "for %s track", trackName)) { + copy->release(); + return ERROR_MALFORMED; + } + if (mStszTableEntries->count() == 0) { // Force the first ctts table entry to have one single entry // 
so that we can do adjustment for the initial track start @@ -2261,9 +2286,13 @@ status_t MPEG4Writer::Track::threadEntry() { } } - CHECK_GE(timestampUs, 0ll); + if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) { + copy->release(); + return ERROR_MALFORMED; + } + ALOGV("%s media time stamp: %lld and previous paused duration %lld", - mIsAudio? "Audio": "Video", timestampUs, previousPausedDurationUs); + trackName, timestampUs, previousPausedDurationUs); if (timestampUs > mTrackDurationUs) { mTrackDurationUs = timestampUs; } @@ -2278,10 +2307,27 @@ status_t MPEG4Writer::Track::threadEntry() { (lastTimestampUs * mTimeScale + 500000LL) / 1000000LL); if (currDurationTicks < 0ll) { ALOGE("timestampUs %lld < lastTimestampUs %lld for %s track", - timestampUs, lastTimestampUs, mIsAudio? "Audio": "Video"); + timestampUs, lastTimestampUs, trackName); + copy->release(); return UNKNOWN_ERROR; } + // if the duration is different for this sample, see if it is close enough to the previous + // duration that we can fudge it and use the same value, to avoid filling the stts table + // with lots of near-identical entries. + // "close enough" here means that the current duration needs to be adjusted by less + // than 0.1 milliseconds + if (lastDurationTicks && (currDurationTicks != lastDurationTicks)) { + int64_t deltaUs = ((lastDurationTicks - currDurationTicks) * 1000000LL + + (mTimeScale / 2)) / mTimeScale; + if (deltaUs > -100 && deltaUs < 100) { + // use previous ticks, and adjust timestamp as if it was actually that number + // of ticks + currDurationTicks = lastDurationTicks; + timestampUs += deltaUs; + } + } + mStszTableEntries->add(htonl(sampleSize)); if (mStszTableEntries->count() > 2) { @@ -2302,7 +2348,7 @@ status_t MPEG4Writer::Track::threadEntry() { previousSampleSize = sampleSize; } ALOGV("%s timestampUs/lastTimestampUs: %lld/%lld", - mIsAudio? 
"Audio": "Video", timestampUs, lastTimestampUs); + trackName, timestampUs, lastTimestampUs); lastDurationUs = timestampUs - lastTimestampUs; lastDurationTicks = currDurationTicks; lastTimestampUs = timestampUs; @@ -2407,7 +2453,7 @@ status_t MPEG4Writer::Track::threadEntry() { sendTrackSummary(hasMultipleTracks); ALOGI("Received total/0-length (%d/%d) buffers and encoded %d frames. - %s", - count, nZeroLengthFrames, mStszTableEntries->count(), mIsAudio? "audio": "video"); + count, nZeroLengthFrames, mStszTableEntries->count(), trackName); if (mIsAudio) { ALOGI("Audio track drift time: %lld us", mOwner->getDriftTimeUs()); } diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index fe21296..601dccf 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -352,6 +352,20 @@ status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const { return OK; } +status_t MediaCodec::getInputFormat(sp<AMessage> *format) const { + sp<AMessage> msg = new AMessage(kWhatGetInputFormat, id()); + + sp<AMessage> response; + status_t err; + if ((err = PostAndAwaitResponse(msg, &response)) != OK) { + return err; + } + + CHECK(response->findMessage("format", format)); + + return OK; +} + status_t MediaCodec::getName(AString *name) const { sp<AMessage> msg = new AMessage(kWhatGetName, id()); @@ -589,6 +603,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { postActivityNotificationIfPossible(); cancelPendingDequeueOperations(); + setState(UNINITIALIZED); break; } @@ -598,6 +613,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { mFlags |= kFlagStickyError; postActivityNotificationIfPossible(); + setState(UNINITIALIZED); break; } } @@ -642,6 +658,9 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { // reset input surface flag mHaveInputSurface = false; + CHECK(msg->findMessage("input-format", &mInputFormat)); + CHECK(msg->findMessage("output-format", &mOutputFormat)); + (new 
AMessage)->postReply(mReplyID); break; } @@ -1330,14 +1349,19 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { break; } + case kWhatGetInputFormat: case kWhatGetOutputFormat: { + sp<AMessage> format = + (msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat); + uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); - if ((mState != STARTED && mState != FLUSHING) + if ((mState != CONFIGURED && mState != STARTING && + mState != STARTED && mState != FLUSHING) || (mFlags & kFlagStickyError) - || mOutputFormat == NULL) { + || format == NULL) { sp<AMessage> response = new AMessage; response->setInt32("err", INVALID_OPERATION); @@ -1346,7 +1370,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { } sp<AMessage> response = new AMessage; - response->setMessage("format", mOutputFormat); + response->setMessage("format", format); response->postReply(replyID); break; } diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp index 6248e90..8a451c8 100644 --- a/media/libstagefright/MediaCodecList.cpp +++ b/media/libstagefright/MediaCodecList.cpp @@ -48,22 +48,43 @@ const MediaCodecList *MediaCodecList::getInstance() { MediaCodecList::MediaCodecList() : mInitCheck(NO_INIT) { - FILE *file = fopen("/etc/media_codecs.xml", "r"); + parseTopLevelXMLFile("/etc/media_codecs.xml"); +} - if (file == NULL) { - ALOGW("unable to open media codecs configuration xml file."); +void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml) { + // get href_base + char *href_base_end = strrchr(codecs_xml, '/'); + if (href_base_end != NULL) { + mHrefBase = AString(codecs_xml, href_base_end - codecs_xml + 1); + } + + mInitCheck = OK; + mCurrentSection = SECTION_TOPLEVEL; + mDepth = 0; + + parseXMLFile(codecs_xml); + + if (mInitCheck != OK) { + mCodecInfos.clear(); + mCodecQuirks.clear(); return; } - parseXMLFile(file); + // These are currently still used by the video editing suite. 
+ addMediaCodec(true /* encoder */, "AACEncoder", "audio/mp4a-latm"); + addMediaCodec( + false /* encoder */, "OMX.google.raw.decoder", "audio/raw"); - if (mInitCheck == OK) { - // These are currently still used by the video editing suite. + for (size_t i = mCodecInfos.size(); i-- > 0;) { + CodecInfo *info = &mCodecInfos.editItemAt(i); - addMediaCodec(true /* encoder */, "AACEncoder", "audio/mp4a-latm"); + if (info->mTypes == 0) { + // No types supported by this component??? + ALOGW("Component %s does not support any type of media?", + info->mName.c_str()); - addMediaCodec( - false /* encoder */, "OMX.google.raw.decoder", "audio/raw"); + mCodecInfos.removeAt(i); + } } #if 0 @@ -84,9 +105,6 @@ MediaCodecList::MediaCodecList() ALOGI("%s", line.c_str()); } #endif - - fclose(file); - file = NULL; } MediaCodecList::~MediaCodecList() { @@ -96,10 +114,14 @@ status_t MediaCodecList::initCheck() const { return mInitCheck; } -void MediaCodecList::parseXMLFile(FILE *file) { - mInitCheck = OK; - mCurrentSection = SECTION_TOPLEVEL; - mDepth = 0; +void MediaCodecList::parseXMLFile(const char *path) { + FILE *file = fopen(path, "r"); + + if (file == NULL) { + ALOGW("unable to open media codecs configuration xml file: %s", path); + mInitCheck = NAME_NOT_FOUND; + return; + } XML_Parser parser = ::XML_ParserCreate(NULL); CHECK(parser != NULL); @@ -112,7 +134,7 @@ void MediaCodecList::parseXMLFile(FILE *file) { while (mInitCheck == OK) { void *buff = ::XML_GetBuffer(parser, BUFF_SIZE); if (buff == NULL) { - ALOGE("failed to in call to XML_GetBuffer()"); + ALOGE("failed in call to XML_GetBuffer()"); mInitCheck = UNKNOWN_ERROR; break; } @@ -124,8 +146,9 @@ void MediaCodecList::parseXMLFile(FILE *file) { break; } - if (::XML_ParseBuffer(parser, bytes_read, bytes_read == 0) - != XML_STATUS_OK) { + XML_Status status = ::XML_ParseBuffer(parser, bytes_read, bytes_read == 0); + if (status != XML_STATUS_OK) { + ALOGE("malformed (%s)", ::XML_ErrorString(::XML_GetErrorCode(parser))); mInitCheck 
= ERROR_MALFORMED; break; } @@ -137,25 +160,8 @@ void MediaCodecList::parseXMLFile(FILE *file) { ::XML_ParserFree(parser); - if (mInitCheck == OK) { - for (size_t i = mCodecInfos.size(); i-- > 0;) { - CodecInfo *info = &mCodecInfos.editItemAt(i); - - if (info->mTypes == 0) { - // No types supported by this component??? - - ALOGW("Component %s does not support any type of media?", - info->mName.c_str()); - - mCodecInfos.removeAt(i); - } - } - } - - if (mInitCheck != OK) { - mCodecInfos.clear(); - mCodecQuirks.clear(); - } + fclose(file); + file = NULL; } // static @@ -169,12 +175,63 @@ void MediaCodecList::EndElementHandlerWrapper(void *me, const char *name) { static_cast<MediaCodecList *>(me)->endElementHandler(name); } +status_t MediaCodecList::includeXMLFile(const char **attrs) { + const char *href = NULL; + size_t i = 0; + while (attrs[i] != NULL) { + if (!strcmp(attrs[i], "href")) { + if (attrs[i + 1] == NULL) { + return -EINVAL; + } + href = attrs[i + 1]; + ++i; + } else { + return -EINVAL; + } + ++i; + } + + // For security reasons and for simplicity, file names can only contain + // [a-zA-Z0-9_.] and must start with media_codecs_ and end with .xml + for (i = 0; href[i] != '\0'; i++) { + if (href[i] == '.' 
|| href[i] == '_' || + (href[i] >= '0' && href[i] <= '9') || + (href[i] >= 'A' && href[i] <= 'Z') || + (href[i] >= 'a' && href[i] <= 'z')) { + continue; + } + ALOGE("invalid include file name: %s", href); + return -EINVAL; + } + + AString filename = href; + if (!filename.startsWith("media_codecs_") || + !filename.endsWith(".xml")) { + ALOGE("invalid include file name: %s", href); + return -EINVAL; + } + filename.insert(mHrefBase, 0); + + parseXMLFile(filename.c_str()); + return mInitCheck; +} + void MediaCodecList::startElementHandler( const char *name, const char **attrs) { if (mInitCheck != OK) { return; } + if (!strcmp(name, "Include")) { + mInitCheck = includeXMLFile(attrs); + if (mInitCheck == OK) { + mPastSections.push(mCurrentSection); + mCurrentSection = SECTION_INCLUDE; + } + ++mDepth; + return; + } + switch (mCurrentSection) { case SECTION_TOPLEVEL: { @@ -264,6 +321,15 @@ void MediaCodecList::endElementHandler(const char *name) { break; } + case SECTION_INCLUDE: + { + if (!strcmp(name, "Include") && mPastSections.size() > 0) { + mCurrentSection = mPastSections.top(); + mPastSections.pop(); + } + break; + } + default: break; } diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp new file mode 100644 index 0000000..924173c --- /dev/null +++ b/media/libstagefright/MediaCodecSource.cpp @@ -0,0 +1,881 @@ +/* + * Copyright 2014, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaCodecSource" +#define DEBUG_DRIFT_TIME 0 +#include <gui/IGraphicBufferProducer.h> +#include <gui/Surface.h> +#include <media/ICrypto.h> +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/ALooper.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaCodec.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MediaSource.h> +#include <media/stagefright/MediaCodecSource.h> +#include <media/stagefright/Utils.h> + +namespace android { + +static void ReleaseMediaBufferReference(const sp<ABuffer> &accessUnit) { + void *mbuf; + if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf) + && mbuf != NULL) { + ALOGV("releasing mbuf %p", mbuf); + + accessUnit->meta()->setPointer("mediaBuffer", NULL); + + static_cast<MediaBuffer *>(mbuf)->release(); + mbuf = NULL; + } +} + +struct MediaCodecSource::Puller : public AHandler { + Puller(const sp<MediaSource> &source); + + status_t start(const sp<MetaData> &meta, const sp<AMessage> ¬ify); + void stopAsync(); + + void pause(); + void resume(); + +protected: + virtual void onMessageReceived(const sp<AMessage> &msg); + virtual ~Puller(); + +private: + enum { + kWhatStart = 'msta', + kWhatStop, + kWhatPull, + kWhatPause, + kWhatResume, + }; + + sp<MediaSource> mSource; + sp<AMessage> mNotify; + sp<ALooper> mLooper; + int32_t mPullGeneration; + bool mIsAudio; + bool mPaused; + bool mReachedEOS; + + status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg); + void schedulePull(); + void handleEOS(); + + DISALLOW_EVIL_CONSTRUCTORS(Puller); +}; + +MediaCodecSource::Puller::Puller(const sp<MediaSource> &source) + : mSource(source), + mLooper(new ALooper()), + mPullGeneration(0), + mIsAudio(false), + mPaused(false), + mReachedEOS(false) { + sp<MetaData> 
meta = source->getFormat(); + const char *mime; + CHECK(meta->findCString(kKeyMIMEType, &mime)); + + mIsAudio = !strncasecmp(mime, "audio/", 6); + + mLooper->setName("pull_looper"); +} + +MediaCodecSource::Puller::~Puller() { + mLooper->unregisterHandler(id()); + mLooper->stop(); +} + +status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError( + const sp<AMessage> &msg) { + sp<AMessage> response; + status_t err = msg->postAndAwaitResponse(&response); + + if (err != OK) { + return err; + } + + if (!response->findInt32("err", &err)) { + err = OK; + } + + return err; +} + +status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, + const sp<AMessage> ¬ify) { + ALOGV("puller (%s) start", mIsAudio ? "audio" : "video"); + mLooper->start( + false /* runOnCallingThread */, + false /* canCallJava */, + PRIORITY_AUDIO); + mLooper->registerHandler(this); + mNotify = notify; + + sp<AMessage> msg = new AMessage(kWhatStart, id()); + msg->setObject("meta", meta); + return postSynchronouslyAndReturnError(msg); +} + +void MediaCodecSource::Puller::stopAsync() { + ALOGV("puller (%s) stopAsync", mIsAudio ? "audio" : "video"); + (new AMessage(kWhatStop, id()))->post(); +} + +void MediaCodecSource::Puller::pause() { + (new AMessage(kWhatPause, id()))->post(); +} + +void MediaCodecSource::Puller::resume() { + (new AMessage(kWhatResume, id()))->post(); +} + +void MediaCodecSource::Puller::schedulePull() { + sp<AMessage> msg = new AMessage(kWhatPull, id()); + msg->setInt32("generation", mPullGeneration); + msg->post(); +} + +void MediaCodecSource::Puller::handleEOS() { + if (!mReachedEOS) { + ALOGV("puller (%s) posting EOS", mIsAudio ? 
"audio" : "video"); + mReachedEOS = true; + sp<AMessage> notify = mNotify->dup(); + notify->setPointer("accessUnit", NULL); + notify->post(); + } +} + +void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatStart: + { + sp<RefBase> obj; + CHECK(msg->findObject("meta", &obj)); + + mReachedEOS = false; + + status_t err = mSource->start(static_cast<MetaData *>(obj.get())); + + if (err == OK) { + schedulePull(); + } + + sp<AMessage> response = new AMessage; + response->setInt32("err", err); + + uint32_t replyID; + CHECK(msg->senderAwaitsResponse(&replyID)); + response->postReply(replyID); + break; + } + + case kWhatStop: + { + ALOGV("source (%s) stopping", mIsAudio ? "audio" : "video"); + mSource->stop(); + ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video"); + ++mPullGeneration; + + handleEOS(); + break; + } + + case kWhatPull: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mPullGeneration) { + break; + } + + MediaBuffer *mbuf; + status_t err = mSource->read(&mbuf); + + if (mPaused) { + if (err == OK) { + mbuf->release(); + mbuf = NULL; + } + + msg->post(); + break; + } + + if (err != OK) { + if (err == ERROR_END_OF_STREAM) { + ALOGV("stream ended, mbuf %p", mbuf); + } else { + ALOGE("error %d reading stream.", err); + } + handleEOS(); + } else { + sp<AMessage> notify = mNotify->dup(); + + notify->setPointer("accessUnit", mbuf); + notify->post(); + + msg->post(); + } + break; + } + + case kWhatPause: + { + mPaused = true; + break; + } + + case kWhatResume: + { + mPaused = false; + break; + } + + default: + TRESPASS(); + } +} + +// static +sp<MediaCodecSource> MediaCodecSource::Create( + const sp<ALooper> &looper, + const sp<AMessage> &format, + const sp<MediaSource> &source, + uint32_t flags) { + sp<MediaCodecSource> mediaSource = + new MediaCodecSource(looper, format, source, flags); + + if (mediaSource->init() == OK) { + return mediaSource; + } + 
return NULL; +} + +status_t MediaCodecSource::start(MetaData* params) { + sp<AMessage> msg = new AMessage(kWhatStart, mReflector->id()); + msg->setObject("meta", params); + return postSynchronouslyAndReturnError(msg); +} + +status_t MediaCodecSource::stop() { + sp<AMessage> msg = new AMessage(kWhatStop, mReflector->id()); + return postSynchronouslyAndReturnError(msg); +} + +status_t MediaCodecSource::pause() { + (new AMessage(kWhatPause, mReflector->id()))->post(); + return OK; +} + +sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() { + CHECK(mFlags & FLAG_USE_SURFACE_INPUT); + return mGraphicBufferProducer; +} + +status_t MediaCodecSource::read( + MediaBuffer** buffer, const ReadOptions* /* options */) { + Mutex::Autolock autolock(mOutputBufferLock); + + *buffer = NULL; + while (mOutputBufferQueue.size() == 0 && !mEncodedReachedEOS) { + mOutputBufferCond.wait(mOutputBufferLock); + } + if (!mEncodedReachedEOS) { + *buffer = *mOutputBufferQueue.begin(); + mOutputBufferQueue.erase(mOutputBufferQueue.begin()); + return OK; + } + return mErrorCode; +} + +void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) { + buffer->setObserver(0); + buffer->release(); +} + +MediaCodecSource::MediaCodecSource( + const sp<ALooper> &looper, + const sp<AMessage> &outputFormat, + const sp<MediaSource> &source, + uint32_t flags) + : mLooper(looper), + mOutputFormat(outputFormat), + mMeta(new MetaData), + mFlags(flags), + mIsVideo(false), + mStarted(false), + mStopping(false), + mDoMoreWorkPending(false), + mPullerReachedEOS(false), + mFirstSampleTimeUs(-1ll), + mEncodedReachedEOS(false), + mErrorCode(OK) { + CHECK(mLooper != NULL); + + AString mime; + CHECK(mOutputFormat->findString("mime", &mime)); + + if (!strncasecmp("video/", mime.c_str(), 6)) { + mIsVideo = true; + } + + if (!(mFlags & FLAG_USE_SURFACE_INPUT)) { + mPuller = new Puller(source); + } +} + +MediaCodecSource::~MediaCodecSource() { + releaseEncoder(); + + mCodecLooper->stop(); + 
mLooper->unregisterHandler(mReflector->id()); +} + +status_t MediaCodecSource::init() { + status_t err = initEncoder(); + + if (err != OK) { + releaseEncoder(); + } + + return err; +} + +status_t MediaCodecSource::initEncoder() { + mReflector = new AHandlerReflector<MediaCodecSource>(this); + mLooper->registerHandler(mReflector); + + mCodecLooper = new ALooper; + mCodecLooper->setName("codec_looper"); + mCodecLooper->start(); + + if (mFlags & FLAG_USE_METADATA_INPUT) { + mOutputFormat->setInt32("store-metadata-in-buffers", 1); + } + + if (mFlags & FLAG_USE_SURFACE_INPUT) { + mOutputFormat->setInt32("create-input-buffers-suspended", 1); + } + + AString outputMIME; + CHECK(mOutputFormat->findString("mime", &outputMIME)); + + mEncoder = MediaCodec::CreateByType( + mCodecLooper, outputMIME.c_str(), true /* encoder */); + + if (mEncoder == NULL) { + return NO_INIT; + } + + ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str()); + + status_t err = mEncoder->configure( + mOutputFormat, + NULL /* nativeWindow */, + NULL /* crypto */, + MediaCodec::CONFIGURE_FLAG_ENCODE); + + if (err != OK) { + return err; + } + + mEncoder->getOutputFormat(&mOutputFormat); + convertMessageToMetaData(mOutputFormat, mMeta); + + if (mFlags & FLAG_USE_SURFACE_INPUT) { + CHECK(mIsVideo); + + err = mEncoder->createInputSurface(&mGraphicBufferProducer); + + if (err != OK) { + return err; + } + } + + err = mEncoder->start(); + + if (err != OK) { + return err; + } + + err = mEncoder->getInputBuffers(&mEncoderInputBuffers); + + if (err != OK) { + return err; + } + + err = mEncoder->getOutputBuffers(&mEncoderOutputBuffers); + + if (err != OK) { + return err; + } + + mEncodedReachedEOS = false; + mErrorCode = OK; + + return OK; +} + +void MediaCodecSource::releaseEncoder() { + if (mEncoder == NULL) { + return; + } + + mEncoder->release(); + mEncoder.clear(); + + while (!mInputBufferQueue.empty()) { + MediaBuffer *mbuf = *mInputBufferQueue.begin(); + 
mInputBufferQueue.erase(mInputBufferQueue.begin()); + if (mbuf != NULL) { + mbuf->release(); + } + } + + for (size_t i = 0; i < mEncoderInputBuffers.size(); ++i) { + sp<ABuffer> accessUnit = mEncoderInputBuffers.itemAt(i); + ReleaseMediaBufferReference(accessUnit); + } + + mEncoderInputBuffers.clear(); + mEncoderOutputBuffers.clear(); +} + +bool MediaCodecSource::reachedEOS() { + return mEncodedReachedEOS && ((mPuller == NULL) || mPullerReachedEOS); +} + +status_t MediaCodecSource::postSynchronouslyAndReturnError( + const sp<AMessage> &msg) { + sp<AMessage> response; + status_t err = msg->postAndAwaitResponse(&response); + + if (err != OK) { + return err; + } + + if (!response->findInt32("err", &err)) { + err = OK; + } + + return err; +} + +void MediaCodecSource::signalEOS(status_t err) { + if (!mEncodedReachedEOS) { + ALOGI("encoder (%s) reached EOS", mIsVideo ? "video" : "audio"); + { + Mutex::Autolock autoLock(mOutputBufferLock); + // release all unread media buffers + for (List<MediaBuffer*>::iterator it = mOutputBufferQueue.begin(); + it != mOutputBufferQueue.end(); it++) { + (*it)->release(); + } + mOutputBufferQueue.clear(); + mEncodedReachedEOS = true; + mErrorCode = err; + mOutputBufferCond.signal(); + } + + releaseEncoder(); + } + if (mStopping && reachedEOS()) { + ALOGI("MediaCodecSource (%s) fully stopped", + mIsVideo ? 
"video" : "audio"); + // posting reply to everyone that's waiting + List<uint32_t>::iterator it; + for (it = mStopReplyIDQueue.begin(); + it != mStopReplyIDQueue.end(); it++) { + (new AMessage)->postReply(*it); + } + mStopReplyIDQueue.clear(); + mStopping = false; + } +} + +void MediaCodecSource::suspend() { + CHECK(mFlags & FLAG_USE_SURFACE_INPUT); + if (mEncoder != NULL) { + sp<AMessage> params = new AMessage; + params->setInt32("drop-input-frames", true); + mEncoder->setParameters(params); + } +} + +void MediaCodecSource::resume(int64_t skipFramesBeforeUs) { + CHECK(mFlags & FLAG_USE_SURFACE_INPUT); + if (mEncoder != NULL) { + sp<AMessage> params = new AMessage; + params->setInt32("drop-input-frames", false); + if (skipFramesBeforeUs > 0) { + params->setInt64("skip-frames-before", skipFramesBeforeUs); + } + mEncoder->setParameters(params); + } +} + +void MediaCodecSource::scheduleDoMoreWork() { + if (mDoMoreWorkPending) { + return; + } + + mDoMoreWorkPending = true; + + if (mEncoderActivityNotify == NULL) { + mEncoderActivityNotify = new AMessage( + kWhatEncoderActivity, mReflector->id()); + } + mEncoder->requestActivityNotification(mEncoderActivityNotify); +} + +status_t MediaCodecSource::feedEncoderInputBuffers() { + while (!mInputBufferQueue.empty() + && !mAvailEncoderInputIndices.empty()) { + MediaBuffer* mbuf = *mInputBufferQueue.begin(); + mInputBufferQueue.erase(mInputBufferQueue.begin()); + + size_t bufferIndex = *mAvailEncoderInputIndices.begin(); + mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin()); + + int64_t timeUs = 0ll; + uint32_t flags = 0; + size_t size = 0; + + if (mbuf != NULL) { + CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs)); + + // push decoding time for video, or drift time for audio + if (mIsVideo) { + mDecodingTimeQueue.push_back(timeUs); + } else { +#if DEBUG_DRIFT_TIME + if (mFirstSampleTimeUs < 0ll) { + mFirstSampleTimeUs = timeUs; + } + + int64_t driftTimeUs = 0; + if 
(mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs) + && driftTimeUs) { + driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs; + } + mDriftTimeQueue.push_back(driftTimeUs); +#endif // DEBUG_DRIFT_TIME + } + + size = mbuf->size(); + + memcpy(mEncoderInputBuffers.itemAt(bufferIndex)->data(), + mbuf->data(), size); + + if (mIsVideo) { + // video encoder will release MediaBuffer when done + // with underlying data. + mEncoderInputBuffers.itemAt(bufferIndex)->meta() + ->setPointer("mediaBuffer", mbuf); + } else { + mbuf->release(); + } + } else { + flags = MediaCodec::BUFFER_FLAG_EOS; + } + + status_t err = mEncoder->queueInputBuffer( + bufferIndex, 0, size, timeUs, flags); + + if (err != OK) { + return err; + } + } + + return OK; +} + +status_t MediaCodecSource::doMoreWork() { + status_t err; + + if (!(mFlags & FLAG_USE_SURFACE_INPUT)) { + for (;;) { + size_t bufferIndex; + err = mEncoder->dequeueInputBuffer(&bufferIndex); + + if (err != OK) { + break; + } + + mAvailEncoderInputIndices.push_back(bufferIndex); + } + + feedEncoderInputBuffers(); + } + + for (;;) { + size_t bufferIndex; + size_t offset; + size_t size; + int64_t timeUs; + uint32_t flags; + native_handle_t* handle = NULL; + err = mEncoder->dequeueOutputBuffer( + &bufferIndex, &offset, &size, &timeUs, &flags); + + if (err != OK) { + if (err == INFO_FORMAT_CHANGED) { + continue; + } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) { + mEncoder->getOutputBuffers(&mEncoderOutputBuffers); + continue; + } + + if (err == -EAGAIN) { + err = OK; + } + break; + } + if (!(flags & MediaCodec::BUFFER_FLAG_EOS)) { + sp<ABuffer> outbuf = mEncoderOutputBuffers.itemAt(bufferIndex); + + MediaBuffer *mbuf = new MediaBuffer(outbuf->size()); + memcpy(mbuf->data(), outbuf->data(), outbuf->size()); + + if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) { + if (mIsVideo) { + int64_t decodingTimeUs; + if (mFlags & FLAG_USE_SURFACE_INPUT) { + // GraphicBufferSource is supposed to discard samples + // queued before start, and 
offset timeUs by start time + CHECK_GE(timeUs, 0ll); + // TODO: + // Decoding time for surface source is unavailable, + // use presentation time for now. May need to move + // this logic into MediaCodec. + decodingTimeUs = timeUs; + } else { + CHECK(!mDecodingTimeQueue.empty()); + decodingTimeUs = *(mDecodingTimeQueue.begin()); + mDecodingTimeQueue.erase(mDecodingTimeQueue.begin()); + } + mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs); + + ALOGV("[video] time %lld us (%.2f secs), dts/pts diff %lld", + timeUs, timeUs / 1E6, decodingTimeUs - timeUs); + } else { + int64_t driftTimeUs = 0; +#if DEBUG_DRIFT_TIME + CHECK(!mDriftTimeQueue.empty()); + driftTimeUs = *(mDriftTimeQueue.begin()); + mDriftTimeQueue.erase(mDriftTimeQueue.begin()); + mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs); +#endif // DEBUG_DRIFT_TIME + ALOGV("[audio] time %lld us (%.2f secs), drift %lld", + timeUs, timeUs / 1E6, driftTimeUs); + } + mbuf->meta_data()->setInt64(kKeyTime, timeUs); + } else { + mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true); + } + if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) { + mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true); + } + mbuf->setObserver(this); + mbuf->add_ref(); + + { + Mutex::Autolock autoLock(mOutputBufferLock); + mOutputBufferQueue.push_back(mbuf); + mOutputBufferCond.signal(); + } + } + + mEncoder->releaseOutputBuffer(bufferIndex); + + if (flags & MediaCodec::BUFFER_FLAG_EOS) { + err = ERROR_END_OF_STREAM; + break; + } + } + + return err; +} + +status_t MediaCodecSource::onStart(MetaData *params) { + if (mStopping) { + ALOGE("Failed to start while we're stopping"); + return INVALID_OPERATION; + } + + if (mStarted) { + ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio"); + if (mFlags & FLAG_USE_SURFACE_INPUT) { + resume(); + } else { + CHECK(mPuller != NULL); + mPuller->resume(); + } + return OK; + } + + ALOGI("MediaCodecSource (%s) starting", mIsVideo ? 
"video" : "audio"); + + status_t err = OK; + + if (mFlags & FLAG_USE_SURFACE_INPUT) { + int64_t startTimeUs; + if (!params || !params->findInt64(kKeyTime, &startTimeUs)) { + startTimeUs = -1ll; + } + resume(startTimeUs); + scheduleDoMoreWork(); + } else { + CHECK(mPuller != NULL); + sp<AMessage> notify = new AMessage( + kWhatPullerNotify, mReflector->id()); + err = mPuller->start(params, notify); + if (err != OK) { + mPullerReachedEOS = true; + return err; + } + } + + ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio"); + + mStarted = true; + return OK; +} + +void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatPullerNotify: + { + MediaBuffer *mbuf; + CHECK(msg->findPointer("accessUnit", (void**)&mbuf)); + + if (mbuf == NULL) { + ALOGI("puller (%s) reached EOS", + mIsVideo ? "video" : "audio"); + mPullerReachedEOS = true; + } + + if (mEncoder == NULL) { + ALOGV("got msg '%s' after encoder shutdown.", + msg->debugString().c_str()); + + if (mbuf != NULL) { + mbuf->release(); + } else { + signalEOS(); + } + break; + } + + mInputBufferQueue.push_back(mbuf); + + feedEncoderInputBuffers(); + scheduleDoMoreWork(); + + break; + } + case kWhatEncoderActivity: + { + mDoMoreWorkPending = false; + + if (mEncoder == NULL) { + break; + } + + status_t err = doMoreWork(); + + if (err == OK) { + scheduleDoMoreWork(); + } else { + // reached EOS, or error + signalEOS(err); + } + + break; + } + case kWhatStart: + { + uint32_t replyID; + CHECK(msg->senderAwaitsResponse(&replyID)); + + sp<RefBase> obj; + CHECK(msg->findObject("meta", &obj)); + MetaData *params = static_cast<MetaData *>(obj.get()); + + sp<AMessage> response = new AMessage; + response->setInt32("err", onStart(params)); + response->postReply(replyID); + break; + } + case kWhatStop: + { + ALOGI("MediaCodecSource (%s) stopping", mIsVideo ? 
"video" : "audio"); + + uint32_t replyID; + CHECK(msg->senderAwaitsResponse(&replyID)); + + if (reachedEOS()) { + // if we already reached EOS, reply and return now + ALOGI("MediaCodecSource (%s) already stopped", + mIsVideo ? "video" : "audio"); + (new AMessage)->postReply(replyID); + break; + } + + mStopReplyIDQueue.push_back(replyID); + if (mStopping) { + // nothing to do if we're already stopping, reply will be posted + // to all when we're stopped. + break; + } + + mStopping = true; + + // if using surface, signal source EOS and wait for EOS to come back. + // otherwise, release encoder and post EOS if haven't done already + if (mFlags & FLAG_USE_SURFACE_INPUT) { + mEncoder->signalEndOfInputStream(); + } else { + CHECK(mPuller != NULL); + mPuller->stopAsync(); + signalEOS(); + } + break; + } + case kWhatPause: + { + if (mFlags && FLAG_USE_SURFACE_INPUT) { + suspend(); + } else { + CHECK(mPuller != NULL); + mPuller->pause(); + } + break; + } + default: + TRESPASS(); + } +} + +} // namespace android diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp index 340cba7..c670bb4 100644 --- a/media/libstagefright/MediaDefs.cpp +++ b/media/libstagefright/MediaDefs.cpp @@ -36,6 +36,7 @@ const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II = "audio/mpeg-L2"; const char *MEDIA_MIMETYPE_AUDIO_AAC = "audio/mp4a-latm"; const char *MEDIA_MIMETYPE_AUDIO_QCELP = "audio/qcelp"; const char *MEDIA_MIMETYPE_AUDIO_VORBIS = "audio/vorbis"; +const char *MEDIA_MIMETYPE_AUDIO_OPUS = "audio/opus"; const char *MEDIA_MIMETYPE_AUDIO_G711_ALAW = "audio/g711-alaw"; const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW = "audio/g711-mlaw"; const char *MEDIA_MIMETYPE_AUDIO_RAW = "audio/raw"; diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp index d87e910..90335ee 100644 --- a/media/libstagefright/MediaMuxer.cpp +++ b/media/libstagefright/MediaMuxer.cpp @@ -16,6 +16,9 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "MediaMuxer" + +#include 
"webm/WebmWriter.h" + #include <utils/Log.h> #include <media/stagefright/MediaMuxer.h> @@ -36,19 +39,30 @@ namespace android { MediaMuxer::MediaMuxer(const char *path, OutputFormat format) - : mState(UNINITIALIZED) { + : mFormat(format), + mState(UNINITIALIZED) { if (format == OUTPUT_FORMAT_MPEG_4) { mWriter = new MPEG4Writer(path); + } else if (format == OUTPUT_FORMAT_WEBM) { + mWriter = new WebmWriter(path); + } + + if (mWriter != NULL) { mFileMeta = new MetaData; mState = INITIALIZED; } - } MediaMuxer::MediaMuxer(int fd, OutputFormat format) - : mState(UNINITIALIZED) { + : mFormat(format), + mState(UNINITIALIZED) { if (format == OUTPUT_FORMAT_MPEG_4) { mWriter = new MPEG4Writer(fd); + } else if (format == OUTPUT_FORMAT_WEBM) { + mWriter = new WebmWriter(fd); + } + + if (mWriter != NULL) { mFileMeta = new MetaData; mState = INITIALIZED; } @@ -109,8 +123,13 @@ status_t MediaMuxer::setLocation(int latitude, int longitude) { ALOGE("setLocation() must be called before start()."); return INVALID_OPERATION; } + if (mFormat != OUTPUT_FORMAT_MPEG_4) { + ALOGE("setLocation() is only supported for .mp4 output."); + return INVALID_OPERATION; + } + ALOGV("Setting location: latitude = %d, longitude = %d", latitude, longitude); - return mWriter->setGeoData(latitude, longitude); + return static_cast<MPEG4Writer*>(mWriter.get())->setGeoData(latitude, longitude); } status_t MediaMuxer::start() { diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp index 06e2d43..61cf0ad 100644 --- a/media/libstagefright/NuCachedSource2.cpp +++ b/media/libstagefright/NuCachedSource2.cpp @@ -213,7 +213,14 @@ NuCachedSource2::NuCachedSource2( mLooper->setName("NuCachedSource2"); mLooper->registerHandler(mReflector); - mLooper->start(); + + // Since it may not be obvious why our looper thread needs to be + // able to call into java since it doesn't appear to do so at all... 
+ // IMediaHTTPConnection may be (and most likely is) implemented in JAVA + // and a local JAVA IBinder will call directly into JNI methods. + // So whenever we call DataSource::readAt it may end up in a call to + // IMediaHTTPConnection::readAt and therefore call back into JAVA. + mLooper->start(false /* runOnCallingThread */, true /* canCallJava */); Mutex::Autolock autoLock(mLock); (new AMessage(kWhatFetchMore, mReflector->id()))->post(); diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp index 7bc7da2..64f56e9 100644 --- a/media/libstagefright/NuMediaExtractor.cpp +++ b/media/libstagefright/NuMediaExtractor.cpp @@ -58,7 +58,9 @@ NuMediaExtractor::~NuMediaExtractor() { } status_t NuMediaExtractor::setDataSource( - const char *path, const KeyedVector<String8, String8> *headers) { + const sp<IMediaHTTPService> &httpService, + const char *path, + const KeyedVector<String8, String8> *headers) { Mutex::Autolock autoLock(mLock); if (mImpl != NULL) { @@ -66,7 +68,7 @@ status_t NuMediaExtractor::setDataSource( } sp<DataSource> dataSource = - DataSource::CreateFromURI(path, headers); + DataSource::CreateFromURI(httpService, path, headers); if (dataSource == NULL) { return -ENOENT; diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 96c5a32..a879656 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -96,6 +96,7 @@ static sp<MediaSource> InstantiateSoftwareEncoder( #define CODEC_LOGI(x, ...) ALOGI("[%s] "x, mComponentName, ##__VA_ARGS__) #define CODEC_LOGV(x, ...) ALOGV("[%s] "x, mComponentName, ##__VA_ARGS__) +#define CODEC_LOGW(x, ...) ALOGW("[%s] "x, mComponentName, ##__VA_ARGS__) #define CODEC_LOGE(x, ...) 
ALOGE("[%s] "x, mComponentName, ##__VA_ARGS__) struct OMXCodecObserver : public BnOMXObserver { @@ -491,6 +492,13 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta) { CHECK(meta->findData(kKeyVorbisBooks, &type, &data, &size)); addCodecSpecificData(data, size); + } else if (meta->findData(kKeyOpusHeader, &type, &data, &size)) { + addCodecSpecificData(data, size); + + CHECK(meta->findData(kKeyOpusCodecDelay, &type, &data, &size)); + addCodecSpecificData(data, size); + CHECK(meta->findData(kKeyOpusSeekPreRoll, &type, &data, &size)); + addCodecSpecificData(data, size); } } @@ -1389,6 +1397,8 @@ void OMXCodec::setComponentRole( "audio_decoder.aac", "audio_encoder.aac" }, { MEDIA_MIMETYPE_AUDIO_VORBIS, "audio_decoder.vorbis", "audio_encoder.vorbis" }, + { MEDIA_MIMETYPE_AUDIO_OPUS, + "audio_decoder.opus", "audio_encoder.opus" }, { MEDIA_MIMETYPE_AUDIO_G711_MLAW, "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" }, { MEDIA_MIMETYPE_AUDIO_G711_ALAW, @@ -1796,21 +1806,42 @@ status_t OMXCodec::allocateOutputBuffersFromNativeWindow() { strerror(-err), -err); return err; } - - // XXX: Is this the right logic to use? It's not clear to me what the OMX - // buffer counts refer to - how do they account for the renderer holding on - // to buffers? - if (def.nBufferCountActual < def.nBufferCountMin + minUndequeuedBufs) { - OMX_U32 newBufferCount = def.nBufferCountMin + minUndequeuedBufs; + // FIXME: assume that surface is controlled by app (native window + // returns the number for the case when surface is not controlled by app) + // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported + // For now, try to allocate 1 more buffer, but don't fail if unsuccessful + + // Use conservative allocation while also trying to reduce starvation + // + // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the + // minimum needed for the consumer to be able to work + // 2. 
try to allocate two (2) additional buffers to reduce starvation from + // the consumer + // plus an extra buffer to account for incorrect minUndequeuedBufs + CODEC_LOGI("OMX-buffers: min=%u actual=%u undeq=%d+1", + def.nBufferCountMin, def.nBufferCountActual, minUndequeuedBufs); + + for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) { + OMX_U32 newBufferCount = + def.nBufferCountMin + minUndequeuedBufs + extraBuffers; def.nBufferCountActual = newBufferCount; err = mOMX->setParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - if (err != OK) { - CODEC_LOGE("setting nBufferCountActual to %lu failed: %d", - newBufferCount, err); + + if (err == OK) { + minUndequeuedBufs += extraBuffers; + break; + } + + CODEC_LOGW("setting nBufferCountActual to %lu failed: %d", + newBufferCount, err); + /* exit condition */ + if (extraBuffers == 0) { return err; } } + CODEC_LOGI("OMX-buffers: min=%u actual=%u undeq=%d+1", + def.nBufferCountMin, def.nBufferCountActual, minUndequeuedBufs); err = native_window_set_buffer_count( mNativeWindow.get(), def.nBufferCountActual); @@ -4127,6 +4158,7 @@ static const char *audioCodingTypeString(OMX_AUDIO_CODINGTYPE type) { "OMX_AUDIO_CodingMP3", "OMX_AUDIO_CodingSBC", "OMX_AUDIO_CodingVORBIS", + "OMX_AUDIO_CodingOPUS", "OMX_AUDIO_CodingWMA", "OMX_AUDIO_CodingRA", "OMX_AUDIO_CodingMIDI", diff --git a/media/libstagefright/SkipCutBuffer.cpp b/media/libstagefright/SkipCutBuffer.cpp index 773854f..e2e6d79 100644 --- a/media/libstagefright/SkipCutBuffer.cpp +++ b/media/libstagefright/SkipCutBuffer.cpp @@ -25,7 +25,7 @@ namespace android { SkipCutBuffer::SkipCutBuffer(int32_t skip, int32_t cut) { - mFrontPadding = skip; + mFrontPadding = mSkip = skip; mBackPadding = cut; mWriteHead = 0; mReadHead = 0; @@ -94,6 +94,7 @@ void SkipCutBuffer::submit(const sp<ABuffer>& buffer) { void SkipCutBuffer::clear() { mWriteHead = mReadHead = 0; + mFrontPadding = mSkip; } void SkipCutBuffer::write(const char *src, size_t num) 
{ diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp index 2b51a29..fe20835 100644 --- a/media/libstagefright/StagefrightMediaScanner.cpp +++ b/media/libstagefright/StagefrightMediaScanner.cpp @@ -24,6 +24,7 @@ #include <media/stagefright/StagefrightMediaScanner.h> +#include <media/IMediaHTTPService.h> #include <media/mediametadataretriever.h> #include <private/media/VideoFrame.h> @@ -147,7 +148,7 @@ MediaScanResult StagefrightMediaScanner::processFileInternal( status_t status; if (fd < 0) { // couldn't open it locally, maybe the media server can? - status = mRetriever->setDataSource(path); + status = mRetriever->setDataSource(NULL /* httpService */, path); } else { status = mRetriever->setDataSource(fd, 0, 0x7ffffffffffffffL); close(fd); diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp index fcd9a85..9475d05 100644 --- a/media/libstagefright/StagefrightMetadataRetriever.cpp +++ b/media/libstagefright/StagefrightMetadataRetriever.cpp @@ -21,6 +21,7 @@ #include "include/StagefrightMetadataRetriever.h" +#include <media/IMediaHTTPService.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/ColorConverter.h> #include <media/stagefright/DataSource.h> @@ -51,7 +52,9 @@ StagefrightMetadataRetriever::~StagefrightMetadataRetriever() { } status_t StagefrightMetadataRetriever::setDataSource( - const char *uri, const KeyedVector<String8, String8> *headers) { + const sp<IMediaHTTPService> &httpService, + const char *uri, + const KeyedVector<String8, String8> *headers) { ALOGV("setDataSource(%s)", uri); mParsedMetaData = false; @@ -59,7 +62,7 @@ status_t StagefrightMetadataRetriever::setDataSource( delete mAlbumArt; mAlbumArt = NULL; - mSource = DataSource::CreateFromURI(uri, headers); + mSource = DataSource::CreateFromURI(httpService, uri, headers); if (mSource == NULL) { ALOGE("Unable to create data source for 
'%s'.", uri); diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp index 686d03a..62aea36 100644 --- a/media/libstagefright/SurfaceMediaSource.cpp +++ b/media/libstagefright/SurfaceMediaSource.cpp @@ -54,9 +54,9 @@ SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeig ALOGE("Invalid dimensions %dx%d", bufferWidth, bufferHeight); } - mBufferQueue = new BufferQueue(); - mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight); - mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER | + BufferQueue::createBufferQueue(&mProducer, &mConsumer); + mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight); + mConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_HW_TEXTURE); sp<ISurfaceComposer> composer(ComposerService::getComposerService()); @@ -68,7 +68,7 @@ SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeig wp<ConsumerListener> listener = static_cast<ConsumerListener*>(this); sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener); - status_t err = mBufferQueue->consumerConnect(proxy, false); + status_t err = mConsumer->consumerConnect(proxy, false); if (err != NO_ERROR) { ALOGE("SurfaceMediaSource: error connecting to BufferQueue: %s (%d)", strerror(-err), err); @@ -108,7 +108,7 @@ void SurfaceMediaSource::dump( Mutex::Autolock lock(mMutex); result.append(buffer); - mBufferQueue->dump(result, ""); + mConsumer->dump(result, ""); } status_t SurfaceMediaSource::setFrameRate(int32_t fps) @@ -166,7 +166,7 @@ status_t SurfaceMediaSource::start(MetaData *params) CHECK_GT(mMaxAcquiredBufferCount, 1); status_t err = - mBufferQueue->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount); + mConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount); if (err != OK) { return err; @@ -205,6 +205,9 @@ status_t SurfaceMediaSource::stop() return OK; } + mStarted = false; + 
mFrameAvailableCondition.signal(); + while (mNumPendingBuffers > 0) { ALOGI("Still waiting for %d buffers to be returned.", mNumPendingBuffers); @@ -218,11 +221,9 @@ status_t SurfaceMediaSource::stop() mMediaBuffersAvailableCondition.wait(mMutex); } - mStarted = false; - mFrameAvailableCondition.signal(); mMediaBuffersAvailableCondition.signal(); - return mBufferQueue->consumerDisconnect(); + return mConsumer->consumerDisconnect(); } sp<MetaData> SurfaceMediaSource::getFormat() @@ -292,7 +293,7 @@ status_t SurfaceMediaSource::read( // wait here till the frames come in from the client side while (mStarted) { - status_t err = mBufferQueue->acquireBuffer(&item, 0); + status_t err = mConsumer->acquireBuffer(&item, 0); if (err == BufferQueue::NO_BUFFER_AVAILABLE) { // wait for a buffer to be queued mFrameAvailableCondition.wait(mMutex); @@ -315,7 +316,7 @@ status_t SurfaceMediaSource::read( if (mStartTimeNs > 0) { if (item.mTimestamp < mStartTimeNs) { // This frame predates start of record, discard - mBufferQueue->releaseBuffer( + mConsumer->releaseBuffer( item.mBuf, item.mFrameNumber, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); continue; @@ -415,7 +416,7 @@ void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) { ALOGV("Slot %d returned, matches handle = %p", id, mSlots[id].mGraphicBuffer->handle); - mBufferQueue->releaseBuffer(id, mSlots[id].mFrameNumber, + mConsumer->releaseBuffer(id, mSlots[id].mFrameNumber, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); @@ -476,4 +477,8 @@ void SurfaceMediaSource::onBuffersReleased() { } } +void SurfaceMediaSource::onSidebandStreamChanged() { + ALOG_ASSERT(false, "SurfaceMediaSource can't consume sideband streams"); +} + } // end of namespace android diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp index 216a329..047fac7 100644 --- a/media/libstagefright/Utils.cpp +++ b/media/libstagefright/Utils.cpp @@ -17,6 +17,7 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "Utils" #include 
<utils/Log.h> +#include <ctype.h> #include "include/ESDS.h" @@ -251,6 +252,13 @@ status_t convertMetaDataToMessage( buffer->meta()->setInt32("csd", true); buffer->meta()->setInt64("timeUs", 0); msg->setBuffer("csd-1", buffer); + } else if (meta->findData(kKeyOpusHeader, &type, &data, &size)) { + sp<ABuffer> buffer = new ABuffer(size); + memcpy(buffer->data(), data, size); + + buffer->meta()->setInt32("csd", true); + buffer->meta()->setInt64("timeUs", 0); + msg->setBuffer("csd-0", buffer); } *format = msg; @@ -452,6 +460,11 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) { } } + int32_t timeScale; + if (msg->findInt32("time-scale", &timeScale)) { + meta->setInt32(kKeyTimeScale, timeScale); + } + // XXX TODO add whatever other keys there are #if 0 @@ -523,6 +536,7 @@ static const struct mime_conv_t mimeLookup[] = { { MEDIA_MIMETYPE_AUDIO_AMR_WB, AUDIO_FORMAT_AMR_WB }, { MEDIA_MIMETYPE_AUDIO_AAC, AUDIO_FORMAT_AAC }, { MEDIA_MIMETYPE_AUDIO_VORBIS, AUDIO_FORMAT_VORBIS }, + { MEDIA_MIMETYPE_AUDIO_OPUS, AUDIO_FORMAT_OPUS}, { 0, AUDIO_FORMAT_INVALID } }; @@ -615,5 +629,40 @@ bool canOffloadStream(const sp<MetaData>& meta, bool hasVideo, return AudioSystem::isOffloadSupported(info); } +AString uriDebugString(const AString &uri, bool incognito) { + if (incognito) { + return AString("<URI suppressed>"); + } + + char prop[PROPERTY_VALUE_MAX]; + if (property_get("media.stagefright.log-uri", prop, "false") && + (!strcmp(prop, "1") || !strcmp(prop, "true"))) { + return uri; + } + + // find scheme + AString scheme; + const char *chars = uri.c_str(); + for (size_t i = 0; i < uri.size(); i++) { + const char c = chars[i]; + if (!isascii(c)) { + break; + } else if (isalpha(c)) { + continue; + } else if (i == 0) { + // first character must be a letter + break; + } else if (isdigit(c) || c == '+' || c == '.' 
|| c =='-') { + continue; + } else if (c != ':') { + break; + } + scheme = AString(uri, 0, i); + scheme.append("://<suppressed>"); + return scheme; + } + return AString("<no-scheme URI suppressed>"); +} + } // namespace android diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp index c6ac0da..38a1f6b 100644 --- a/media/libstagefright/avc_utils.cpp +++ b/media/libstagefright/avc_utils.cpp @@ -40,6 +40,25 @@ unsigned parseUE(ABitReader *br) { return x + (1u << numZeroes) - 1; } +signed parseSE(ABitReader *br) { + unsigned codeNum = parseUE(br); + + return (codeNum & 1) ? (codeNum + 1) / 2 : -(codeNum / 2); +} + +static void skipScalingList(ABitReader *br, size_t sizeOfScalingList) { + size_t lastScale = 8; + size_t nextScale = 8; + for (size_t j = 0; j < sizeOfScalingList; ++j) { + if (nextScale != 0) { + signed delta_scale = parseSE(br); + nextScale = (lastScale + delta_scale + 256) % 256; + } + + lastScale = (nextScale == 0) ? lastScale : nextScale; + } +} + // Determine video dimensions from the sequence parameterset. void FindAVCDimensions( const sp<ABuffer> &seqParamSet, @@ -63,7 +82,24 @@ void FindAVCDimensions( parseUE(&br); // bit_depth_luma_minus8 parseUE(&br); // bit_depth_chroma_minus8 br.skipBits(1); // qpprime_y_zero_transform_bypass_flag - CHECK_EQ(br.getBits(1), 0u); // seq_scaling_matrix_present_flag + + if (br.getBits(1)) { // seq_scaling_matrix_present_flag + for (size_t i = 0; i < 8; ++i) { + if (br.getBits(1)) { // seq_scaling_list_present_flag[i] + + // WARNING: the code below has not ever been exercised... + // need a real-world example. + + if (i < 6) { + // ScalingList4x4[i],16,... + skipScalingList(&br, 16); + } else { + // ScalingList8x8[i-6],64,... 
+ skipScalingList(&br, 64); + } + } + } + } } parseUE(&br); // log2_max_frame_num_minus4 diff --git a/media/libstagefright/chromium_http/Android.mk b/media/libstagefright/chromium_http/Android.mk deleted file mode 100644 index 109e3fe..0000000 --- a/media/libstagefright/chromium_http/Android.mk +++ /dev/null @@ -1,39 +0,0 @@ -LOCAL_PATH:= $(call my-dir) - -ifneq ($(TARGET_BUILD_PDK), true) -include $(CLEAR_VARS) - -LOCAL_SRC_FILES:= \ - DataUriSource.cpp \ - ChromiumHTTPDataSource.cpp \ - support.cpp \ - chromium_http_stub.cpp - -LOCAL_C_INCLUDES:= \ - $(TOP)/frameworks/av/media/libstagefright \ - $(TOP)/frameworks/native/include/media/openmax \ - external/chromium \ - external/chromium/android - -LOCAL_CFLAGS += -Wno-multichar - -LOCAL_SHARED_LIBRARIES += \ - libbinder \ - libstlport \ - libchromium_net \ - libutils \ - libbinder \ - libcutils \ - liblog \ - libstagefright_foundation \ - libstagefright \ - libdrmframework - -include external/stlport/libstlport.mk - -LOCAL_MODULE:= libstagefright_chromium_http - -LOCAL_MODULE_TAGS := optional - -include $(BUILD_SHARED_LIBRARY) -endif diff --git a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp b/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp deleted file mode 100644 index 7e5c280..0000000 --- a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp +++ /dev/null @@ -1,355 +0,0 @@ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "ChromiumHTTPDataSource" -#include <media/stagefright/foundation/ADebug.h> - -#include "include/ChromiumHTTPDataSource.h" - -#include <media/stagefright/foundation/ALooper.h> -#include <media/stagefright/MediaErrors.h> - -#include "support.h" - -#include <cutils/properties.h> // for property_get - -namespace android { - -ChromiumHTTPDataSource::ChromiumHTTPDataSource(uint32_t flags) - : mFlags(flags), - mState(DISCONNECTED), - mDelegate(new SfDelegate), - mCurrentOffset(0), - mIOResult(OK), - mContentSize(-1), - mDecryptHandle(NULL), - mDrmManagerClient(NULL) { - mDelegate->setOwner(this); -} - -ChromiumHTTPDataSource::~ChromiumHTTPDataSource() { - disconnect(); - - delete mDelegate; - mDelegate = NULL; - - clearDRMState_l(); - - if (mDrmManagerClient != NULL) { - delete mDrmManagerClient; - mDrmManagerClient = NULL; - } -} - -status_t ChromiumHTTPDataSource::connect( - const char *uri, - const KeyedVector<String8, String8> *headers, - off64_t offset) { - Mutex::Autolock autoLock(mLock); - - uid_t uid; - if (getUID(&uid)) { - mDelegate->setUID(uid); - } - -#if defined(LOG_NDEBUG) && !LOG_NDEBUG - LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG, "connect on behalf of uid %d", uid); -#endif - - return connect_l(uri, headers, offset); -} - -status_t ChromiumHTTPDataSource::connect_l( - const char *uri, - const KeyedVector<String8, String8> *headers, - off64_t offset) { - if (mState != DISCONNECTED) { - disconnect_l(); - } - -#if defined(LOG_NDEBUG) && !LOG_NDEBUG - LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG, - "connect to <URL suppressed> @%lld", offset); -#endif - - mURI = uri; - mContentType = String8("application/octet-stream"); - - if (headers != NULL) { - mHeaders = *headers; - } else { - mHeaders.clear(); - } - - mState = CONNECTING; - mContentSize = -1; - mCurrentOffset = offset; - - 
mDelegate->initiateConnection(mURI.c_str(), &mHeaders, offset); - - while (mState == CONNECTING || mState == DISCONNECTING) { - mCondition.wait(mLock); - } - - return mState == CONNECTED ? OK : mIOResult; -} - -void ChromiumHTTPDataSource::onRedirect(const char *url) { - Mutex::Autolock autoLock(mLock); - mURI = url; -} - -void ChromiumHTTPDataSource::onConnectionEstablished( - int64_t contentSize, const char *contentType) { - Mutex::Autolock autoLock(mLock); - - if (mState != CONNECTING) { - // We may have initiated disconnection. - CHECK_EQ(mState, DISCONNECTING); - return; - } - - mState = CONNECTED; - mContentSize = (contentSize < 0) ? -1 : contentSize + mCurrentOffset; - mContentType = String8(contentType); - mCondition.broadcast(); -} - -void ChromiumHTTPDataSource::onConnectionFailed(status_t err) { - Mutex::Autolock autoLock(mLock); - mState = DISCONNECTED; - mCondition.broadcast(); - - // mURI.clear(); - - mIOResult = err; -} - -void ChromiumHTTPDataSource::disconnect() { - Mutex::Autolock autoLock(mLock); - disconnect_l(); -} - -void ChromiumHTTPDataSource::disconnect_l() { - if (mState == DISCONNECTED) { - return; - } - - mState = DISCONNECTING; - mIOResult = -EINTR; - - mDelegate->initiateDisconnect(); - - while (mState == DISCONNECTING) { - mCondition.wait(mLock); - } - - CHECK_EQ((int)mState, (int)DISCONNECTED); -} - -status_t ChromiumHTTPDataSource::initCheck() const { - Mutex::Autolock autoLock(mLock); - - return mState == CONNECTED ? 
OK : NO_INIT; -} - -ssize_t ChromiumHTTPDataSource::readAt(off64_t offset, void *data, size_t size) { - Mutex::Autolock autoLock(mLock); - - if (mState != CONNECTED) { - return INVALID_OPERATION; - } - -#if 0 - char value[PROPERTY_VALUE_MAX]; - if (property_get("media.stagefright.disable-net", value, 0) - && (!strcasecmp(value, "true") || !strcmp(value, "1"))) { - LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Simulating that the network is down."); - disconnect_l(); - return ERROR_IO; - } -#endif - - if (offset != mCurrentOffset) { - AString tmp = mURI; - KeyedVector<String8, String8> tmpHeaders = mHeaders; - - disconnect_l(); - - status_t err = connect_l(tmp.c_str(), &tmpHeaders, offset); - - if (err != OK) { - return err; - } - } - - mState = READING; - - int64_t startTimeUs = ALooper::GetNowUs(); - - mDelegate->initiateRead(data, size); - - while (mState == READING) { - mCondition.wait(mLock); - } - - if (mIOResult < OK) { - return mIOResult; - } - - if (mState == CONNECTED) { - int64_t delayUs = ALooper::GetNowUs() - startTimeUs; - - // The read operation was successful, mIOResult contains - // the number of bytes read. 
- addBandwidthMeasurement(mIOResult, delayUs); - - mCurrentOffset += mIOResult; - return mIOResult; - } - - return ERROR_IO; -} - -void ChromiumHTTPDataSource::onReadCompleted(ssize_t size) { - Mutex::Autolock autoLock(mLock); - - mIOResult = size; - - if (mState == READING) { - mState = CONNECTED; - mCondition.broadcast(); - } -} - -status_t ChromiumHTTPDataSource::getSize(off64_t *size) { - Mutex::Autolock autoLock(mLock); - - if (mContentSize < 0) { - return ERROR_UNSUPPORTED; - } - - *size = mContentSize; - - return OK; -} - -uint32_t ChromiumHTTPDataSource::flags() { - return kWantsPrefetching | kIsHTTPBasedSource; -} - -// static -void ChromiumHTTPDataSource::InitiateRead( - ChromiumHTTPDataSource *me, void *data, size_t size) { - me->initiateRead(data, size); -} - -void ChromiumHTTPDataSource::initiateRead(void *data, size_t size) { - mDelegate->initiateRead(data, size); -} - -void ChromiumHTTPDataSource::onDisconnectComplete() { - Mutex::Autolock autoLock(mLock); - CHECK_EQ((int)mState, (int)DISCONNECTING); - - mState = DISCONNECTED; - // mURI.clear(); - mIOResult = -ENOTCONN; - - mCondition.broadcast(); -} - -sp<DecryptHandle> ChromiumHTTPDataSource::DrmInitialization(const char* mime) { - Mutex::Autolock autoLock(mLock); - - if (mDrmManagerClient == NULL) { - mDrmManagerClient = new DrmManagerClient(); - } - - if (mDrmManagerClient == NULL) { - return NULL; - } - - if (mDecryptHandle == NULL) { - /* Note if redirect occurs, mUri is the redirect uri instead of the - * original one - */ - mDecryptHandle = mDrmManagerClient->openDecryptSession( - String8(mURI.c_str()), mime); - } - - if (mDecryptHandle == NULL) { - delete mDrmManagerClient; - mDrmManagerClient = NULL; - } - - return mDecryptHandle; -} - -void ChromiumHTTPDataSource::getDrmInfo( - sp<DecryptHandle> &handle, DrmManagerClient **client) { - Mutex::Autolock autoLock(mLock); - - handle = mDecryptHandle; - *client = mDrmManagerClient; -} - -String8 ChromiumHTTPDataSource::getUri() { - 
Mutex::Autolock autoLock(mLock); - - return String8(mURI.c_str()); -} - -String8 ChromiumHTTPDataSource::getMIMEType() const { - Mutex::Autolock autoLock(mLock); - - return mContentType; -} - -void ChromiumHTTPDataSource::clearDRMState_l() { - if (mDecryptHandle != NULL) { - // To release mDecryptHandle - CHECK(mDrmManagerClient); - mDrmManagerClient->closeDecryptSession(mDecryptHandle); - mDecryptHandle = NULL; - } -} - -status_t ChromiumHTTPDataSource::reconnectAtOffset(off64_t offset) { - Mutex::Autolock autoLock(mLock); - - if (mURI.empty()) { - return INVALID_OPERATION; - } - - LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Reconnecting..."); - status_t err = connect_l(mURI.c_str(), &mHeaders, offset); - if (err != OK) { - LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Reconnect failed w/ err 0x%08x", err); - } - - return err; -} - -// static -status_t ChromiumHTTPDataSource::UpdateProxyConfig( - const char *host, int32_t port, const char *exclusionList) { - return SfDelegate::UpdateProxyConfig(host, port, exclusionList); -} - -} // namespace android - diff --git a/media/libstagefright/chromium_http/DataUriSource.cpp b/media/libstagefright/chromium_http/DataUriSource.cpp deleted file mode 100644 index ecf3fa1..0000000 --- a/media/libstagefright/chromium_http/DataUriSource.cpp +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#include <include/DataUriSource.h> - -#include <net/base/data_url.h> -#include <googleurl/src/gurl.h> - - -namespace android { - -DataUriSource::DataUriSource(const char *uri) : - mDataUri(uri), - mInited(NO_INIT) { - - // Copy1: const char *uri -> String8 mDataUri. - std::string mimeTypeStr, unusedCharsetStr, dataStr; - // Copy2: String8 mDataUri -> std::string - const bool ret = net::DataURL::Parse( - GURL(std::string(mDataUri.string())), - &mimeTypeStr, &unusedCharsetStr, &dataStr); - // Copy3: std::string dataStr -> AString mData - mData.setTo(dataStr.data(), dataStr.length()); - mInited = ret ? OK : UNKNOWN_ERROR; - - // The chromium data url implementation defaults to using "text/plain" - // if no mime type is specified. We prefer to leave this unspecified - // instead, since the mime type is sniffed in most cases. - if (mimeTypeStr != "text/plain") { - mMimeType = mimeTypeStr.c_str(); - } -} - -ssize_t DataUriSource::readAt(off64_t offset, void *out, size_t size) { - if (mInited != OK) { - return mInited; - } - - const off64_t length = mData.size(); - if (offset >= length) { - return UNKNOWN_ERROR; - } - - const char *dataBuf = mData.c_str(); - const size_t bytesToCopy = - offset + size >= length ? (length - offset) : size; - - if (bytesToCopy > 0) { - memcpy(out, dataBuf + offset, bytesToCopy); - } - - return bytesToCopy; -} - -} // namespace android diff --git a/media/libstagefright/chromium_http/chromium_http_stub.cpp b/media/libstagefright/chromium_http/chromium_http_stub.cpp deleted file mode 100644 index 289f6de..0000000 --- a/media/libstagefright/chromium_http/chromium_http_stub.cpp +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include <dlfcn.h> - -#include <include/chromium_http_stub.h> -#include <include/ChromiumHTTPDataSource.h> -#include <include/DataUriSource.h> - -namespace android { - -HTTPBase *createChromiumHTTPDataSource(uint32_t flags) { - return new ChromiumHTTPDataSource(flags); -} - -status_t UpdateChromiumHTTPDataSourceProxyConfig( - const char *host, int32_t port, const char *exclusionList) { - return ChromiumHTTPDataSource::UpdateProxyConfig(host, port, exclusionList); -} - -DataSource *createDataUriSource(const char *uri) { - return new DataUriSource(uri); -} - -} diff --git a/media/libstagefright/chromium_http/support.cpp b/media/libstagefright/chromium_http/support.cpp deleted file mode 100644 index 3de4877..0000000 --- a/media/libstagefright/chromium_http/support.cpp +++ /dev/null @@ -1,659 +0,0 @@ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "ChromiumHTTPDataSourceSupport" -#include <utils/Log.h> - -#include <media/stagefright/foundation/AString.h> - -#include "support.h" - -#include "android/net/android_network_library_impl.h" -#include "base/logging.h" -#include "base/threading/thread.h" -#include "net/base/cert_verifier.h" -#include "net/base/cookie_monster.h" -#include "net/base/host_resolver.h" -#include "net/base/ssl_config_service.h" -#include "net/http/http_auth_handler_factory.h" -#include "net/http/http_cache.h" -#include "net/proxy/proxy_config_service_android.h" - -#include "include/ChromiumHTTPDataSource.h" -#include <arpa/inet.h> -#include <binder/Parcel.h> -#include <cutils/log.h> -#include <media/stagefright/MediaErrors.h> -#include <media/stagefright/Utils.h> -#include <string> - -#include <utils/Errors.h> -#include <binder/IInterface.h> -#include <binder/IServiceManager.h> - -namespace android { - -// must be kept in sync with interface defined in IAudioService.aidl -class IAudioService : public IInterface -{ -public: - DECLARE_META_INTERFACE(AudioService); - - virtual int verifyX509CertChain( - const std::vector<std::string>& cert_chain, - const std::string& hostname, - const std::string& auth_type) = 0; -}; - -class BpAudioService : public BpInterface<IAudioService> -{ -public: - BpAudioService(const sp<IBinder>& impl) - : BpInterface<IAudioService>(impl) - { - } - - virtual int verifyX509CertChain( - const std::vector<std::string>& cert_chain, - const std::string& hostname, - const std::string& auth_type) - { - Parcel data, reply; - data.writeInterfaceToken(IAudioService::getInterfaceDescriptor()); - - // The vector of std::string we get isn't really a vector of strings, - // but rather a vector of binary certificate data. If we try to pass - // it to Java language code as a string, it ends up mangled on the other - // side, so send them as bytes instead. 
- // Since we can't send an array of byte arrays, send a single array, - // which will be split out by the recipient. - - int numcerts = cert_chain.size(); - data.writeInt32(numcerts); - size_t total = 0; - for (int i = 0; i < numcerts; i++) { - total += cert_chain[i].size(); - } - size_t bytesize = total + numcerts * 4; - uint8_t *bytes = (uint8_t*) malloc(bytesize); - if (!bytes) { - return 5; // SSL_INVALID - } - ALOGV("%d certs: %d -> %d", numcerts, total, bytesize); - - int offset = 0; - for (int i = 0; i < numcerts; i++) { - int32_t certsize = cert_chain[i].size(); - // store this in a known order, which just happens to match the default - // byte order of a java ByteBuffer - int32_t bigsize = htonl(certsize); - ALOGV("cert %d, size %d", i, certsize); - memcpy(bytes + offset, &bigsize, sizeof(bigsize)); - offset += sizeof(bigsize); - memcpy(bytes + offset, cert_chain[i].data(), certsize); - offset += certsize; - } - data.writeByteArray(bytesize, bytes); - free(bytes); - data.writeString16(String16(hostname.c_str())); - data.writeString16(String16(auth_type.c_str())); - - int32_t result; - if (remote()->transact(IBinder::FIRST_CALL_TRANSACTION, data, &reply) != NO_ERROR - || reply.readExceptionCode() < 0 || reply.readInt32(&result) != NO_ERROR) { - return 5; // SSL_INVALID; - } - return result; - } - -}; - -IMPLEMENT_META_INTERFACE(AudioService, "android.media.IAudioService"); - - -static Mutex gNetworkThreadLock; -static base::Thread *gNetworkThread = NULL; -static scoped_refptr<SfRequestContext> gReqContext; -static scoped_ptr<net::NetworkChangeNotifier> gNetworkChangeNotifier; - -bool logMessageHandler( - int severity, - const char* file, - int line, - size_t message_start, - const std::string& str) { - int androidSeverity = ANDROID_LOG_VERBOSE; - switch(severity) { - case logging::LOG_FATAL: - androidSeverity = ANDROID_LOG_FATAL; - break; - case logging::LOG_ERROR_REPORT: - case logging::LOG_ERROR: - androidSeverity = ANDROID_LOG_ERROR; - break; - case 
logging::LOG_WARNING: - androidSeverity = ANDROID_LOG_WARN; - break; - default: - androidSeverity = ANDROID_LOG_VERBOSE; - break; - } - android_printLog(androidSeverity, "chromium-libstagefright", - "%s:%d: %s", file, line, str.c_str()); - return false; -} - -struct AutoPrioritySaver { - AutoPrioritySaver() - : mTID(androidGetTid()), - mPrevPriority(androidGetThreadPriority(mTID)) { - androidSetThreadPriority(mTID, ANDROID_PRIORITY_NORMAL); - } - - ~AutoPrioritySaver() { - androidSetThreadPriority(mTID, mPrevPriority); - } - -private: - pid_t mTID; - int mPrevPriority; - - DISALLOW_EVIL_CONSTRUCTORS(AutoPrioritySaver); -}; - -static void InitializeNetworkThreadIfNecessary() { - Mutex::Autolock autoLock(gNetworkThreadLock); - - if (gNetworkThread == NULL) { - // Make sure any threads spawned by the chromium framework are - // running at normal priority instead of inheriting this thread's. - AutoPrioritySaver saver; - - gNetworkThread = new base::Thread("network"); - base::Thread::Options options; - options.message_loop_type = MessageLoop::TYPE_IO; - CHECK(gNetworkThread->StartWithOptions(options)); - - gReqContext = new SfRequestContext; - - gNetworkChangeNotifier.reset(net::NetworkChangeNotifier::Create()); - - net::AndroidNetworkLibrary::RegisterSharedInstance( - new SfNetworkLibrary); - logging::SetLogMessageHandler(logMessageHandler); - } -} - -static void MY_LOGI(const char *s) { - LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "%s", s); -} - -static void MY_LOGV(const char *s) { -#if !defined(LOG_NDEBUG) || LOG_NDEBUG == 0 - LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG, "%s", s); -#endif -} - -SfNetLog::SfNetLog() - : mNextID(1) { -} - -void SfNetLog::AddEntry( - EventType type, - const base::TimeTicks &time, - const Source &source, - EventPhase phase, - EventParameters *params) { -#if 0 - MY_LOGI(StringPrintf( - "AddEntry time=%s type=%s source=%s phase=%s\n", - TickCountToString(time).c_str(), - EventTypeToString(type), - SourceTypeToString(source.type), - 
EventPhaseToString(phase)).c_str()); -#endif -} - -uint32 SfNetLog::NextID() { - return mNextID++; -} - -net::NetLog::LogLevel SfNetLog::GetLogLevel() const { - return LOG_BASIC; -} - -//////////////////////////////////////////////////////////////////////////////// - -SfRequestContext::SfRequestContext() { - mUserAgent = MakeUserAgent().c_str(); - - set_net_log(new SfNetLog()); - - set_host_resolver( - net::CreateSystemHostResolver( - net::HostResolver::kDefaultParallelism, - NULL /* resolver_proc */, - net_log())); - - set_ssl_config_service( - net::SSLConfigService::CreateSystemSSLConfigService()); - - mProxyConfigService = new net::ProxyConfigServiceAndroid; - - set_proxy_service(net::ProxyService::CreateWithoutProxyResolver( - mProxyConfigService, net_log())); - - set_http_transaction_factory(new net::HttpCache( - host_resolver(), - new net::CertVerifier(), - dnsrr_resolver(), - dns_cert_checker(), - proxy_service(), - ssl_config_service(), - net::HttpAuthHandlerFactory::CreateDefault(host_resolver()), - network_delegate(), - net_log(), - NULL)); // backend_factory - - set_cookie_store(new net::CookieMonster(NULL, NULL)); -} - -const std::string &SfRequestContext::GetUserAgent(const GURL &url) const { - return mUserAgent; -} - -status_t SfRequestContext::updateProxyConfig( - const char *host, int32_t port, const char *exclusionList) { - Mutex::Autolock autoLock(mProxyConfigLock); - - if (host == NULL || *host == '\0') { - MY_LOGV("updateProxyConfig NULL"); - - std::string proxy; - std::string exList; - mProxyConfigService->UpdateProxySettings(proxy, exList); - } else { -#if !defined(LOG_NDEBUG) || LOG_NDEBUG == 0 - LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG, - "updateProxyConfig %s:%d, exclude '%s'", - host, port, exclusionList); -#endif - - std::string proxy = StringPrintf("%s:%d", host, port).c_str(); - std::string exList = exclusionList; - mProxyConfigService->UpdateProxySettings(proxy, exList); - } - - return OK; -} - 
-//////////////////////////////////////////////////////////////////////////////// - -SfNetworkLibrary::SfNetworkLibrary() {} - -SfNetworkLibrary::VerifyResult SfNetworkLibrary::VerifyX509CertChain( - const std::vector<std::string>& cert_chain, - const std::string& hostname, - const std::string& auth_type) { - - sp<IBinder> binder = - defaultServiceManager()->checkService(String16("audio")); - if (binder == 0) { - ALOGW("Thread cannot connect to the audio service"); - } else { - sp<IAudioService> service = interface_cast<IAudioService>(binder); - int code = service->verifyX509CertChain(cert_chain, hostname, auth_type); - ALOGV("verified: %d", code); - if (code == -1) { - return VERIFY_OK; - } else if (code == 2) { // SSL_IDMISMATCH - return VERIFY_BAD_HOSTNAME; - } else if (code == 3) { // SSL_UNTRUSTED - return VERIFY_NO_TRUSTED_ROOT; - } - } - return VERIFY_INVOCATION_ERROR; -} - -//////////////////////////////////////////////////////////////////////////////// - -SfDelegate::SfDelegate() - : mOwner(NULL), - mURLRequest(NULL), - mReadBuffer(new net::IOBufferWithSize(8192)), - mNumBytesRead(0), - mNumBytesTotal(0), - mDataDestination(NULL), - mAtEOS(false) { - InitializeNetworkThreadIfNecessary(); -} - -SfDelegate::~SfDelegate() { - CHECK(mURLRequest == NULL); -} - -// static -status_t SfDelegate::UpdateProxyConfig( - const char *host, int32_t port, const char *exclusionList) { - InitializeNetworkThreadIfNecessary(); - - return gReqContext->updateProxyConfig(host, port, exclusionList); -} - -void SfDelegate::setOwner(ChromiumHTTPDataSource *owner) { - mOwner = owner; -} - -void SfDelegate::setUID(uid_t uid) { - gReqContext->setUID(uid); -} - -bool SfDelegate::getUID(uid_t *uid) const { - return gReqContext->getUID(uid); -} - -void SfDelegate::OnReceivedRedirect( - net::URLRequest *request, const GURL &new_url, bool *defer_redirect) { - MY_LOGV("OnReceivedRedirect"); - mOwner->onRedirect(new_url.spec().c_str()); -} - -void SfDelegate::OnAuthRequired( - 
net::URLRequest *request, net::AuthChallengeInfo *auth_info) { - MY_LOGV("OnAuthRequired"); - - inherited::OnAuthRequired(request, auth_info); -} - -void SfDelegate::OnCertificateRequested( - net::URLRequest *request, net::SSLCertRequestInfo *cert_request_info) { - MY_LOGV("OnCertificateRequested"); - - inherited::OnCertificateRequested(request, cert_request_info); -} - -void SfDelegate::OnSSLCertificateError( - net::URLRequest *request, int cert_error, net::X509Certificate *cert) { - fprintf(stderr, "OnSSLCertificateError cert_error=%d\n", cert_error); - - inherited::OnSSLCertificateError(request, cert_error, cert); -} - -void SfDelegate::OnGetCookies(net::URLRequest *request, bool blocked_by_policy) { - MY_LOGV("OnGetCookies"); -} - -void SfDelegate::OnSetCookie( - net::URLRequest *request, - const std::string &cookie_line, - const net::CookieOptions &options, - bool blocked_by_policy) { - MY_LOGV("OnSetCookie"); -} - -void SfDelegate::OnResponseStarted(net::URLRequest *request) { - if (request->status().status() != net::URLRequestStatus::SUCCESS) { - MY_LOGI(StringPrintf( - "Request failed with status %d and os_error %d", - request->status().status(), - request->status().os_error()).c_str()); - - delete mURLRequest; - mURLRequest = NULL; - - mOwner->onConnectionFailed(ERROR_IO); - return; - } else if (mRangeRequested && request->GetResponseCode() != 206) { - MY_LOGI(StringPrintf( - "We requested a content range, but server didn't " - "support that. 
(responded with %d)", - request->GetResponseCode()).c_str()); - - delete mURLRequest; - mURLRequest = NULL; - - mOwner->onConnectionFailed(-EPIPE); - return; - } else if ((request->GetResponseCode() / 100) != 2) { - MY_LOGI(StringPrintf( - "Server responded with http status %d", - request->GetResponseCode()).c_str()); - - delete mURLRequest; - mURLRequest = NULL; - - mOwner->onConnectionFailed(ERROR_IO); - return; - } - - MY_LOGV("OnResponseStarted"); - - std::string headers; - request->GetAllResponseHeaders(&headers); - - MY_LOGV(StringPrintf("response headers: %s", headers.c_str()).c_str()); - - std::string contentType; - request->GetResponseHeaderByName("Content-Type", &contentType); - - mOwner->onConnectionEstablished( - request->GetExpectedContentSize(), contentType.c_str()); -} - -void SfDelegate::OnReadCompleted(net::URLRequest *request, int bytes_read) { - if (bytes_read == -1) { - MY_LOGI(StringPrintf( - "OnReadCompleted, read failed, status %d", - request->status().status()).c_str()); - - mOwner->onReadCompleted(ERROR_IO); - return; - } - - MY_LOGV(StringPrintf("OnReadCompleted, read %d bytes", bytes_read).c_str()); - - if (bytes_read < 0) { - MY_LOGI(StringPrintf( - "Read failed w/ status %d\n", - request->status().status()).c_str()); - - mOwner->onReadCompleted(ERROR_IO); - return; - } else if (bytes_read == 0) { - mAtEOS = true; - mOwner->onReadCompleted(mNumBytesRead); - return; - } - - CHECK_GT(bytes_read, 0); - CHECK_LE(mNumBytesRead + bytes_read, mNumBytesTotal); - - memcpy((uint8_t *)mDataDestination + mNumBytesRead, - mReadBuffer->data(), - bytes_read); - - mNumBytesRead += bytes_read; - - readMore(request); -} - -void SfDelegate::readMore(net::URLRequest *request) { - while (mNumBytesRead < mNumBytesTotal) { - size_t copy = mNumBytesTotal - mNumBytesRead; - if (copy > mReadBuffer->size()) { - copy = mReadBuffer->size(); - } - - int n; - if (request->Read(mReadBuffer, copy, &n)) { - MY_LOGV(StringPrintf("Read %d bytes directly.", n).c_str()); - - 
CHECK_LE((size_t)n, copy); - - memcpy((uint8_t *)mDataDestination + mNumBytesRead, - mReadBuffer->data(), - n); - - mNumBytesRead += n; - - if (n == 0) { - mAtEOS = true; - break; - } - } else { - MY_LOGV("readMore pending read"); - - if (request->status().status() != net::URLRequestStatus::IO_PENDING) { - MY_LOGI(StringPrintf( - "Direct read failed w/ status %d\n", - request->status().status()).c_str()); - - mOwner->onReadCompleted(ERROR_IO); - return; - } - - return; - } - } - - mOwner->onReadCompleted(mNumBytesRead); -} - -void SfDelegate::initiateConnection( - const char *uri, - const KeyedVector<String8, String8> *headers, - off64_t offset) { - GURL url(uri); - - MessageLoop *loop = gNetworkThread->message_loop(); - loop->PostTask( - FROM_HERE, - NewRunnableFunction( - &SfDelegate::OnInitiateConnectionWrapper, - this, - url, - headers, - offset)); - -} - -// static -void SfDelegate::OnInitiateConnectionWrapper( - SfDelegate *me, GURL url, - const KeyedVector<String8, String8> *headers, - off64_t offset) { - me->onInitiateConnection(url, headers, offset); -} - -void SfDelegate::onInitiateConnection( - const GURL &url, - const KeyedVector<String8, String8> *extra, - off64_t offset) { - CHECK(mURLRequest == NULL); - - mURLRequest = new net::URLRequest(url, this); - mAtEOS = false; - - mRangeRequested = false; - - if (offset != 0 || extra != NULL) { - net::HttpRequestHeaders headers = - mURLRequest->extra_request_headers(); - - if (offset != 0) { - headers.AddHeaderFromString( - StringPrintf("Range: bytes=%lld-", offset).c_str()); - - mRangeRequested = true; - } - - if (extra != NULL) { - for (size_t i = 0; i < extra->size(); ++i) { - AString s; - s.append(extra->keyAt(i).string()); - s.append(": "); - s.append(extra->valueAt(i).string()); - - headers.AddHeaderFromString(s.c_str()); - } - } - - mURLRequest->SetExtraRequestHeaders(headers); - } - - mURLRequest->set_context(gReqContext); - - mURLRequest->Start(); -} - -void SfDelegate::initiateDisconnect() { - 
MessageLoop *loop = gNetworkThread->message_loop(); - loop->PostTask( - FROM_HERE, - NewRunnableFunction( - &SfDelegate::OnInitiateDisconnectWrapper, this)); -} - -// static -void SfDelegate::OnInitiateDisconnectWrapper(SfDelegate *me) { - me->onInitiateDisconnect(); -} - -void SfDelegate::onInitiateDisconnect() { - if (mURLRequest == NULL) { - return; - } - - mURLRequest->Cancel(); - - delete mURLRequest; - mURLRequest = NULL; - - mOwner->onDisconnectComplete(); -} - -void SfDelegate::initiateRead(void *data, size_t size) { - MessageLoop *loop = gNetworkThread->message_loop(); - loop->PostTask( - FROM_HERE, - NewRunnableFunction( - &SfDelegate::OnInitiateReadWrapper, this, data, size)); -} - -// static -void SfDelegate::OnInitiateReadWrapper( - SfDelegate *me, void *data, size_t size) { - me->onInitiateRead(data, size); -} - -void SfDelegate::onInitiateRead(void *data, size_t size) { - CHECK(mURLRequest != NULL); - - mNumBytesRead = 0; - mNumBytesTotal = size; - mDataDestination = data; - - if (mAtEOS) { - mOwner->onReadCompleted(0); - return; - } - - readMore(mURLRequest); -} - -} // namespace android - diff --git a/media/libstagefright/chromium_http/support.h b/media/libstagefright/chromium_http/support.h deleted file mode 100644 index 975a1d3..0000000 --- a/media/libstagefright/chromium_http/support.h +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef SUPPORT_H_ - -#define SUPPORT_H_ - -#include <assert.h> - -#include "net/base/net_log.h" -#include "net/url_request/url_request.h" -#include "net/url_request/url_request_context.h" -#include "net/base/android_network_library.h" -#include "net/base/io_buffer.h" - -#include <utils/KeyedVector.h> -#include <utils/Mutex.h> -#include <utils/String8.h> - -namespace net { - struct ProxyConfigServiceAndroid; -}; - -namespace android { - -struct SfNetLog : public net::NetLog { - SfNetLog(); - - virtual void AddEntry( - EventType type, - const base::TimeTicks &time, - const Source &source, - EventPhase phase, - EventParameters *params); - - virtual uint32 NextID(); - virtual LogLevel GetLogLevel() const; - -private: - uint32 mNextID; - - DISALLOW_EVIL_CONSTRUCTORS(SfNetLog); -}; - -struct SfRequestContext : public net::URLRequestContext { - SfRequestContext(); - - virtual const std::string &GetUserAgent(const GURL &url) const; - - status_t updateProxyConfig( - const char *host, int32_t port, const char *exclusionList); - -private: - Mutex mProxyConfigLock; - - std::string mUserAgent; - net::ProxyConfigServiceAndroid *mProxyConfigService; - - DISALLOW_EVIL_CONSTRUCTORS(SfRequestContext); -}; - -// This is required for https support, we don't really verify certificates, -// we accept anything... 
-struct SfNetworkLibrary : public net::AndroidNetworkLibrary { - SfNetworkLibrary(); - - virtual VerifyResult VerifyX509CertChain( - const std::vector<std::string>& cert_chain, - const std::string& hostname, - const std::string& auth_type); - -private: - DISALLOW_EVIL_CONSTRUCTORS(SfNetworkLibrary); -}; - -struct ChromiumHTTPDataSource; - -struct SfDelegate : public net::URLRequest::Delegate { - SfDelegate(); - virtual ~SfDelegate(); - - void initiateConnection( - const char *uri, - const KeyedVector<String8, String8> *headers, - off64_t offset); - - void initiateDisconnect(); - void initiateRead(void *data, size_t size); - - void setOwner(ChromiumHTTPDataSource *mOwner); - - // Gets the UID of the calling process - bool getUID(uid_t *uid) const; - - void setUID(uid_t uid); - - virtual void OnReceivedRedirect( - net::URLRequest *request, const GURL &new_url, bool *defer_redirect); - - virtual void OnAuthRequired( - net::URLRequest *request, net::AuthChallengeInfo *auth_info); - - virtual void OnCertificateRequested( - net::URLRequest *request, net::SSLCertRequestInfo *cert_request_info); - - virtual void OnSSLCertificateError( - net::URLRequest *request, int cert_error, net::X509Certificate *cert); - - virtual void OnGetCookies(net::URLRequest *request, bool blocked_by_policy); - - virtual void OnSetCookie( - net::URLRequest *request, - const std::string &cookie_line, - const net::CookieOptions &options, - bool blocked_by_policy); - - virtual void OnResponseStarted(net::URLRequest *request); - - virtual void OnReadCompleted(net::URLRequest *request, int bytes_read); - - static status_t UpdateProxyConfig( - const char *host, int32_t port, const char *exclusionList); - -private: - typedef Delegate inherited; - - ChromiumHTTPDataSource *mOwner; - - net::URLRequest *mURLRequest; - scoped_refptr<net::IOBufferWithSize> mReadBuffer; - - size_t mNumBytesRead; - size_t mNumBytesTotal; - void *mDataDestination; - - bool mRangeRequested; - bool mAtEOS; - - void 
readMore(net::URLRequest *request); - - static void OnInitiateConnectionWrapper( - SfDelegate *me, - GURL url, - const KeyedVector<String8, String8> *headers, - off64_t offset); - - static void OnInitiateDisconnectWrapper(SfDelegate *me); - - static void OnInitiateReadWrapper( - SfDelegate *me, void *data, size_t size); - - void onInitiateConnection( - const GURL &url, - const KeyedVector<String8, String8> *headers, - off64_t offset); - - void onInitiateDisconnect(); - void onInitiateRead(void *data, size_t size); - - DISALLOW_EVIL_CONSTRUCTORS(SfDelegate); -}; - -} // namespace android - -#endif // SUPPORT_H_ diff --git a/media/libstagefright/chromium_http_stub.cpp b/media/libstagefright/chromium_http_stub.cpp deleted file mode 100644 index ed8a878..0000000 --- a/media/libstagefright/chromium_http_stub.cpp +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#include <dlfcn.h> - -#include <media/stagefright/DataSource.h> - -#include "include/chromium_http_stub.h" -#include "include/HTTPBase.h" - -namespace android { - -static bool gFirst = true; -static void *gHandle; -static Mutex gLibMutex; - -HTTPBase *(*gLib_createChromiumHTTPDataSource)(uint32_t flags); -DataSource *(*gLib_createDataUriSource)(const char *uri); - -status_t (*gLib_UpdateChromiumHTTPDataSourceProxyConfig)( - const char *host, int32_t port, const char *exclusionList); - -static bool load_libstagefright_chromium_http() { - Mutex::Autolock autoLock(gLibMutex); - void *sym; - - if (!gFirst) { - return (gHandle != NULL); - } - - gFirst = false; - - gHandle = dlopen("libstagefright_chromium_http.so", RTLD_NOW); - if (gHandle == NULL) { - return false; - } - - sym = dlsym(gHandle, "createChromiumHTTPDataSource"); - if (sym == NULL) { - gHandle = NULL; - return false; - } - gLib_createChromiumHTTPDataSource = (HTTPBase *(*)(uint32_t))sym; - - sym = dlsym(gHandle, "createDataUriSource"); - if (sym == NULL) { - gHandle = NULL; - return false; - } - gLib_createDataUriSource = (DataSource *(*)(const char *))sym; - - sym = dlsym(gHandle, "UpdateChromiumHTTPDataSourceProxyConfig"); - if (sym == NULL) { - gHandle = NULL; - return false; - } - gLib_UpdateChromiumHTTPDataSourceProxyConfig = - (status_t (*)(const char *, int32_t, const char *))sym; - - return true; -} - -HTTPBase *createChromiumHTTPDataSource(uint32_t flags) { - if (!load_libstagefright_chromium_http()) { - return NULL; - } - - return gLib_createChromiumHTTPDataSource(flags); -} - -status_t UpdateChromiumHTTPDataSourceProxyConfig( - const char *host, int32_t port, const char *exclusionList) { - if (!load_libstagefright_chromium_http()) { - return INVALID_OPERATION; - } - - return gLib_UpdateChromiumHTTPDataSourceProxyConfig( - host, port, exclusionList); -} - -DataSource *createDataUriSource(const char *uri) { - if (!load_libstagefright_chromium_http()) { - return NULL; - } - - return 
gLib_createDataUriSource(uri); -} - -} diff --git a/media/libstagefright/codecs/aacdec/Android.mk b/media/libstagefright/codecs/aacdec/Android.mk index ffa64f9..49ff238 100644 --- a/media/libstagefright/codecs/aacdec/Android.mk +++ b/media/libstagefright/codecs/aacdec/Android.mk @@ -17,6 +17,8 @@ LOCAL_C_INCLUDES := \ LOCAL_CFLAGS := +LOCAL_CFLAGS += -Werror + LOCAL_STATIC_LIBRARIES := libFraunhoferAAC LOCAL_SHARED_LIBRARIES := \ diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp index d4b0de7..532e36f 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp @@ -30,7 +30,7 @@ #define DRC_DEFAULT_MOBILE_REF_LEVEL 64 /* 64*-0.25dB = -16 dB below full scale for mobile conf */ #define DRC_DEFAULT_MOBILE_DRC_CUT 127 /* maximum compression of dynamic range for mobile conf */ #define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */ -#define MAX_CHANNEL_COUNT 6 /* maximum number of audio channels that can be decoded */ +#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */ // names of properties that can be used to override the default DRC settings #define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level" #define PROP_DRC_OVERRIDE_CUT "aac_drc_cut" @@ -296,8 +296,11 @@ void SoftAAC2::maybeConfigureDownmix() const { if (!(property_get("media.aac_51_output_enabled", value, NULL) && (!strcmp(value, "1") || !strcasecmp(value, "true")))) { ALOGI("Downmixing multichannel AAC to stereo"); - aacDecoder_SetParam(mAACDecoder, AAC_PCM_OUTPUT_CHANNELS, 2); + aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, 2); mStreamInfo->numChannels = 2; + // By default, the decoder creates a 5.1 channel downmix signal + // for seven and eight channel input streams. 
To enable 6.1 and 7.1 channel output + // use aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1) } } } @@ -374,7 +377,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { mNumSamplesOutput = 0; } - if (mIsADTS) { + if (mIsADTS && inHeader->nFilledLen) { size_t adtsHeaderSize = 0; // skip 30 bits, aac_frame_length follows. // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll????? diff --git a/media/libstagefright/codecs/aacenc/Android.mk b/media/libstagefright/codecs/aacenc/Android.mk index 057c69b..58ec3ba 100644 --- a/media/libstagefright/codecs/aacenc/Android.mk +++ b/media/libstagefright/codecs/aacenc/Android.mk @@ -82,6 +82,8 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV5E LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV7 endif +LOCAL_CFLAGS += -Werror + include $(BUILD_STATIC_LIBRARY) ################################################################################ @@ -106,6 +108,8 @@ ifeq ($(AAC_LIBRARY), fraunhofer) LOCAL_CFLAGS := + LOCAL_CFLAGS += -Werror + LOCAL_STATIC_LIBRARIES := libFraunhoferAAC LOCAL_SHARED_LIBRARIES := \ @@ -128,6 +132,8 @@ else # visualon LOCAL_CFLAGS := -DOSCL_IMPORT_REF= + LOCAL_CFLAGS += -Werror + LOCAL_STATIC_LIBRARIES := \ libstagefright_aacenc diff --git a/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c b/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c index cc01927..1d029fc 100644 --- a/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c +++ b/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c @@ -24,6 +24,8 @@ #include "basic_op.h" #include "oper_32b.h" +#define UNUSED(x) (void)(x) + /***************************************************************************** * * * Function L_Extract() * @@ -243,6 +245,8 @@ Word16 iLog4(Word32 value) Word32 rsqrt(Word32 value, /*!< Operand to square root (0.0 ... 
1) */ Word32 accuracy) /*!< Number of valid bits that will be calculated */ { + UNUSED(accuracy); + Word32 root = 0; Word32 scale; diff --git a/media/libstagefright/codecs/aacenc/src/aacenc.c b/media/libstagefright/codecs/aacenc/src/aacenc.c index d1c8621..40db92c 100644 --- a/media/libstagefright/codecs/aacenc/src/aacenc.c +++ b/media/libstagefright/codecs/aacenc/src/aacenc.c @@ -27,6 +27,8 @@ #include "cmnMemory.h" #include "memalign.h" +#define UNUSED(x) (void)(x) + /** * Init the audio codec module and return codec handle * \param phCodec [OUT] Return the video codec handle @@ -46,6 +48,8 @@ VO_U32 VO_API voAACEncInit(VO_HANDLE * phCodec,VO_AUDIO_CODINGTYPE vType, VO_COD VO_MEM_OPERATOR *pMemOP; int interMem; + UNUSED(vType); + interMem = 0; error = 0; @@ -471,6 +475,10 @@ VO_U32 VO_API voAACEncSetParam(VO_HANDLE hCodec, VO_S32 uParamID, VO_PTR pData) */ VO_U32 VO_API voAACEncGetParam(VO_HANDLE hCodec, VO_S32 uParamID, VO_PTR pData) { + UNUSED(hCodec); + UNUSED(uParamID); + UNUSED(pData); + return VO_ERR_NONE; } diff --git a/media/libstagefright/codecs/aacenc/src/bitenc.c b/media/libstagefright/codecs/aacenc/src/bitenc.c index fcc12dd..d1fd647 100644 --- a/media/libstagefright/codecs/aacenc/src/bitenc.c +++ b/media/libstagefright/codecs/aacenc/src/bitenc.c @@ -26,6 +26,7 @@ #include "qc_data.h" #include "interface.h" +#define UNUSED(x) (void)(x) static const Word16 globalGainOffset = 100; static const Word16 icsReservedBit = 0; @@ -585,6 +586,8 @@ Word16 WriteBitstream (HANDLE_BIT_BUF hBitStream, Word16 elementUsedBits; Word16 frameBits=0; + UNUSED(ancBytes); + /* struct bitbuffer bsWriteCopy; */ bitMarkUp = GetBitsAvail(hBitStream); if(qcOut->qcElement.adtsUsed) /* write adts header*/ diff --git a/media/libstagefright/codecs/aacenc/src/psy_main.c b/media/libstagefright/codecs/aacenc/src/psy_main.c index 4e9218c..6f0679c 100644 --- a/media/libstagefright/codecs/aacenc/src/psy_main.c +++ b/media/libstagefright/codecs/aacenc/src/psy_main.c @@ -38,6 +38,8 @@ 
#include "tns_func.h" #include "memalign.h" +#define UNUSED(x) (void)(x) + /* long start short stop */ static Word16 blockType2windowShape[] = {KBD_WINDOW,SINE_WINDOW,SINE_WINDOW,KBD_WINDOW}; @@ -170,7 +172,9 @@ Word16 PsyOutNew(PSY_OUT *hPsyOut, VO_MEM_OPERATOR *pMemOP) *****************************************************************************/ Word16 PsyOutDelete(PSY_OUT *hPsyOut, VO_MEM_OPERATOR *pMemOP) { - hPsyOut=NULL; + UNUSED(hPsyOut); + UNUSED(pMemOP); + return 0; } diff --git a/media/libstagefright/codecs/aacenc/src/qc_main.c b/media/libstagefright/codecs/aacenc/src/qc_main.c index 48ff300..e5d78aa 100644 --- a/media/libstagefright/codecs/aacenc/src/qc_main.c +++ b/media/libstagefright/codecs/aacenc/src/qc_main.c @@ -33,6 +33,7 @@ #include "channel_map.h" #include "memalign.h" +#define UNUSED(x) (void)(x) typedef enum{ FRAME_LEN_BYTES_MODULO = 1, @@ -204,11 +205,8 @@ Word16 QCNew(QC_STATE *hQC, VO_MEM_OPERATOR *pMemOP) **********************************************************************************/ void QCDelete(QC_STATE *hQC, VO_MEM_OPERATOR *pMemOP) { - - /* - nothing to do - */ - hQC=NULL; + UNUSED(hQC); + UNUSED(pMemOP); } /********************************************************************************* diff --git a/media/libstagefright/codecs/aacenc/src/tns.c b/media/libstagefright/codecs/aacenc/src/tns.c index 455a864..5172612 100644 --- a/media/libstagefright/codecs/aacenc/src/tns.c +++ b/media/libstagefright/codecs/aacenc/src/tns.c @@ -30,6 +30,8 @@ #include "psy_configuration.h" #include "tns_func.h" +#define UNUSED(x) (void)(x) + #define TNS_MODIFY_BEGIN 2600 /* Hz */ #define RATIO_PATCH_LOWER_BORDER 380 /* Hz */ #define TNS_GAIN_THRESH 141 /* 1.41*100 */ @@ -643,6 +645,8 @@ static Word16 CalcTnsFilter(const Word16 *signal, Word32 i; Word32 tnsOrderPlus1 = tnsOrder + 1; + UNUSED(window); + assert(tnsOrder <= TNS_MAX_ORDER); /* remove asserts later? 
(btg) */ for(i=0;i<tnsOrder;i++) { diff --git a/media/libstagefright/codecs/amrnb/common/Android.mk b/media/libstagefright/codecs/amrnb/common/Android.mk index 30ce29c..a2b3c8f 100644 --- a/media/libstagefright/codecs/amrnb/common/Android.mk +++ b/media/libstagefright/codecs/amrnb/common/Android.mk @@ -69,6 +69,8 @@ LOCAL_C_INCLUDES := \ LOCAL_CFLAGS := \ -DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF= -DOSCL_EXPORT_REF= +LOCAL_CFLAGS += -Werror + LOCAL_MODULE := libstagefright_amrnb_common include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/amrnb/dec/Android.mk b/media/libstagefright/codecs/amrnb/dec/Android.mk index 8d6c6f8..b067456 100644 --- a/media/libstagefright/codecs/amrnb/dec/Android.mk +++ b/media/libstagefright/codecs/amrnb/dec/Android.mk @@ -47,6 +47,8 @@ LOCAL_C_INCLUDES := \ LOCAL_CFLAGS := \ -DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF= +LOCAL_CFLAGS += -Werror + LOCAL_MODULE := libstagefright_amrnbdec include $(BUILD_STATIC_LIBRARY) @@ -68,6 +70,8 @@ LOCAL_C_INCLUDES := \ LOCAL_CFLAGS := -DOSCL_IMPORT_REF= +LOCAL_CFLAGS += -Werror + LOCAL_STATIC_LIBRARIES := \ libstagefright_amrnbdec libstagefright_amrwbdec diff --git a/media/libstagefright/codecs/amrnb/enc/Android.mk b/media/libstagefright/codecs/amrnb/enc/Android.mk index f4e467a..afc0b89 100644 --- a/media/libstagefright/codecs/amrnb/enc/Android.mk +++ b/media/libstagefright/codecs/amrnb/enc/Android.mk @@ -69,6 +69,8 @@ LOCAL_C_INCLUDES := \ LOCAL_CFLAGS := \ -DOSCL_UNUSED_ARG= +LOCAL_CFLAGS += -Werror + LOCAL_MODULE := libstagefright_amrnbenc include $(BUILD_STATIC_LIBRARY) @@ -88,6 +90,8 @@ LOCAL_C_INCLUDES := \ $(LOCAL_PATH)/../common/include \ $(LOCAL_PATH)/../common +LOCAL_CFLAGS += -Werror + LOCAL_STATIC_LIBRARIES := \ libstagefright_amrnbenc diff --git a/media/libstagefright/codecs/amrwb/Android.mk b/media/libstagefright/codecs/amrwb/Android.mk index 677107f..efdf988 100644 --- a/media/libstagefright/codecs/amrwb/Android.mk +++ b/media/libstagefright/codecs/amrwb/Android.mk @@ -50,6 
+50,8 @@ LOCAL_C_INCLUDES := \ LOCAL_CFLAGS := \ -DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF= +LOCAL_CFLAGS += -Werror + LOCAL_MODULE := libstagefright_amrwbdec include $(BUILD_STATIC_LIBRARY) diff --git a/media/libstagefright/codecs/amrwbenc/Android.mk b/media/libstagefright/codecs/amrwbenc/Android.mk index c5b8e0c..64fe8d1 100644 --- a/media/libstagefright/codecs/amrwbenc/Android.mk +++ b/media/libstagefright/codecs/amrwbenc/Android.mk @@ -112,6 +112,8 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV5E LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV7 endif +LOCAL_CFLAGS += -Werror + include $(BUILD_STATIC_LIBRARY) ################################################################################ @@ -126,6 +128,8 @@ LOCAL_C_INCLUDES := \ frameworks/av/media/libstagefright/codecs/common/include \ frameworks/native/include/media/openmax +LOCAL_CFLAGS += -Werror + LOCAL_STATIC_LIBRARIES := \ libstagefright_amrwbenc diff --git a/media/libstagefright/codecs/amrwbenc/src/autocorr.c b/media/libstagefright/codecs/amrwbenc/src/autocorr.c index 8c477ca..0b2ea89 100644 --- a/media/libstagefright/codecs/amrwbenc/src/autocorr.c +++ b/media/libstagefright/codecs/amrwbenc/src/autocorr.c @@ -28,6 +28,8 @@ #include "acelp.h" #include "ham_wind.tab" +#define UNUSED(x) (void)(x) + void Autocorr( Word16 x[], /* (i) : Input signal */ Word16 m, /* (i) : LPC order */ @@ -40,6 +42,8 @@ void Autocorr( Word32 L_sum, L_sum1, L_tmp, F_LEN; Word16 *p1,*p2,*p3; const Word16 *p4; + UNUSED(m); + /* Windowing of signal */ p1 = x; p4 = vo_window; diff --git a/media/libstagefright/codecs/amrwbenc/src/convolve.c b/media/libstagefright/codecs/amrwbenc/src/convolve.c index acba532..4c1f7d4 100644 --- a/media/libstagefright/codecs/amrwbenc/src/convolve.c +++ b/media/libstagefright/codecs/amrwbenc/src/convolve.c @@ -25,6 +25,8 @@ #include "typedef.h" #include "basic_op.h" +#define UNUSED(x) (void)(x) + void Convolve ( Word16 x[], /* (i) : input vector */ Word16 h[], /* (i) : impulse response */ @@ -35,6 +37,8 @@ 
void Convolve ( Word32 i, n; Word16 *tmpH,*tmpX; Word32 s; + UNUSED(L); + for (n = 0; n < 64;) { tmpH = h+n; diff --git a/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c b/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c index 0d66c31..b66b55e 100644 --- a/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c +++ b/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c @@ -31,6 +31,8 @@ #define UP_SAMP 4 #define L_INTERPOL1 4 +#define UNUSED(x) (void)(x) + /* Local functions */ #ifdef ASM_OPT @@ -171,6 +173,7 @@ static void Norm_Corr( Word32 corr, exp_corr, norm, exp, scale; Word16 exp_norm, excf[L_SUBFR], tmp; Word32 L_tmp, L_tmp1, L_tmp2; + UNUSED(L_subfr); /* compute the filtered excitation for the first delay t_min */ k = -t_min; diff --git a/media/libstagefright/codecs/amrwbenc/src/syn_filt.c b/media/libstagefright/codecs/amrwbenc/src/syn_filt.c index 1bda05a..961aadc 100644 --- a/media/libstagefright/codecs/amrwbenc/src/syn_filt.c +++ b/media/libstagefright/codecs/amrwbenc/src/syn_filt.c @@ -26,6 +26,8 @@ #include "math_op.h" #include "cnst.h" +#define UNUSED(x) (void)(x) + void Syn_filt( Word16 a[], /* (i) Q12 : a[m+1] prediction coefficients */ Word16 x[], /* (i) : input signal */ @@ -95,6 +97,8 @@ void Syn_filt_32( Word32 i,a0; Word32 L_tmp, L_tmp1; Word16 *p1, *p2, *p3; + UNUSED(m); + a0 = a[0] >> (4 + Qnew); /* input / 16 and >>Qnew */ /* Do the filtering. 
*/ for (i = 0; i < lg; i++) diff --git a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c index ea9da52..df7b9b3 100644 --- a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c +++ b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c @@ -39,6 +39,8 @@ #include "mem_align.h" #include "cmnMemory.h" +#define UNUSED(x) (void)(x) + #ifdef __cplusplus extern "C" { #endif @@ -1602,6 +1604,8 @@ VO_U32 VO_API voAMRWB_Init(VO_HANDLE * phCodec, /* o: the audi VO_MEM_OPERATOR voMemoprator; #endif VO_MEM_OPERATOR *pMemOP; + UNUSED(vType); + int interMem = 0; if(pUserData == NULL || pUserData->memflag != VO_IMF_USERMEMOPERATOR || pUserData->memData == NULL ) diff --git a/media/libstagefright/codecs/avc/common/Android.mk b/media/libstagefright/codecs/avc/common/Android.mk index 22dee15..844ef0a 100644 --- a/media/libstagefright/codecs/avc/common/Android.mk +++ b/media/libstagefright/codecs/avc/common/Android.mk @@ -16,4 +16,6 @@ LOCAL_C_INCLUDES := \ $(LOCAL_PATH)/src \ $(LOCAL_PATH)/include +LOCAL_CFLAGS += -Werror + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/avc/enc/Android.mk b/media/libstagefright/codecs/avc/enc/Android.mk index 7d17c2a..537ba42 100644 --- a/media/libstagefright/codecs/avc/enc/Android.mk +++ b/media/libstagefright/codecs/avc/enc/Android.mk @@ -30,6 +30,8 @@ LOCAL_C_INCLUDES := \ LOCAL_CFLAGS := \ -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF= +LOCAL_CFLAGS += -Werror + include $(BUILD_STATIC_LIBRARY) ################################################################################ @@ -69,4 +71,6 @@ LOCAL_SHARED_LIBRARIES := \ LOCAL_MODULE := libstagefright_soft_h264enc LOCAL_MODULE_TAGS := optional +LOCAL_CFLAGS += -Werror + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp index 89f0fed..0f4a00d 100644 --- 
a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp +++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp @@ -34,6 +34,12 @@ #include "SoftAVCEncoder.h" +#if LOG_NDEBUG +#define UNUSED_UNLESS_VERBOSE(x) (void)(x) +#else +#define UNUSED_UNLESS_VERBOSE(x) +#endif + namespace android { template<class T> @@ -136,14 +142,14 @@ inline static void ConvertYUV420SemiPlanarToYUV420Planar( } static void* MallocWrapper( - void *userData, int32_t size, int32_t attrs) { + void * /* userData */, int32_t size, int32_t /* attrs */) { void *ptr = malloc(size); if (ptr) memset(ptr, 0, size); return ptr; } -static void FreeWrapper(void *userData, void* ptr) { +static void FreeWrapper(void * /* userData */, void* ptr) { free(ptr); } @@ -722,7 +728,7 @@ OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter( } } -void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) { +void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) { if (mSignalledError || mSawInputEOS) { return; } @@ -795,7 +801,7 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) { } } - buffer_handle_t srcBuffer; // for MetaDataMode only + buffer_handle_t srcBuffer = NULL; // for MetaDataMode only // Get next input video frame if (mReadyForNextFrame) { @@ -964,6 +970,7 @@ int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) { } void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) { + UNUSED_UNLESS_VERBOSE(buffer); ALOGV("signalBufferReturned: %p", buffer); } diff --git a/media/libstagefright/codecs/common/Android.mk b/media/libstagefright/codecs/common/Android.mk index a33cb92..b0010ff 100644 --- a/media/libstagefright/codecs/common/Android.mk +++ b/media/libstagefright/codecs/common/Android.mk @@ -14,6 +14,8 @@ LOCAL_STATIC_LIBRARIES := LOCAL_C_INCLUDES := \ $(LOCAL_PATH)/include +LOCAL_CFLAGS += -Werror + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/flac/enc/Android.mk b/media/libstagefright/codecs/flac/enc/Android.mk index f01d605..59a11de 100644 
--- a/media/libstagefright/codecs/flac/enc/Android.mk +++ b/media/libstagefright/codecs/flac/enc/Android.mk @@ -9,6 +9,8 @@ LOCAL_C_INCLUDES := \ frameworks/native/include/media/openmax \ external/flac/include +LOCAL_CFLAGS += -Werror + LOCAL_SHARED_LIBRARIES := \ libstagefright libstagefright_omx libstagefright_foundation libutils liblog diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp index d797197..1301060 100644 --- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp +++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp @@ -27,6 +27,12 @@ #define FLAC_COMPRESSION_LEVEL_DEFAULT 5 #define FLAC_COMPRESSION_LEVEL_MAX 8 +#if LOG_NDEBUG +#define UNUSED_UNLESS_VERBOSE(x) (void)(x) +#else +#define UNUSED_UNLESS_VERBOSE(x) +#endif + namespace android { template<class T> @@ -257,7 +263,7 @@ OMX_ERRORTYPE SoftFlacEncoder::internalSetParameter( } void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) { - //UNUSED_UNLESS_VERBOSE(portIndex); + UNUSED_UNLESS_VERBOSE(portIndex); ALOGV("SoftFlacEncoder::onQueueFilled(portIndex=%d)", portIndex); if (mSignalledError) { @@ -343,16 +349,17 @@ void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) { } } - FLAC__StreamEncoderWriteStatus SoftFlacEncoder::onEncodedFlacAvailable( const FLAC__byte buffer[], - size_t bytes, unsigned samples, unsigned current_frame) { - ALOGV("SoftFlacEncoder::onEncodedFlacAvailable(bytes=%d, samples=%d, curr_frame=%d)", + size_t bytes, unsigned samples, + unsigned current_frame) { + UNUSED_UNLESS_VERBOSE(current_frame); + ALOGV("SoftFlacEncoder::onEncodedFlacAvailable(bytes=%zu, samples=%u, curr_frame=%u)", bytes, samples, current_frame); #ifdef WRITE_FLAC_HEADER_IN_FIRST_BUFFER if (samples == 0) { - ALOGI(" saving %d bytes of header", bytes); + ALOGI(" saving %zu bytes of header", bytes); memcpy(mHeader + mHeaderOffset, buffer, bytes); mHeaderOffset += bytes;// will contain header size when finished receiving 
header return FLAC__STREAM_ENCODER_WRITE_STATUS_OK; @@ -444,8 +451,12 @@ return_result: // static FLAC__StreamEncoderWriteStatus SoftFlacEncoder::flacEncoderWriteCallback( - const FLAC__StreamEncoder *encoder, const FLAC__byte buffer[], - size_t bytes, unsigned samples, unsigned current_frame, void *client_data) { + const FLAC__StreamEncoder * /* encoder */, + const FLAC__byte buffer[], + size_t bytes, + unsigned samples, + unsigned current_frame, + void *client_data) { return ((SoftFlacEncoder*) client_data)->onEncodedFlacAvailable( buffer, bytes, samples, current_frame); } diff --git a/media/libstagefright/codecs/g711/dec/Android.mk b/media/libstagefright/codecs/g711/dec/Android.mk index 4c80da6..a0112e1 100644 --- a/media/libstagefright/codecs/g711/dec/Android.mk +++ b/media/libstagefright/codecs/g711/dec/Android.mk @@ -14,4 +14,6 @@ LOCAL_SHARED_LIBRARIES := \ LOCAL_MODULE := libstagefright_soft_g711dec LOCAL_MODULE_TAGS := optional +LOCAL_CFLAGS += -Werror + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/gsm/dec/Android.mk b/media/libstagefright/codecs/gsm/dec/Android.mk index 71613d2..30868d5 100644 --- a/media/libstagefright/codecs/gsm/dec/Android.mk +++ b/media/libstagefright/codecs/gsm/dec/Android.mk @@ -9,6 +9,8 @@ LOCAL_C_INCLUDES := \ frameworks/native/include/media/openmax \ external/libgsm/inc +LOCAL_CFLAGS += -Werror + LOCAL_SHARED_LIBRARIES := \ libstagefright libstagefright_omx libstagefright_foundation libutils liblog diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.mk b/media/libstagefright/codecs/m4v_h263/dec/Android.mk index a3d5779..1d232c6 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/Android.mk +++ b/media/libstagefright/codecs/m4v_h263/dec/Android.mk @@ -46,6 +46,8 @@ LOCAL_C_INCLUDES := \ LOCAL_CFLAGS := -DOSCL_EXPORT_REF= -DOSCL_IMPORT_REF= +LOCAL_CFLAGS += -Werror + include $(BUILD_STATIC_LIBRARY) ################################################################################ @@ -72,4 +74,6 
@@ LOCAL_SHARED_LIBRARIES := \ LOCAL_MODULE := libstagefright_soft_mpeg4dec LOCAL_MODULE_TAGS := optional +LOCAL_CFLAGS += -Werror + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.mk b/media/libstagefright/codecs/m4v_h263/enc/Android.mk index 83a2dd2..c9006d9 100644 --- a/media/libstagefright/codecs/m4v_h263/enc/Android.mk +++ b/media/libstagefright/codecs/m4v_h263/enc/Android.mk @@ -33,6 +33,8 @@ LOCAL_C_INCLUDES := \ $(TOP)/frameworks/av/media/libstagefright/include \ $(TOP)/frameworks/native/include/media/openmax +LOCAL_CFLAGS += -Werror + include $(BUILD_STATIC_LIBRARY) ################################################################################ @@ -72,4 +74,6 @@ LOCAL_SHARED_LIBRARIES := \ LOCAL_MODULE := libstagefright_soft_mpeg4enc LOCAL_MODULE_TAGS := optional +LOCAL_CFLAGS += -Werror + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp index da5b785..e25709d 100644 --- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp +++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp @@ -679,7 +679,7 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) { mSawInputEOS = true; } - buffer_handle_t srcBuffer; // for MetaDataMode only + buffer_handle_t srcBuffer = NULL; // for MetaDataMode only if (inHeader->nFilledLen > 0) { uint8_t *inputData = NULL; if (mStoreMetaDataInBuffers) { diff --git a/media/libstagefright/codecs/mp3dec/Android.mk b/media/libstagefright/codecs/mp3dec/Android.mk index 135c715..8284490 100644 --- a/media/libstagefright/codecs/mp3dec/Android.mk +++ b/media/libstagefright/codecs/mp3dec/Android.mk @@ -50,6 +50,8 @@ LOCAL_C_INCLUDES := \ LOCAL_CFLAGS := \ -DOSCL_UNUSED_ARG= +LOCAL_CFLAGS += -Werror + LOCAL_MODULE := libstagefright_mp3dec LOCAL_ARM_MODE := arm @@ -69,6 +71,8 @@ LOCAL_C_INCLUDES := \ $(LOCAL_PATH)/src \ $(LOCAL_PATH)/include 
+LOCAL_CFLAGS += -Werror + LOCAL_SHARED_LIBRARIES := \ libstagefright libstagefright_omx libstagefright_foundation libutils liblog diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp index 4d864df..5396022 100644 --- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp +++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp @@ -146,6 +146,23 @@ OMX_ERRORTYPE SoftMP3::internalGetParameter( return OMX_ErrorNone; } + case OMX_IndexParamAudioMp3: + { + OMX_AUDIO_PARAM_MP3TYPE *mp3Params = + (OMX_AUDIO_PARAM_MP3TYPE *)params; + + if (mp3Params->nPortIndex > 1) { + return OMX_ErrorUndefined; + } + + mp3Params->nChannels = mNumChannels; + mp3Params->nBitRate = 0 /* unknown */; + mp3Params->nSampleRate = mSamplingRate; + // other fields are encoder-only + + return OMX_ErrorNone; + } + default: return SimpleSoftOMXComponent::internalGetParameter(index, params); } @@ -335,6 +352,9 @@ void SoftMP3::onPortFlushCompleted(OMX_U32 portIndex) { // depend on fragments from the last one decoded. 
pvmp3_InitDecoder(mConfig, mDecoderBuf); mIsFirst = true; + mSignalledError = false; + mSawInputEos = false; + mSignalledOutputEos = false; } } diff --git a/media/libstagefright/codecs/on2/dec/Android.mk b/media/libstagefright/codecs/on2/dec/Android.mk index 7f2c46d..93ff64c 100644 --- a/media/libstagefright/codecs/on2/dec/Android.mk +++ b/media/libstagefright/codecs/on2/dec/Android.mk @@ -20,4 +20,6 @@ LOCAL_SHARED_LIBRARIES := \ LOCAL_MODULE := libstagefright_soft_vpxdec LOCAL_MODULE_TAGS := optional +LOCAL_CFLAGS += -Werror + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp index 5efe022..dc38ea8 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp @@ -27,7 +27,6 @@ namespace android { - template<class T> static void InitOMXParams(T *params) { params->nSize = sizeof(T); @@ -141,17 +140,27 @@ SoftVPXEncoder::SoftVPXEncoder(const char *name, mWidth(176), mHeight(144), mBitrate(192000), // in bps + mFramerate(30 << 16), // in Q16 format mBitrateUpdated(false), mBitrateControlMode(VPX_VBR), // variable bitrate - mFrameDurationUs(33333), // Defaults to 30 fps mDCTPartitions(0), mErrorResilience(OMX_FALSE), mColorFormat(OMX_COLOR_FormatYUV420Planar), mLevel(OMX_VIDEO_VP8Level_Version0), + mKeyFrameInterval(0), + mMinQuantizer(0), + mMaxQuantizer(0), + mTemporalLayers(0), + mTemporalPatternType(OMX_VIDEO_VPXTemporalLayerPatternNone), + mTemporalPatternLength(0), + mTemporalPatternIdx(0), + mLastTimestamp(0x7FFFFFFFFFFFFFFFLL), mConversionBuffer(NULL), mInputDataIsMeta(false), mGrallocModule(NULL), mKeyFrameRequested(false) { + memset(mTemporalLayerBitrateRatio, 0, sizeof(mTemporalLayerBitrateRatio)); + mTemporalLayerBitrateRatio[0] = 100; initPorts(); } @@ -180,9 +189,8 @@ void SoftVPXEncoder::initPorts() { inputPort.format.video.nStride = inputPort.format.video.nFrameWidth; 
inputPort.format.video.nSliceHeight = inputPort.format.video.nFrameHeight; inputPort.format.video.nBitrate = 0; - // frameRate is reciprocal of frameDuration, which is - // in microseconds. It is also in Q16 format. - inputPort.format.video.xFramerate = (1000000/mFrameDurationUs) << 16; + // frameRate is in Q16 format. + inputPort.format.video.xFramerate = mFramerate; inputPort.format.video.bFlagErrorConcealment = OMX_FALSE; inputPort.nPortIndex = kInputPortIndex; inputPort.eDir = OMX_DirInput; @@ -220,7 +228,7 @@ void SoftVPXEncoder::initPorts() { outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVP8; outputPort.format.video.eColorFormat = OMX_COLOR_FormatUnused; outputPort.format.video.pNativeWindow = NULL; - outputPort.nBufferSize = 256 * 1024; // arbitrary + outputPort.nBufferSize = 1024 * 1024; // arbitrary addPort(outputPort); } @@ -236,7 +244,9 @@ status_t SoftVPXEncoder::initEncoder() { if (mCodecInterface == NULL) { return UNKNOWN_ERROR; } - + ALOGD("VP8: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u", + (uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval, + mMinQuantizer, mMaxQuantizer); codec_return = vpx_codec_enc_config_default(mCodecInterface, mCodecConfiguration, 0); // Codec specific flags @@ -277,8 +287,120 @@ status_t SoftVPXEncoder::initEncoder() { mCodecConfiguration->g_timebase.num = 1; mCodecConfiguration->g_timebase.den = 1000000; // rc_target_bitrate is in kbps, mBitrate in bps - mCodecConfiguration->rc_target_bitrate = mBitrate/1000; + mCodecConfiguration->rc_target_bitrate = (mBitrate + 500) / 1000; mCodecConfiguration->rc_end_usage = mBitrateControlMode; + // Disable frame drop - not allowed in MediaCodec now. + mCodecConfiguration->rc_dropframe_thresh = 0; + if (mBitrateControlMode == VPX_CBR) { + // Disable spatial resizing. + mCodecConfiguration->rc_resize_allowed = 0; + // Single-pass mode. 
+ mCodecConfiguration->g_pass = VPX_RC_ONE_PASS; + // Maximum amount of bits that can be subtracted from the target + // bitrate - expressed as percentage of the target bitrate. + mCodecConfiguration->rc_undershoot_pct = 100; + // Maximum amount of bits that can be added to the target + // bitrate - expressed as percentage of the target bitrate. + mCodecConfiguration->rc_overshoot_pct = 15; + // Initial value of the buffer level in ms. + mCodecConfiguration->rc_buf_initial_sz = 500; + // Amount of data that the encoder should try to maintain in ms. + mCodecConfiguration->rc_buf_optimal_sz = 600; + // The amount of data that may be buffered by the decoding + // application in ms. + mCodecConfiguration->rc_buf_sz = 1000; + // Enable error resilience - needed for packet loss. + mCodecConfiguration->g_error_resilient = 1; + // Disable lagged encoding. + mCodecConfiguration->g_lag_in_frames = 0; + // Maximum key frame interval - for CBR boost to 3000 + mCodecConfiguration->kf_max_dist = 3000; + // Encoder determines optimal key frame placement automatically. + mCodecConfiguration->kf_mode = VPX_KF_AUTO; + } + + // Frames temporal pattern - for now WebRTC like pattern is only supported. 
+ switch (mTemporalLayers) { + case 0: + { + mTemporalPatternLength = 0; + break; + } + case 1: + { + mCodecConfiguration->ts_number_layers = 1; + mCodecConfiguration->ts_rate_decimator[0] = 1; + mCodecConfiguration->ts_periodicity = 1; + mCodecConfiguration->ts_layer_id[0] = 0; + mTemporalPattern[0] = kTemporalUpdateLastRefAll; + mTemporalPatternLength = 1; + break; + } + case 2: + { + mCodecConfiguration->ts_number_layers = 2; + mCodecConfiguration->ts_rate_decimator[0] = 2; + mCodecConfiguration->ts_rate_decimator[1] = 1; + mCodecConfiguration->ts_periodicity = 2; + mCodecConfiguration->ts_layer_id[0] = 0; + mCodecConfiguration->ts_layer_id[1] = 1; + mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef; + mTemporalPattern[1] = kTemporalUpdateGoldenWithoutDependencyRefAltRef; + mTemporalPattern[2] = kTemporalUpdateLastRefAltRef; + mTemporalPattern[3] = kTemporalUpdateGoldenRefAltRef; + mTemporalPattern[4] = kTemporalUpdateLastRefAltRef; + mTemporalPattern[5] = kTemporalUpdateGoldenRefAltRef; + mTemporalPattern[6] = kTemporalUpdateLastRefAltRef; + mTemporalPattern[7] = kTemporalUpdateNone; + mTemporalPatternLength = 8; + break; + } + case 3: + { + mCodecConfiguration->ts_number_layers = 3; + mCodecConfiguration->ts_rate_decimator[0] = 4; + mCodecConfiguration->ts_rate_decimator[1] = 2; + mCodecConfiguration->ts_rate_decimator[2] = 1; + mCodecConfiguration->ts_periodicity = 4; + mCodecConfiguration->ts_layer_id[0] = 0; + mCodecConfiguration->ts_layer_id[1] = 2; + mCodecConfiguration->ts_layer_id[2] = 1; + mCodecConfiguration->ts_layer_id[3] = 2; + mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef; + mTemporalPattern[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef; + mTemporalPattern[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef; + mTemporalPattern[3] = kTemporalUpdateNone; + mTemporalPattern[4] = kTemporalUpdateLastRefAltRef; + mTemporalPattern[5] = kTemporalUpdateNone; + mTemporalPattern[6] = kTemporalUpdateGoldenRefAltRef; + 
mTemporalPattern[7] = kTemporalUpdateNone; + mTemporalPatternLength = 8; + break; + } + default: + { + ALOGE("Wrong number of temporal layers %u", mTemporalLayers); + return UNKNOWN_ERROR; + } + } + + // Set bitrate values for each layer + for (size_t i = 0; i < mCodecConfiguration->ts_number_layers; i++) { + mCodecConfiguration->ts_target_bitrate[i] = + mCodecConfiguration->rc_target_bitrate * + mTemporalLayerBitrateRatio[i] / 100; + } + if (mKeyFrameInterval > 0) { + mCodecConfiguration->kf_max_dist = mKeyFrameInterval; + mCodecConfiguration->kf_min_dist = mKeyFrameInterval; + mCodecConfiguration->kf_mode = VPX_KF_AUTO; + } + if (mMinQuantizer > 0) { + mCodecConfiguration->rc_min_quantizer = mMinQuantizer; + } + if (mMaxQuantizer > 0) { + mCodecConfiguration->rc_max_quantizer = mMaxQuantizer; + } codec_return = vpx_codec_enc_init(mCodecContext, mCodecInterface, @@ -298,6 +420,33 @@ status_t SoftVPXEncoder::initEncoder() { return UNKNOWN_ERROR; } + // Extra CBR settings + if (mBitrateControlMode == VPX_CBR) { + codec_return = vpx_codec_control(mCodecContext, + VP8E_SET_STATIC_THRESHOLD, + 1); + if (codec_return == VPX_CODEC_OK) { + uint32_t rc_max_intra_target = + mCodecConfiguration->rc_buf_optimal_sz * (mFramerate >> 17) / 10; + // Don't go below 3 times per frame bandwidth. 
+ if (rc_max_intra_target < 300) { + rc_max_intra_target = 300; + } + codec_return = vpx_codec_control(mCodecContext, + VP8E_SET_MAX_INTRA_BITRATE_PCT, + rc_max_intra_target); + } + if (codec_return == VPX_CODEC_OK) { + codec_return = vpx_codec_control(mCodecContext, + VP8E_SET_CPUUSED, + -8); + } + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error setting cbr parameters for vpx encoder."); + return UNKNOWN_ERROR; + } + } + if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || mInputDataIsMeta) { if (mConversionBuffer == NULL) { mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2); @@ -361,9 +510,7 @@ OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index, } formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused; - // Converting from microseconds - // Also converting to Q16 format - formatParams->xFramerate = (1000000/mFrameDurationUs) << 16; + formatParams->xFramerate = mFramerate; return OMX_ErrorNone; } else if (formatParams->nPortIndex == kOutputPortIndex) { formatParams->eCompressionFormat = OMX_VIDEO_CodingVP8; @@ -411,6 +558,24 @@ OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index, return OMX_ErrorNone; } + case OMX_IndexParamVideoAndroidVp8Encoder: { + OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vp8AndroidParams = + (OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param; + + if (vp8AndroidParams->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + vp8AndroidParams->nKeyFrameInterval = mKeyFrameInterval; + vp8AndroidParams->eTemporalPattern = mTemporalPatternType; + vp8AndroidParams->nTemporalLayerCount = mTemporalLayers; + vp8AndroidParams->nMinQuantizer = mMinQuantizer; + vp8AndroidParams->nMaxQuantizer = mMaxQuantizer; + memcpy(vp8AndroidParams->nTemporalLayerBitrateRatio, + mTemporalLayerBitrateRatio, sizeof(mTemporalLayerBitrateRatio)); + return OMX_ErrorNone; + } + case OMX_IndexParamVideoProfileLevelQuerySupported: { OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel = 
(OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param; @@ -497,11 +662,15 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index, return internalSetVp8Params( (const OMX_VIDEO_PARAM_VP8TYPE *)param); + case OMX_IndexParamVideoAndroidVp8Encoder: + return internalSetAndroidVp8Params( + (const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param); + case OMX_IndexParamVideoProfileLevelCurrent: return internalSetProfileLevel( (const OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param); - case OMX_IndexVendorStartUnused: + case kStoreMetaDataExtensionIndex: { // storeMetaDataInBuffers const StoreMetaDataInBuffersParams *storeParam = @@ -610,6 +779,50 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params( return OMX_ErrorNone; } +OMX_ERRORTYPE SoftVPXEncoder::internalSetAndroidVp8Params( + const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE* vp8AndroidParams) { + if (vp8AndroidParams->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + if (vp8AndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternNone && + vp8AndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { + return OMX_ErrorBadParameter; + } + if (vp8AndroidParams->nTemporalLayerCount > OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) { + return OMX_ErrorBadParameter; + } + if (vp8AndroidParams->nMinQuantizer > vp8AndroidParams->nMaxQuantizer) { + return OMX_ErrorBadParameter; + } + + mTemporalPatternType = vp8AndroidParams->eTemporalPattern; + if (vp8AndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { + mTemporalLayers = vp8AndroidParams->nTemporalLayerCount; + } else if (vp8AndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternNone) { + mTemporalLayers = 0; + } + // Check the bitrate distribution between layers is in increasing order + if (mTemporalLayers > 1) { + for (size_t i = 0; i < mTemporalLayers - 1; i++) { + if (vp8AndroidParams->nTemporalLayerBitrateRatio[i + 1] <= + vp8AndroidParams->nTemporalLayerBitrateRatio[i]) { + 
ALOGE("Wrong bitrate ratio - should be in increasing order."); + return OMX_ErrorBadParameter; + } + } + } + mKeyFrameInterval = vp8AndroidParams->nKeyFrameInterval; + mMinQuantizer = vp8AndroidParams->nMinQuantizer; + mMaxQuantizer = vp8AndroidParams->nMaxQuantizer; + memcpy(mTemporalLayerBitrateRatio, vp8AndroidParams->nTemporalLayerBitrateRatio, + sizeof(mTemporalLayerBitrateRatio)); + ALOGD("VP8: internalSetAndroidVp8Params. BRMode: %u. TS: %zu. KF: %u." + " QP: %u - %u BR0: %u. BR1: %u. BR2: %u", + (uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval, + mMinQuantizer, mMaxQuantizer, mTemporalLayerBitrateRatio[0], + mTemporalLayerBitrateRatio[1], mTemporalLayerBitrateRatio[2]); + return OMX_ErrorNone; +} OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams( const OMX_VIDEO_PARAM_PORTFORMATTYPE* format) { @@ -660,9 +873,7 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams( mHeight = port->format.video.nFrameHeight; // xFramerate comes in Q16 format, in frames per second unit - const uint32_t framerate = port->format.video.xFramerate >> 16; - // frame duration is in microseconds - mFrameDurationUs = (1000000/framerate); + mFramerate = port->format.video.xFramerate; if (port->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar || port->format.video.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || @@ -675,7 +886,7 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams( OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef; def->format.video.nFrameWidth = mWidth; def->format.video.nFrameHeight = mHeight; - def->format.video.xFramerate = port->format.video.xFramerate; + def->format.video.xFramerate = mFramerate; def->format.video.eColorFormat = mColorFormat; def = &editPortInfo(kOutputPortIndex)->mDef; def->format.video.nFrameWidth = mWidth; @@ -684,6 +895,13 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams( return OMX_ErrorNone; } else if (port->nPortIndex == kOutputPortIndex) { mBitrate = 
port->format.video.nBitrate; + mWidth = port->format.video.nFrameWidth; + mHeight = port->format.video.nFrameHeight; + + OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef; + def->format.video.nFrameWidth = mWidth; + def->format.video.nFrameHeight = mHeight; + def->format.video.nBitrate = mBitrate; return OMX_ErrorNone; } else { return OMX_ErrorBadPortIndex; @@ -710,6 +928,74 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams( return OMX_ErrorNone; } +vpx_enc_frame_flags_t SoftVPXEncoder::getEncodeFlags() { + vpx_enc_frame_flags_t flags = 0; + int patternIdx = mTemporalPatternIdx % mTemporalPatternLength; + mTemporalPatternIdx++; + switch (mTemporalPattern[patternIdx]) { + case kTemporalUpdateLast: + flags |= VP8_EFLAG_NO_UPD_GF; + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_REF_GF; + flags |= VP8_EFLAG_NO_REF_ARF; + break; + case kTemporalUpdateGoldenWithoutDependency: + flags |= VP8_EFLAG_NO_REF_GF; + // Deliberately no break here. + case kTemporalUpdateGolden: + flags |= VP8_EFLAG_NO_REF_ARF; + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_UPD_LAST; + break; + case kTemporalUpdateAltrefWithoutDependency: + flags |= VP8_EFLAG_NO_REF_ARF; + flags |= VP8_EFLAG_NO_REF_GF; + // Deliberately no break here. + case kTemporalUpdateAltref: + flags |= VP8_EFLAG_NO_UPD_GF; + flags |= VP8_EFLAG_NO_UPD_LAST; + break; + case kTemporalUpdateNoneNoRefAltref: + flags |= VP8_EFLAG_NO_REF_ARF; + // Deliberately no break here. 
+ case kTemporalUpdateNone: + flags |= VP8_EFLAG_NO_UPD_GF; + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_UPD_LAST; + flags |= VP8_EFLAG_NO_UPD_ENTROPY; + break; + case kTemporalUpdateNoneNoRefGoldenRefAltRef: + flags |= VP8_EFLAG_NO_REF_GF; + flags |= VP8_EFLAG_NO_UPD_GF; + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_UPD_LAST; + flags |= VP8_EFLAG_NO_UPD_ENTROPY; + break; + case kTemporalUpdateGoldenWithoutDependencyRefAltRef: + flags |= VP8_EFLAG_NO_REF_GF; + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_UPD_LAST; + break; + case kTemporalUpdateLastRefAltRef: + flags |= VP8_EFLAG_NO_UPD_GF; + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_REF_GF; + break; + case kTemporalUpdateGoldenRefAltRef: + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_UPD_LAST; + break; + case kTemporalUpdateLastAndGoldenRefAltRef: + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_REF_GF; + break; + case kTemporalUpdateLastRefAll: + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_UPD_GF; + break; + } + return flags; +} void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { // Initialize encoder if not already @@ -794,6 +1080,9 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { kInputBufferAlignment, source); vpx_enc_frame_flags_t flags = 0; + if (mTemporalPatternLength > 0) { + flags = getEncodeFlags(); + } if (mKeyFrameRequested) { flags |= VPX_EFLAG_FORCE_KF; mKeyFrameRequested = false; @@ -814,11 +1103,18 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { mBitrateUpdated = false; } + uint32_t frameDuration; + if (inputBufferHeader->nTimeStamp > mLastTimestamp) { + frameDuration = (uint32_t)(inputBufferHeader->nTimeStamp - mLastTimestamp); + } else { + frameDuration = (uint32_t)(((uint64_t)1000000 << 16) / mFramerate); + } + mLastTimestamp = inputBufferHeader->nTimeStamp; codec_return = vpx_codec_encode( mCodecContext, &raw_frame, inputBufferHeader->nTimeStamp, // in timebase units - mFrameDurationUs, // frame 
duration in timebase units + frameDuration, // frame duration in timebase units flags, // frame flags VPX_DL_REALTIME); // encoding deadline if (codec_return != VPX_CODEC_OK) { @@ -860,10 +1156,9 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { OMX_ERRORTYPE SoftVPXEncoder::getExtensionIndex( const char *name, OMX_INDEXTYPE *index) { if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) { - *index = OMX_IndexVendorStartUnused; + *(int32_t*)index = kStoreMetaDataExtensionIndex; return OMX_ErrorNone; } - return SimpleSoftOMXComponent::getExtensionIndex(name, index); } diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h index 076830f..c5a83d1 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h @@ -91,6 +91,47 @@ protected: const char *name, OMX_INDEXTYPE *index); private: + enum { + kStoreMetaDataExtensionIndex = OMX_IndexVendorStartUnused + 1, + }; + + enum TemporalReferences { + // For 1 layer case: reference all (last, golden, and alt ref), but only + // update last. + kTemporalUpdateLastRefAll = 12, + // First base layer frame for 3 temporal layers, which updates last and + // golden with alt ref dependency. + kTemporalUpdateLastAndGoldenRefAltRef = 11, + // First enhancement layer with alt ref dependency. + kTemporalUpdateGoldenRefAltRef = 10, + // First enhancement layer with alt ref dependency. + kTemporalUpdateGoldenWithoutDependencyRefAltRef = 9, + // Base layer with alt ref dependency. + kTemporalUpdateLastRefAltRef = 8, + // Highest enhacement layer without dependency on golden with alt ref + // dependency. + kTemporalUpdateNoneNoRefGoldenRefAltRef = 7, + // Second layer and last frame in cycle, for 2 layers. + kTemporalUpdateNoneNoRefAltref = 6, + // Highest enhancement layer. + kTemporalUpdateNone = 5, + // Second enhancement layer. 
+ kTemporalUpdateAltref = 4, + // Second enhancement layer without dependency on previous frames in + // the second enhancement layer. + kTemporalUpdateAltrefWithoutDependency = 3, + // First enhancement layer. + kTemporalUpdateGolden = 2, + // First enhancement layer without dependency on previous frames in + // the first enhancement layer. + kTemporalUpdateGoldenWithoutDependency = 1, + // Base layer. + kTemporalUpdateLast = 0, + }; + enum { + kMaxTemporalPattern = 8 + }; + // number of buffers allocated per port static const uint32_t kNumBuffers = 4; @@ -130,16 +171,15 @@ private: // Target bitrate set for the encoder, in bits per second. uint32_t mBitrate; + // Target framerate set for the encoder. + uint32_t mFramerate; + // If a request for a change it bitrate has been received. bool mBitrateUpdated; // Bitrate control mode, either constant or variable vpx_rc_mode mBitrateControlMode; - // Frame duration is the reciprocal of framerate, denoted - // in microseconds - uint64_t mFrameDurationUs; - // vp8 specific configuration parameter // that enables token partitioning of // the stream into substreams @@ -160,6 +200,36 @@ private: // something else. OMX_VIDEO_VP8LEVELTYPE mLevel; + // Key frame interval in frames + uint32_t mKeyFrameInterval; + + // Minimum (best quality) quantizer + uint32_t mMinQuantizer; + + // Maximum (worst quality) quantizer + uint32_t mMaxQuantizer; + + // Number of coding temporal layers to be used. 
+ size_t mTemporalLayers; + + // Temporal layer bitrare ratio in percentage + uint32_t mTemporalLayerBitrateRatio[OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]; + + // Temporal pattern type + OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE mTemporalPatternType; + + // Temporal pattern length + size_t mTemporalPatternLength; + + // Temporal pattern current index + size_t mTemporalPatternIdx; + + // Frame type temporal pattern + TemporalReferences mTemporalPattern[kMaxTemporalPattern]; + + // Last input buffer timestamp + OMX_TICKS mLastTimestamp; + // Conversion buffer is needed to convert semi // planar yuv420 to planar format // It is only allocated if input format is @@ -185,6 +255,9 @@ private: // dtor. status_t releaseEncoder(); + // Get current encode flags + vpx_enc_frame_flags_t getEncodeFlags(); + // Handles port changes with respect to color formats OMX_ERRORTYPE internalSetFormatParams( const OMX_VIDEO_PARAM_PORTFORMATTYPE* format); @@ -206,6 +279,10 @@ private: OMX_ERRORTYPE internalSetVp8Params( const OMX_VIDEO_PARAM_VP8TYPE* vp8Params); + // Handles Android vp8 specific parameters. 
+ OMX_ERRORTYPE internalSetAndroidVp8Params( + const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE* vp8AndroidParams); + // Updates encoder profile OMX_ERRORTYPE internalSetProfileLevel( const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel); diff --git a/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c b/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c index 2bb4c4d..524a3f0 100644 --- a/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c +++ b/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c @@ -42,6 +42,8 @@ #include "h264bsd_decoder.h" #include "h264bsd_util.h" +#define UNUSED(x) (void)(x) + /*------------------------------------------------------------------------------ Version Information ------------------------------------------------------------------------------*/ @@ -73,6 +75,7 @@ H264DEC_EVALUATION Compile evaluation version, restricts number of frames #endif void H264SwDecTrace(char *string) { + UNUSED(string); } void* H264SwDecMalloc(u32 size) { diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c index c948776..b409a06 100755 --- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c +++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c @@ -42,6 +42,8 @@ #include "armVC.h" #endif /* H264DEC_OMXDL */ +#define UNUSED(x) (void)(x) + /*------------------------------------------------------------------------------ 2. 
External compiler flags -------------------------------------------------------------------------------- @@ -2136,7 +2138,8 @@ static void FillRow1( i32 center, i32 right) { - + UNUSED(left); + UNUSED(right); ASSERT(ref); ASSERT(fill); diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c index a7c6f64..23401c6 100755 --- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c +++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c @@ -47,6 +47,8 @@ #include "h264bsd_nal_unit.h" #include "h264bsd_dpb.h" +#define UNUSED(x) (void)(x) + /*------------------------------------------------------------------------------ 2. External compiler flags -------------------------------------------------------------------------------- @@ -1407,6 +1409,7 @@ u32 h264bsdCheckPriorPicsFlag(u32 * noOutputOfPriorPicsFlag, u32 tmp, value, i; i32 ivalue; strmData_t tmpStrmData[1]; + UNUSED(nalUnitType); /* Code */ diff --git a/media/libstagefright/codecs/opus/Android.mk b/media/libstagefright/codecs/opus/Android.mk new file mode 100644 index 0000000..365b179 --- /dev/null +++ b/media/libstagefright/codecs/opus/Android.mk @@ -0,0 +1,4 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +include $(call all-makefiles-under,$(LOCAL_PATH))
\ No newline at end of file diff --git a/media/libstagefright/codecs/opus/dec/Android.mk b/media/libstagefright/codecs/opus/dec/Android.mk new file mode 100644 index 0000000..2379c5f --- /dev/null +++ b/media/libstagefright/codecs/opus/dec/Android.mk @@ -0,0 +1,19 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + SoftOpus.cpp + +LOCAL_C_INCLUDES := \ + external/libopus/include \ + frameworks/av/media/libstagefright/include \ + frameworks/native/include/media/openmax \ + +LOCAL_SHARED_LIBRARIES := \ + libopus libstagefright libstagefright_omx \ + libstagefright_foundation libutils liblog + +LOCAL_MODULE := libstagefright_soft_opusdec +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp new file mode 100644 index 0000000..b8084ae --- /dev/null +++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp @@ -0,0 +1,540 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SoftOpus" +#include <utils/Log.h> + +#include "SoftOpus.h" +#include <OMX_AudioExt.h> +#include <OMX_IndexExt.h> + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/MediaDefs.h> + +extern "C" { + #include <opus.h> + #include <opus_multistream.h> +} + +namespace android { + +static const int kRate = 48000; + +template<class T> +static void InitOMXParams(T *params) { + params->nSize = sizeof(T); + params->nVersion.s.nVersionMajor = 1; + params->nVersion.s.nVersionMinor = 0; + params->nVersion.s.nRevision = 0; + params->nVersion.s.nStep = 0; +} + +SoftOpus::SoftOpus( + const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component) + : SimpleSoftOMXComponent(name, callbacks, appData, component), + mInputBufferCount(0), + mDecoder(NULL), + mHeader(NULL), + mCodecDelay(0), + mSeekPreRoll(0), + mAnchorTimeUs(0), + mNumFramesOutput(0), + mOutputPortSettingsChange(NONE) { + initPorts(); + CHECK_EQ(initDecoder(), (status_t)OK); +} + +SoftOpus::~SoftOpus() { + if (mDecoder 
!= NULL) { + opus_multistream_decoder_destroy(mDecoder); + mDecoder = NULL; + } + if (mHeader != NULL) { + delete mHeader; + mHeader = NULL; + } +} + +void SoftOpus::initPorts() { + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + + def.nPortIndex = 0; + def.eDir = OMX_DirInput; + def.nBufferCountMin = kNumBuffers; + def.nBufferCountActual = def.nBufferCountMin; + def.nBufferSize = 960 * 6; + def.bEnabled = OMX_TRUE; + def.bPopulated = OMX_FALSE; + def.eDomain = OMX_PortDomainAudio; + def.bBuffersContiguous = OMX_FALSE; + def.nBufferAlignment = 1; + + def.format.audio.cMIMEType = + const_cast<char *>(MEDIA_MIMETYPE_AUDIO_OPUS); + + def.format.audio.pNativeRender = NULL; + def.format.audio.bFlagErrorConcealment = OMX_FALSE; + def.format.audio.eEncoding = + (OMX_AUDIO_CODINGTYPE)OMX_AUDIO_CodingAndroidOPUS; + + addPort(def); + + def.nPortIndex = 1; + def.eDir = OMX_DirOutput; + def.nBufferCountMin = kNumBuffers; + def.nBufferCountActual = def.nBufferCountMin; + def.nBufferSize = kMaxNumSamplesPerBuffer * sizeof(int16_t); + def.bEnabled = OMX_TRUE; + def.bPopulated = OMX_FALSE; + def.eDomain = OMX_PortDomainAudio; + def.bBuffersContiguous = OMX_FALSE; + def.nBufferAlignment = 2; + + def.format.audio.cMIMEType = const_cast<char *>("audio/raw"); + def.format.audio.pNativeRender = NULL; + def.format.audio.bFlagErrorConcealment = OMX_FALSE; + def.format.audio.eEncoding = OMX_AUDIO_CodingPCM; + + addPort(def); +} + +status_t SoftOpus::initDecoder() { + return OK; +} + +OMX_ERRORTYPE SoftOpus::internalGetParameter( + OMX_INDEXTYPE index, OMX_PTR params) { + switch ((int)index) { + case OMX_IndexParamAudioAndroidOpus: + { + OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *opusParams = + (OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *)params; + + if (opusParams->nPortIndex != 0) { + return OMX_ErrorUndefined; + } + + opusParams->nAudioBandWidth = 0; + opusParams->nSampleRate = kRate; + opusParams->nBitRate = 0; + + if (!isConfigured()) { + opusParams->nChannels = 1; + } else { + 
opusParams->nChannels = mHeader->channels; + } + + return OMX_ErrorNone; + } + + case OMX_IndexParamAudioPcm: + { + OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams = + (OMX_AUDIO_PARAM_PCMMODETYPE *)params; + + if (pcmParams->nPortIndex != 1) { + return OMX_ErrorUndefined; + } + + pcmParams->eNumData = OMX_NumericalDataSigned; + pcmParams->eEndian = OMX_EndianBig; + pcmParams->bInterleaved = OMX_TRUE; + pcmParams->nBitPerSample = 16; + pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear; + pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF; + pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF; + pcmParams->nSamplingRate = kRate; + + if (!isConfigured()) { + pcmParams->nChannels = 1; + } else { + pcmParams->nChannels = mHeader->channels; + } + + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::internalGetParameter(index, params); + } +} + +OMX_ERRORTYPE SoftOpus::internalSetParameter( + OMX_INDEXTYPE index, const OMX_PTR params) { + switch ((int)index) { + case OMX_IndexParamStandardComponentRole: + { + const OMX_PARAM_COMPONENTROLETYPE *roleParams = + (const OMX_PARAM_COMPONENTROLETYPE *)params; + + if (strncmp((const char *)roleParams->cRole, + "audio_decoder.opus", + OMX_MAX_STRINGNAME_SIZE - 1)) { + return OMX_ErrorUndefined; + } + + return OMX_ErrorNone; + } + + case OMX_IndexParamAudioAndroidOpus: + { + const OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *opusParams = + (const OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *)params; + + if (opusParams->nPortIndex != 0) { + return OMX_ErrorUndefined; + } + + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::internalSetParameter(index, params); + } +} + +bool SoftOpus::isConfigured() const { + return mInputBufferCount >= 1; +} + +static uint16_t ReadLE16(const uint8_t *data, size_t data_size, + uint32_t read_offset) { + if (read_offset + 1 > data_size) + return 0; + uint16_t val; + val = data[read_offset]; + val |= data[read_offset + 1] << 8; + return val; +} + +// Opus uses Vorbis channel mapping, and 
Vorbis channel mapping specifies +// mappings for up to 8 channels. This information is part of the Vorbis I +// Specification: +// http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html +static const int kMaxChannels = 8; + +// Maximum packet size used in Xiph's opusdec. +static const int kMaxOpusOutputPacketSizeSamples = 960 * 6; + +// Default audio output channel layout. Used to initialize |stream_map| in +// OpusHeader, and passed to opus_multistream_decoder_create() when the header +// does not contain mapping information. The values are valid only for mono and +// stereo output: Opus streams with more than 2 channels require a stream map. +static const int kMaxChannelsWithDefaultLayout = 2; +static const uint8_t kDefaultOpusChannelLayout[kMaxChannelsWithDefaultLayout] = { 0, 1 }; + +// Parses Opus Header. Header spec: http://wiki.xiph.org/OggOpus#ID_Header +static bool ParseOpusHeader(const uint8_t *data, size_t data_size, + OpusHeader* header) { + // Size of the Opus header excluding optional mapping information. + const size_t kOpusHeaderSize = 19; + + // Offset to the channel count byte in the Opus header. + const size_t kOpusHeaderChannelsOffset = 9; + + // Offset to the pre-skip value in the Opus header. + const size_t kOpusHeaderSkipSamplesOffset = 10; + + // Offset to the gain value in the Opus header. + const size_t kOpusHeaderGainOffset = 16; + + // Offset to the channel mapping byte in the Opus header. + const size_t kOpusHeaderChannelMappingOffset = 18; + + // Opus Header contains a stream map. The mapping values are in the header + // beyond the always present |kOpusHeaderSize| bytes of data. The mapping + // data contains stream count, coupling information, and per channel mapping + // values: + // - Byte 0: Number of streams. + // - Byte 1: Number coupled. + // - Byte 2: Starting at byte 2 are |header->channels| uint8 mapping + // values. 
+ const size_t kOpusHeaderNumStreamsOffset = kOpusHeaderSize; + const size_t kOpusHeaderNumCoupledOffset = kOpusHeaderNumStreamsOffset + 1; + const size_t kOpusHeaderStreamMapOffset = kOpusHeaderNumStreamsOffset + 2; + + if (data_size < kOpusHeaderSize) { + ALOGV("Header size is too small."); + return false; + } + header->channels = *(data + kOpusHeaderChannelsOffset); + + if (header->channels <= 0 || header->channels > kMaxChannels) { + ALOGV("Invalid Header, wrong channel count: %d", header->channels); + return false; + } + header->skip_samples = ReadLE16(data, data_size, + kOpusHeaderSkipSamplesOffset); + header->gain_db = static_cast<int16_t>( + ReadLE16(data, data_size, + kOpusHeaderGainOffset)); + header->channel_mapping = *(data + kOpusHeaderChannelMappingOffset); + if (!header->channel_mapping) { + if (header->channels > kMaxChannelsWithDefaultLayout) { + ALOGV("Invalid Header, missing stream map."); + return false; + } + header->num_streams = 1; + header->num_coupled = header->channels > 1; + header->stream_map[0] = 0; + header->stream_map[1] = 1; + return true; + } + if (data_size < kOpusHeaderStreamMapOffset + header->channels) { + ALOGV("Invalid stream map; insufficient data for current channel " + "count: %d", header->channels); + return false; + } + header->num_streams = *(data + kOpusHeaderNumStreamsOffset); + header->num_coupled = *(data + kOpusHeaderNumCoupledOffset); + if (header->num_streams + header->num_coupled != header->channels) { + ALOGV("Inconsistent channel mapping."); + return false; + } + for (int i = 0; i < header->channels; ++i) + header->stream_map[i] = *(data + kOpusHeaderStreamMapOffset + i); + return true; +} + +// Convert nanoseconds to number of samples. 
+static uint64_t ns_to_samples(uint64_t ns, int kRate) { + return static_cast<double>(ns) * kRate / 1000000000; +} + +void SoftOpus::onQueueFilled(OMX_U32 portIndex) { + List<BufferInfo *> &inQueue = getPortQueue(0); + List<BufferInfo *> &outQueue = getPortQueue(1); + + if (mOutputPortSettingsChange != NONE) { + return; + } + + if (portIndex == 0 && mInputBufferCount < 3) { + BufferInfo *info = *inQueue.begin(); + OMX_BUFFERHEADERTYPE *header = info->mHeader; + + const uint8_t *data = header->pBuffer + header->nOffset; + size_t size = header->nFilledLen; + + if (mInputBufferCount == 0) { + CHECK(mHeader == NULL); + mHeader = new OpusHeader(); + memset(mHeader, 0, sizeof(*mHeader)); + if (!ParseOpusHeader(data, size, mHeader)) { + ALOGV("Parsing Opus Header failed."); + notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); + return; + } + + uint8_t channel_mapping[kMaxChannels] = {0}; + memcpy(&channel_mapping, + kDefaultOpusChannelLayout, + kMaxChannelsWithDefaultLayout); + + int status = OPUS_INVALID_STATE; + mDecoder = opus_multistream_decoder_create(kRate, + mHeader->channels, + mHeader->num_streams, + mHeader->num_coupled, + channel_mapping, + &status); + if (!mDecoder || status != OPUS_OK) { + ALOGV("opus_multistream_decoder_create failed status=%s", + opus_strerror(status)); + notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); + return; + } + status = + opus_multistream_decoder_ctl(mDecoder, + OPUS_SET_GAIN(mHeader->gain_db)); + if (status != OPUS_OK) { + ALOGV("Failed to set OPUS header gain; status=%s", + opus_strerror(status)); + notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); + return; + } + } else if (mInputBufferCount == 1) { + mCodecDelay = ns_to_samples( + *(reinterpret_cast<int64_t*>(header->pBuffer + + header->nOffset)), + kRate); + mSamplesToDiscard = mCodecDelay; + } else { + mSeekPreRoll = ns_to_samples( + *(reinterpret_cast<int64_t*>(header->pBuffer + + header->nOffset)), + kRate); + notify(OMX_EventPortSettingsChanged, 1, 0, NULL); + 
mOutputPortSettingsChange = AWAITING_DISABLED; + } + + inQueue.erase(inQueue.begin()); + info->mOwnedByUs = false; + notifyEmptyBufferDone(header); + ++mInputBufferCount; + return; + } + + while (!inQueue.empty() && !outQueue.empty()) { + BufferInfo *inInfo = *inQueue.begin(); + OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; + + BufferInfo *outInfo = *outQueue.begin(); + OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; + + if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + inQueue.erase(inQueue.begin()); + inInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inHeader); + + outHeader->nFilledLen = 0; + outHeader->nFlags = OMX_BUFFERFLAG_EOS; + + outQueue.erase(outQueue.begin()); + outInfo->mOwnedByUs = false; + notifyFillBufferDone(outHeader); + return; + } + + if (inHeader->nOffset == 0) { + mAnchorTimeUs = inHeader->nTimeStamp; + mNumFramesOutput = 0; + } + + // When seeking to zero, |mCodecDelay| samples has to be discarded + // instead of |mSeekPreRoll| samples (as we would when seeking to any + // other timestamp). 
+ if (inHeader->nTimeStamp == 0) { + mSamplesToDiscard = mCodecDelay; + } + + const uint8_t *data = inHeader->pBuffer + inHeader->nOffset; + const uint32_t size = inHeader->nFilledLen; + + int numFrames = opus_multistream_decode(mDecoder, + data, + size, + (int16_t *)outHeader->pBuffer, + kMaxOpusOutputPacketSizeSamples, + 0); + if (numFrames < 0) { + ALOGE("opus_multistream_decode returned %d", numFrames); + notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); + return; + } + + outHeader->nOffset = 0; + if (mSamplesToDiscard > 0) { + if (mSamplesToDiscard > numFrames) { + mSamplesToDiscard -= numFrames; + numFrames = 0; + } else { + numFrames -= mSamplesToDiscard; + outHeader->nOffset = mSamplesToDiscard * sizeof(int16_t) * + mHeader->channels; + mSamplesToDiscard = 0; + } + } + + outHeader->nFilledLen = numFrames * sizeof(int16_t) * mHeader->channels; + outHeader->nFlags = 0; + + outHeader->nTimeStamp = mAnchorTimeUs + + (mNumFramesOutput * 1000000ll) / + kRate; + + mNumFramesOutput += numFrames; + + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + + outInfo->mOwnedByUs = false; + outQueue.erase(outQueue.begin()); + outInfo = NULL; + notifyFillBufferDone(outHeader); + outHeader = NULL; + + ++mInputBufferCount; + } +} + +void SoftOpus::onPortFlushCompleted(OMX_U32 portIndex) { + if (portIndex == 0 && mDecoder != NULL) { + // Make sure that the next buffer output does not still + // depend on fragments from the last one decoded. 
+ mNumFramesOutput = 0; + opus_multistream_decoder_ctl(mDecoder, OPUS_RESET_STATE); + mAnchorTimeUs = 0; + mSamplesToDiscard = mSeekPreRoll; + } +} + +void SoftOpus::onReset() { + mInputBufferCount = 0; + mNumFramesOutput = 0; + if (mDecoder != NULL) { + opus_multistream_decoder_destroy(mDecoder); + mDecoder = NULL; + } + if (mHeader != NULL) { + delete mHeader; + mHeader = NULL; + } + + mOutputPortSettingsChange = NONE; +} + +void SoftOpus::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { + if (portIndex != 1) { + return; + } + + switch (mOutputPortSettingsChange) { + case NONE: + break; + + case AWAITING_DISABLED: + { + CHECK(!enabled); + mOutputPortSettingsChange = AWAITING_ENABLED; + break; + } + + default: + { + CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED); + CHECK(enabled); + mOutputPortSettingsChange = NONE; + break; + } + } +} + +} // namespace android + +android::SoftOMXComponent *createSoftOMXComponent( + const char *name, const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, OMX_COMPONENTTYPE **component) { + return new android::SoftOpus(name, callbacks, appData, component); +} diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.h b/media/libstagefright/codecs/opus/dec/SoftOpus.h new file mode 100644 index 0000000..97f6561 --- /dev/null +++ b/media/libstagefright/codecs/opus/dec/SoftOpus.h @@ -0,0 +1,94 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* + * The Opus specification is part of IETF RFC 6716: + * http://tools.ietf.org/html/rfc6716 + */ + +#ifndef SOFT_OPUS_H_ + +#define SOFT_OPUS_H_ + +#include "SimpleSoftOMXComponent.h" + +struct OpusMSDecoder; + +namespace android { + +struct OpusHeader { + int channels; + int skip_samples; + int channel_mapping; + int num_streams; + int num_coupled; + int16_t gain_db; + uint8_t stream_map[8]; +}; + +struct SoftOpus : public SimpleSoftOMXComponent { + SoftOpus(const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component); + +protected: + virtual ~SoftOpus(); + + virtual OMX_ERRORTYPE internalGetParameter( + OMX_INDEXTYPE index, OMX_PTR params); + + virtual OMX_ERRORTYPE internalSetParameter( + OMX_INDEXTYPE index, const OMX_PTR params); + + virtual void onQueueFilled(OMX_U32 portIndex); + virtual void onPortFlushCompleted(OMX_U32 portIndex); + virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); + +private: + enum { + kNumBuffers = 4, + kMaxNumSamplesPerBuffer = 960 * 6 + }; + + size_t mInputBufferCount; + + OpusMSDecoder *mDecoder; + OpusHeader *mHeader; + + int64_t mCodecDelay; + int64_t mSeekPreRoll; + int64_t mSamplesToDiscard; + int64_t mAnchorTimeUs; + int64_t mNumFramesOutput; + + enum { + NONE, + AWAITING_DISABLED, + AWAITING_ENABLED + } mOutputPortSettingsChange; + + void initPorts(); + status_t initDecoder(); + bool isConfigured() const; + + DISALLOW_EVIL_CONSTRUCTORS(SoftOpus); +}; + +} // namespace android + +#endif // SOFT_OPUS_H_ diff --git a/media/libstagefright/codecs/raw/Android.mk b/media/libstagefright/codecs/raw/Android.mk index fe90a03..87080e7 100644 --- a/media/libstagefright/codecs/raw/Android.mk +++ b/media/libstagefright/codecs/raw/Android.mk @@ -8,6 +8,8 @@ LOCAL_C_INCLUDES := \ frameworks/av/media/libstagefright/include \ frameworks/native/include/media/openmax +LOCAL_CFLAGS += -Werror + LOCAL_SHARED_LIBRARIES := \ libstagefright_omx 
libstagefright_foundation libutils liblog diff --git a/media/libstagefright/codecs/vorbis/dec/Android.mk b/media/libstagefright/codecs/vorbis/dec/Android.mk index 2232353..217a6d2 100644 --- a/media/libstagefright/codecs/vorbis/dec/Android.mk +++ b/media/libstagefright/codecs/vorbis/dec/Android.mk @@ -16,4 +16,6 @@ LOCAL_SHARED_LIBRARIES := \ LOCAL_MODULE := libstagefright_soft_vorbisdec LOCAL_MODULE_TAGS := optional +LOCAL_CFLAGS += -Werror + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/data/media_codecs_google_audio.xml b/media/libstagefright/data/media_codecs_google_audio.xml new file mode 100644 index 0000000..b1f93de --- /dev/null +++ b/media/libstagefright/data/media_codecs_google_audio.xml @@ -0,0 +1,35 @@ +<?xml version="1.0" encoding="utf-8" ?> +<!-- Copyright (C) 2014 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> + +<Included> + <Decoders> + <MediaCodec name="OMX.google.mp3.decoder" type="audio/mpeg" /> + <MediaCodec name="OMX.google.amrnb.decoder" type="audio/3gpp" /> + <MediaCodec name="OMX.google.amrwb.decoder" type="audio/amr-wb" /> + <MediaCodec name="OMX.google.aac.decoder" type="audio/mp4a-latm" /> + <MediaCodec name="OMX.google.g711.alaw.decoder" type="audio/g711-alaw" /> + <MediaCodec name="OMX.google.g711.mlaw.decoder" type="audio/g711-mlaw" /> + <MediaCodec name="OMX.google.vorbis.decoder" type="audio/vorbis" /> + <MediaCodec name="OMX.google.opus.decoder" type="audio/opus" /> + </Decoders> + + <Encoders> + <MediaCodec name="OMX.google.aac.encoder" type="audio/mp4a-latm" /> + <MediaCodec name="OMX.google.amrnb.encoder" type="audio/3gpp" /> + <MediaCodec name="OMX.google.amrwb.encoder" type="audio/amr-wb" /> + <MediaCodec name="OMX.google.flac.encoder" type="audio/flac" /> + </Encoders> +</Included> diff --git a/media/libstagefright/data/media_codecs_google_telephony.xml b/media/libstagefright/data/media_codecs_google_telephony.xml new file mode 100644 index 0000000..28f5ffc --- /dev/null +++ b/media/libstagefright/data/media_codecs_google_telephony.xml @@ -0,0 +1,21 @@ +<?xml version="1.0" encoding="utf-8" ?> +<!-- Copyright (C) 2014 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> + +<Included> + <Decoders> + <MediaCodec name="OMX.google.gsm.decoder" type="audio/gsm" /> + </Decoders> +</Included> diff --git a/media/libstagefright/data/media_codecs_google_video.xml b/media/libstagefright/data/media_codecs_google_video.xml new file mode 100644 index 0000000..41e0efb --- /dev/null +++ b/media/libstagefright/data/media_codecs_google_video.xml @@ -0,0 +1,32 @@ +<?xml version="1.0" encoding="utf-8" ?> +<!-- Copyright (C) 2014 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> + +<Included> + <Decoders> + <MediaCodec name="OMX.google.mpeg4.decoder" type="video/mp4v-es" /> + <MediaCodec name="OMX.google.h263.decoder" type="video/3gpp" /> + <MediaCodec name="OMX.google.h264.decoder" type="video/avc" /> + <MediaCodec name="OMX.google.vp8.decoder" type="video/x-vnd.on2.vp8" /> + <MediaCodec name="OMX.google.vp9.decoder" type="video/x-vnd.on2.vp9" /> + </Decoders> + + <Encoders> + <MediaCodec name="OMX.google.h263.encoder" type="video/3gpp" /> + <MediaCodec name="OMX.google.h264.encoder" type="video/avc" /> + <MediaCodec name="OMX.google.mpeg4.encoder" type="video/mp4v-es" /> + <MediaCodec name="OMX.google.vp8.encoder" type="video/x-vnd.on2.vp8" /> + </Encoders> +</Included> diff --git a/media/libstagefright/foundation/AString.cpp b/media/libstagefright/foundation/AString.cpp index b6b21f1..f2d501e 100644 --- a/media/libstagefright/foundation/AString.cpp +++ b/media/libstagefright/foundation/AString.cpp @@ -20,6 +20,7 @@ #include <stdlib.h> #include <string.h> +#include <utils/String8.h> #include "ADebug.h" #include "AString.h" @@ -48,6 +49,13 @@ AString::AString(const char *s, size_t size) setTo(s, size); } +AString::AString(const String8 &from) + : mData(NULL), + mSize(0), + mAllocSize(1) { + setTo(from.string(), from.length()); +} + AString::AString(const AString &from) : mData(NULL), mSize(0), diff --git a/media/libstagefright/foundation/base64.cpp b/media/libstagefright/foundation/base64.cpp index d5fb4e0..dcf5bef 100644 --- a/media/libstagefright/foundation/base64.cpp +++ b/media/libstagefright/foundation/base64.cpp @@ -33,6 +33,10 @@ sp<ABuffer> decodeBase64(const AString &s) { if (n >= 2 && s.c_str()[n - 2] == '=') { padding = 2; + + if (n >= 3 && s.c_str()[n - 3] == '=') { + padding = 3; + } } } @@ -71,7 +75,7 @@ sp<ABuffer> decodeBase64(const AString &s) { if (((i + 1) % 4) == 0) { out[j++] = (accum >> 16); - if (j < outLen) { out[j++] = (accum >> 8) & 0xff; } + if (j < outLen) { out[j++] = (accum >> 8) & 0xff; } if (j < 
outLen) { out[j++] = accum & 0xff; } accum = 0; diff --git a/media/libstagefright/http/Android.mk b/media/libstagefright/http/Android.mk new file mode 100644 index 0000000..7f3307d --- /dev/null +++ b/media/libstagefright/http/Android.mk @@ -0,0 +1,28 @@ +LOCAL_PATH:= $(call my-dir) + +ifneq ($(TARGET_BUILD_PDK), true) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + HTTPHelper.cpp \ + +LOCAL_C_INCLUDES:= \ + $(TOP)/frameworks/av/media/libstagefright \ + $(TOP)/frameworks/native/include/media/openmax \ + $(TOP)/frameworks/base/core/jni \ + +LOCAL_SHARED_LIBRARIES := \ + libstagefright liblog libutils libbinder libstagefright_foundation \ + libandroid_runtime \ + libmedia + +LOCAL_MODULE:= libstagefright_http_support + +LOCAL_CFLAGS += -Wno-multichar + +LOCAL_CFLAGS += -Werror + +include $(BUILD_SHARED_LIBRARY) + +endif diff --git a/media/libstagefright/http/HTTPHelper.cpp b/media/libstagefright/http/HTTPHelper.cpp new file mode 100644 index 0000000..77845e2 --- /dev/null +++ b/media/libstagefright/http/HTTPHelper.cpp @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "HTTPHelper" +#include <utils/Log.h> + +#include "HTTPHelper.h" + +#include "android_runtime/AndroidRuntime.h" +#include "android_util_Binder.h" +#include <media/IMediaHTTPService.h> +#include <media/stagefright/foundation/ADebug.h> +#include <nativehelper/ScopedLocalRef.h> +#include "jni.h" + +namespace android { + +sp<IMediaHTTPService> CreateHTTPServiceInCurrentJavaContext() { + if (AndroidRuntime::getJavaVM() == NULL) { + ALOGE("CreateHTTPServiceInCurrentJavaContext called outside " + "JAVA environment."); + return NULL; + } + + JNIEnv *env = AndroidRuntime::getJNIEnv(); + + ScopedLocalRef<jclass> clazz( + env, env->FindClass("android/media/MediaHTTPService")); + CHECK(clazz.get() != NULL); + + jmethodID constructID = env->GetMethodID(clazz.get(), "<init>", "()V"); + CHECK(constructID != NULL); + + ScopedLocalRef<jobject> httpServiceObj( + env, env->NewObject(clazz.get(), constructID)); + + sp<IMediaHTTPService> httpService; + if (httpServiceObj.get() != NULL) { + jmethodID asBinderID = + env->GetMethodID(clazz.get(), "asBinder", "()Landroid/os/IBinder;"); + CHECK(asBinderID != NULL); + + ScopedLocalRef<jobject> httpServiceBinderObj( + env, env->CallObjectMethod(httpServiceObj.get(), asBinderID)); + CHECK(httpServiceBinderObj.get() != NULL); + + sp<IBinder> binder = + ibinderForJavaObject(env, httpServiceBinderObj.get()); + + httpService = interface_cast<IMediaHTTPService>(binder); + } + + return httpService; +} + +} // namespace android diff --git a/media/libstagefright/include/chromium_http_stub.h b/media/libstagefright/http/HTTPHelper.h index e0651a4..8aef115 100644 --- a/media/libstagefright/include/chromium_http_stub.h +++ b/media/libstagefright/http/HTTPHelper.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2012 The Android Open Source Project + * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with 
the License. @@ -14,21 +14,18 @@ * limitations under the License. */ -#ifndef CHROMIUM_HTTP_STUB_H_ -#define CHROMIUM_HTTP_STUB_H_ +#ifndef HTTP_HELPER_H_ -#include <include/HTTPBase.h> -#include <media/stagefright/DataSource.h> +#define HTTP_HELPER_H_ + +#include <utils/RefBase.h> namespace android { -extern "C" { -HTTPBase *createChromiumHTTPDataSource(uint32_t flags); -status_t UpdateChromiumHTTPDataSourceProxyConfig( - const char *host, int32_t port, const char *exclusionList); +struct IMediaHTTPService; + +sp<IMediaHTTPService> CreateHTTPServiceInCurrentJavaContext(); -DataSource *createDataUriSource(const char *uri); -} -} +} // namespace android -#endif +#endif // HTTP_HELPER_H_ diff --git a/media/libstagefright/http/MediaHTTP.cpp b/media/libstagefright/http/MediaHTTP.cpp new file mode 100644 index 0000000..2d29913 --- /dev/null +++ b/media/libstagefright/http/MediaHTTP.cpp @@ -0,0 +1,205 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaHTTP" +#include <utils/Log.h> + +#include <media/stagefright/MediaHTTP.h> + +#include <binder/IServiceManager.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/ALooper.h> +#include <media/stagefright/Utils.h> + +#include <media/IMediaHTTPConnection.h> + +namespace android { + +MediaHTTP::MediaHTTP(const sp<IMediaHTTPConnection> &conn) + : mInitCheck(NO_INIT), + mHTTPConnection(conn), + mCachedSizeValid(false), + mCachedSize(0ll), + mDrmManagerClient(NULL) { + mInitCheck = OK; +} + +MediaHTTP::~MediaHTTP() { + clearDRMState_l(); +} + +status_t MediaHTTP::connect( + const char *uri, + const KeyedVector<String8, String8> *headers, + off64_t /* offset */) { + if (mInitCheck != OK) { + return mInitCheck; + } + + KeyedVector<String8, String8> extHeaders; + if (headers != NULL) { + extHeaders = *headers; + } + extHeaders.add(String8("User-Agent"), String8(MakeUserAgent().c_str())); + + bool success = mHTTPConnection->connect(uri, &extHeaders); + + mLastHeaders = extHeaders; + mLastURI = uri; + + mCachedSizeValid = false; + + return success ? OK : UNKNOWN_ERROR; +} + +void MediaHTTP::disconnect() { + if (mInitCheck != OK) { + return; + } + + mHTTPConnection->disconnect(); +} + +status_t MediaHTTP::initCheck() const { + return mInitCheck; +} + +ssize_t MediaHTTP::readAt(off64_t offset, void *data, size_t size) { + if (mInitCheck != OK) { + return mInitCheck; + } + + int64_t startTimeUs = ALooper::GetNowUs(); + + size_t numBytesRead = 0; + while (numBytesRead < size) { + size_t copy = size - numBytesRead; + + if (copy > 64 * 1024) { + // limit the buffer sizes transferred across binder boundaries + // to avoid spurious transaction failures. 
+ copy = 64 * 1024; + } + + ssize_t n = mHTTPConnection->readAt( + offset + numBytesRead, (uint8_t *)data + numBytesRead, copy); + + if (n < 0) { + return n; + } else if (n == 0) { + break; + } + + numBytesRead += n; + } + + int64_t delayUs = ALooper::GetNowUs() - startTimeUs; + + addBandwidthMeasurement(numBytesRead, delayUs); + + return numBytesRead; +} + +status_t MediaHTTP::getSize(off64_t *size) { + if (mInitCheck != OK) { + return mInitCheck; + } + + // Caching the returned size so that it stays valid even after a + // disconnect. NuCachedSource2 relies on this. + + if (!mCachedSizeValid) { + mCachedSize = mHTTPConnection->getSize(); + mCachedSizeValid = true; + } + + *size = mCachedSize; + + return *size < 0 ? *size : OK; +} + +uint32_t MediaHTTP::flags() { + return kWantsPrefetching | kIsHTTPBasedSource; +} + +status_t MediaHTTP::reconnectAtOffset(off64_t offset) { + return connect(mLastURI.c_str(), &mLastHeaders, offset); +} + +// DRM... + +sp<DecryptHandle> MediaHTTP::DrmInitialization(const char* mime) { + if (mDrmManagerClient == NULL) { + mDrmManagerClient = new DrmManagerClient(); + } + + if (mDrmManagerClient == NULL) { + return NULL; + } + + if (mDecryptHandle == NULL) { + mDecryptHandle = mDrmManagerClient->openDecryptSession( + String8(mLastURI.c_str()), mime); + } + + if (mDecryptHandle == NULL) { + delete mDrmManagerClient; + mDrmManagerClient = NULL; + } + + return mDecryptHandle; +} + +void MediaHTTP::getDrmInfo( + sp<DecryptHandle> &handle, DrmManagerClient **client) { + handle = mDecryptHandle; + *client = mDrmManagerClient; +} + +String8 MediaHTTP::getUri() { + String8 uri; + if (OK == mHTTPConnection->getUri(&uri)) { + return uri; + } + return String8(mLastURI.c_str()); +} + +String8 MediaHTTP::getMIMEType() const { + if (mInitCheck != OK) { + return String8("application/octet-stream"); + } + + String8 mimeType; + status_t err = mHTTPConnection->getMIMEType(&mimeType); + + if (err != OK) { + return String8("application/octet-stream"); + } 
+ + return mimeType; +} + +void MediaHTTP::clearDRMState_l() { + if (mDecryptHandle != NULL) { + // To release mDecryptHandle + CHECK(mDrmManagerClient); + mDrmManagerClient->closeDecryptSession(mDecryptHandle); + mDecryptHandle = NULL; + } +} + +} // namespace android diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk index f3529f9..e8d558c 100644 --- a/media/libstagefright/httplive/Android.mk +++ b/media/libstagefright/httplive/Android.mk @@ -13,6 +13,8 @@ LOCAL_C_INCLUDES:= \ $(TOP)/frameworks/native/include/media/openmax \ $(TOP)/external/openssl/include +LOCAL_CFLAGS += -Werror + LOCAL_SHARED_LIBRARIES := \ libbinder \ libcrypto \ diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp index 6d48ab7..08a146f 100644 --- a/media/libstagefright/httplive/LiveSession.cpp +++ b/media/libstagefright/httplive/LiveSession.cpp @@ -27,6 +27,8 @@ #include "mpeg2ts/AnotherPacketSource.h" #include <cutils/properties.h> +#include <media/IMediaHTTPConnection.h> +#include <media/IMediaHTTPService.h> #include <media/stagefright/foundation/hexdump.h> #include <media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> @@ -34,6 +36,7 @@ #include <media/stagefright/DataSource.h> #include <media/stagefright/FileSource.h> #include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MediaHTTP.h> #include <media/stagefright/MetaData.h> #include <media/stagefright/Utils.h> @@ -47,17 +50,13 @@ namespace android { LiveSession::LiveSession( - const sp<AMessage> ¬ify, uint32_t flags, bool uidValid, uid_t uid) + const sp<AMessage> ¬ify, uint32_t flags, + const sp<IMediaHTTPService> &httpService) : mNotify(notify), mFlags(flags), - mUIDValid(uidValid), - mUID(uid), + mHTTPService(httpService), mInPreparationPhase(true), - mHTTPDataSource( - HTTPBase::Create( - (mFlags & kFlagIncognito) - ? 
HTTPBase::kFlagIncognito - : 0)), + mHTTPDataSource(new MediaHTTP(mHTTPService->makeHTTPConnection())), mPrevBandwidthIndex(-1), mStreamMask(0), mNewStreamMask(0), @@ -70,9 +69,6 @@ LiveSession::LiveSession( mSwitchInProgress(false), mDisconnectReplyID(0), mSeekReplyID(0) { - if (mUIDValid) { - mHTTPDataSource->setUID(mUID); - } mStreams[kAudioIndex] = StreamItem("audio"); mStreams[kVideoIndex] = StreamItem("video"); @@ -481,11 +477,8 @@ void LiveSession::onConnect(const sp<AMessage> &msg) { headers = NULL; } -#if 1 - ALOGI("onConnect <URL suppressed>"); -#else - ALOGI("onConnect %s", url.c_str()); -#endif + // TODO currently we don't know if we are coming here from incognito mode + ALOGI("onConnect %s", uriDebugString(url).c_str()); mMasterURL = url; @@ -493,7 +486,7 @@ void LiveSession::onConnect(const sp<AMessage> &msg) { mPlaylist = fetchPlaylist(url.c_str(), NULL /* curPlaylistHash */, &dummy); if (mPlaylist == NULL) { - ALOGE("unable to fetch master playlist <URL suppressed>."); + ALOGE("unable to fetch master playlist %s.", uriDebugString(url).c_str()); postPrepared(ERROR_IO); return; @@ -680,7 +673,7 @@ ssize_t LiveSession::fetchFile( ssize_t bytesRead = 0; // adjust range_length if only reading partial block - if (block_size > 0 && (range_length == -1 || buffer->size() + block_size < range_length)) { + if (block_size > 0 && (range_length == -1 || (int64_t)(buffer->size() + block_size) < range_length)) { range_length = buffer->size() + block_size; } for (;;) { diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h index 3f8fee5..d7ed56f 100644 --- a/media/libstagefright/httplive/LiveSession.h +++ b/media/libstagefright/httplive/LiveSession.h @@ -28,6 +28,7 @@ struct ABuffer; struct AnotherPacketSource; struct DataSource; struct HTTPBase; +struct IMediaHTTPService; struct LiveDataSource; struct M3UParser; struct PlaylistFetcher; @@ -40,7 +41,8 @@ struct LiveSession : public AHandler { }; LiveSession( const 
sp<AMessage> ¬ify, - uint32_t flags = 0, bool uidValid = false, uid_t uid = 0); + uint32_t flags, + const sp<IMediaHTTPService> &httpService); enum StreamIndex { kAudioIndex = 0, @@ -134,8 +136,7 @@ private: sp<AMessage> mNotify; uint32_t mFlags; - bool mUIDValid; - uid_t mUID; + sp<IMediaHTTPService> mHTTPService; bool mInPreparationPhase; diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp index 20c3a76..785c515 100644 --- a/media/libstagefright/httplive/M3UParser.cpp +++ b/media/libstagefright/httplive/M3UParser.cpp @@ -170,14 +170,14 @@ status_t M3UParser::MediaGroup::selectTrack(size_t index, bool select) { ALOGE("track %zu already selected", index); return BAD_VALUE; } - ALOGV("selected track %d", index); + ALOGV("selected track %zu", index); mSelectedIndex = index; } else { if (mSelectedIndex != (ssize_t)index) { ALOGE("track %zu is not selected", index); return BAD_VALUE; } - ALOGV("unselected track %d", index); + ALOGV("unselected track %zu", index); mSelectedIndex = -1; } @@ -798,7 +798,8 @@ status_t M3UParser::parseCipherInfo( if (MakeURL(baseURI.c_str(), val.c_str(), &absURI)) { val = absURI; } else { - ALOGE("failed to make absolute url for <URL suppressed>."); + ALOGE("failed to make absolute url for %s.", + uriDebugString(baseURI).c_str()); } } diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp index 513f114..c34f3cb 100644 --- a/media/libstagefright/httplive/PlaylistFetcher.cpp +++ b/media/libstagefright/httplive/PlaylistFetcher.cpp @@ -317,7 +317,7 @@ void PlaylistFetcher::postMonitorQueue(int64_t delayUs, int64_t minDelayUs) { maxDelayUs = minDelayUs; } if (delayUs > maxDelayUs) { - ALOGV("Need to refresh playlist in %lld", maxDelayUs); + ALOGV("Need to refresh playlist in %" PRId64 , maxDelayUs); delayUs = maxDelayUs; } sp<AMessage> msg = new AMessage(kWhatMonitorQueue, id()); @@ -628,7 +628,7 @@ void 
PlaylistFetcher::onMonitorQueue() { int64_t bufferedStreamDurationUs = mPacketSources.valueAt(i)->getBufferedDurationUs(&finalResult); - ALOGV("buffered %lld for stream %d", + ALOGV("buffered %" PRId64 " for stream %d", bufferedStreamDurationUs, mPacketSources.keyAt(i)); if (bufferedStreamDurationUs > bufferedDurationUs) { bufferedDurationUs = bufferedStreamDurationUs; @@ -641,7 +641,7 @@ void PlaylistFetcher::onMonitorQueue() { if (!mPrepared && bufferedDurationUs > targetDurationUs && downloadMore) { mPrepared = true; - ALOGV("prepared, buffered=%lld > %lld", + ALOGV("prepared, buffered=%" PRId64 " > %" PRId64 "", bufferedDurationUs, targetDurationUs); sp<AMessage> msg = mNotify->dup(); msg->setInt32("what", kWhatTemporarilyDoneFetching); @@ -649,7 +649,7 @@ void PlaylistFetcher::onMonitorQueue() { } if (finalResult == OK && downloadMore) { - ALOGV("monitoring, buffered=%lld < %lld", + ALOGV("monitoring, buffered=%" PRId64 " < %" PRId64 "", bufferedDurationUs, durationToBufferUs); // delay the next download slightly; hopefully this gives other concurrent fetchers // a better chance to run. @@ -665,7 +665,7 @@ void PlaylistFetcher::onMonitorQueue() { msg->post(); int64_t delayUs = mPrepared ? kMaxMonitorDelayUs : targetDurationUs / 2; - ALOGV("pausing for %lld, buffered=%lld > %lld", + ALOGV("pausing for %" PRId64 ", buffered=%" PRId64 " > %" PRId64 "", delayUs, bufferedDurationUs, durationToBufferUs); // :TRICKY: need to enforce minimum delay because the delay to // refresh the playlist will become 0 @@ -739,7 +739,7 @@ void PlaylistFetcher::onDownloadNext() { if (mPlaylist->isComplete() || mPlaylist->isEvent()) { mSeqNumber = getSeqNumberForTime(mStartTimeUs); - ALOGV("Initial sequence number for time %lld is %ld from (%ld .. %ld)", + ALOGV("Initial sequence number for time %" PRId64 " is %d from (%d .. 
%d)", mStartTimeUs, mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist); } else { @@ -748,7 +748,7 @@ void PlaylistFetcher::onDownloadNext() { if (mSeqNumber < firstSeqNumberInPlaylist) { mSeqNumber = firstSeqNumberInPlaylist; } - ALOGV("Initial sequence number for live event %ld from (%ld .. %ld)", + ALOGV("Initial sequence number for live event %d from (%d .. %d)", mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist); } @@ -772,7 +772,8 @@ void PlaylistFetcher::onDownloadNext() { if (delayUs > kMaxMonitorDelayUs) { delayUs = kMaxMonitorDelayUs; } - ALOGV("sequence number high: %ld from (%ld .. %ld), monitor in %lld (retry=%d)", + ALOGV("sequence number high: %d from (%d .. %d), " + "monitor in %" PRId64 " (retry=%d)", mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist, delayUs, mNumRetries); postMonitorQueue(delayUs); @@ -915,6 +916,7 @@ void PlaylistFetcher::onDownloadNext() { if (err == -EAGAIN) { // bad starting sequence number hint + mTSParser.clear(); postMonitorQueue(); return; } diff --git a/media/libstagefright/id3/Android.mk b/media/libstagefright/id3/Android.mk index bf6f7bb..2194c38 100644 --- a/media/libstagefright/id3/Android.mk +++ b/media/libstagefright/id3/Android.mk @@ -4,6 +4,8 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES := \ ID3.cpp +LOCAL_CFLAGS += -Werror + LOCAL_MODULE := libstagefright_id3 include $(BUILD_STATIC_LIBRARY) @@ -15,6 +17,8 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES := \ testid3.cpp +LOCAL_CFLAGS += -Werror + LOCAL_SHARED_LIBRARIES := \ libstagefright libutils liblog libbinder libstagefright_foundation diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp index 1199c22..7f221a0 100644 --- a/media/libstagefright/id3/ID3.cpp +++ b/media/libstagefright/id3/ID3.cpp @@ -468,49 +468,6 @@ void ID3::Iterator::getID(String8 *id) const { } } -static void convertISO8859ToString8( - const uint8_t *data, size_t size, - String8 *s) { - size_t utf8len = 0; - for (size_t i = 0; i < size; 
++i) { - if (data[i] == '\0') { - size = i; - break; - } else if (data[i] < 0x80) { - ++utf8len; - } else { - utf8len += 2; - } - } - - if (utf8len == size) { - // Only ASCII characters present. - - s->setTo((const char *)data, size); - return; - } - - char *tmp = new char[utf8len]; - char *ptr = tmp; - for (size_t i = 0; i < size; ++i) { - if (data[i] == '\0') { - break; - } else if (data[i] < 0x80) { - *ptr++ = data[i]; - } else if (data[i] < 0xc0) { - *ptr++ = 0xc2; - *ptr++ = data[i]; - } else { - *ptr++ = 0xc3; - *ptr++ = data[i] - 64; - } - } - - s->setTo(tmp, utf8len); - - delete[] tmp; - tmp = NULL; -} // the 2nd argument is used to get the data following the \0 in a comment field void ID3::Iterator::getString(String8 *id, String8 *comment) const { @@ -543,7 +500,9 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const { return; } - convertISO8859ToString8(frameData, mFrameSize, id); + // this is supposed to be ISO-8859-1, but pass it up as-is to the caller, who will figure + // out the real encoding + id->setTo((const char*)frameData, mFrameSize); return; } @@ -561,13 +520,13 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const { } if (encoding == 0x00) { - // ISO 8859-1 - convertISO8859ToString8(frameData + 1, n, id); + // supposedly ISO 8859-1 + id->setTo((const char*)frameData + 1, n); } else if (encoding == 0x03) { - // UTF-8 + // supposedly UTF-8 id->setTo((const char *)(frameData + 1), n); } else if (encoding == 0x02) { - // UTF-16 BE, no byte order mark. + // supposedly UTF-16 BE, no byte order mark. // API wants number of characters, not number of bytes... int len = n / 2; const char16_t *framedata = (const char16_t *) (frameData + 1); @@ -583,7 +542,7 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const { if (framedatacopy != NULL) { delete[] framedatacopy; } - } else { + } else if (encoding == 0x01) { // UCS-2 // API wants number of characters, not number of bytes... 
int len = n / 2; @@ -602,7 +561,27 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const { framedata++; len--; } - id->setTo(framedata, len); + + // check if the resulting data consists entirely of 8-bit values + bool eightBit = true; + for (int i = 0; i < len; i++) { + if (framedata[i] > 0xff) { + eightBit = false; + break; + } + } + if (eightBit) { + // collapse to 8 bit, then let the media scanner client figure out the real encoding + char *frame8 = new char[len]; + for (int i = 0; i < len; i++) { + frame8[i] = framedata[i]; + } + id->setTo(frame8, len); + delete [] frame8; + } else { + id->setTo(framedata, len); + } + if (framedatacopy != NULL) { delete[] framedatacopy; } diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h index 271df8e..a81bbba 100644 --- a/media/libstagefright/include/AwesomePlayer.h +++ b/media/libstagefright/include/AwesomePlayer.h @@ -63,6 +63,7 @@ struct AwesomePlayer { void setUID(uid_t uid); status_t setDataSource( + const sp<IMediaHTTPService> &httpService, const char *uri, const KeyedVector<String8, String8> *headers = NULL); @@ -159,6 +160,7 @@ private: SystemTimeSource mSystemTimeSource; TimeSource *mTimeSource; + sp<IMediaHTTPService> mHTTPService; String8 mUri; KeyedVector<String8, String8> mUriHeaders; @@ -247,6 +249,7 @@ private: sp<MediaExtractor> mExtractor; status_t setDataSource_l( + const sp<IMediaHTTPService> &httpService, const char *uri, const KeyedVector<String8, String8> *headers = NULL); diff --git a/media/libstagefright/include/ChromiumHTTPDataSource.h b/media/libstagefright/include/ChromiumHTTPDataSource.h deleted file mode 100644 index da188dd..0000000 --- a/media/libstagefright/include/ChromiumHTTPDataSource.h +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef CHROME_HTTP_DATA_SOURCE_H_ - -#define CHROME_HTTP_DATA_SOURCE_H_ - -#include <media/stagefright/foundation/AString.h> -#include <utils/threads.h> - -#include "HTTPBase.h" - -namespace android { - -struct SfDelegate; - -struct ChromiumHTTPDataSource : public HTTPBase { - ChromiumHTTPDataSource(uint32_t flags = 0); - - virtual status_t connect( - const char *uri, - const KeyedVector<String8, String8> *headers = NULL, - off64_t offset = 0); - - virtual void disconnect(); - - virtual status_t initCheck() const; - - virtual ssize_t readAt(off64_t offset, void *data, size_t size); - virtual status_t getSize(off64_t *size); - virtual uint32_t flags(); - - virtual sp<DecryptHandle> DrmInitialization(const char *mime); - - virtual void getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client); - - virtual String8 getUri(); - - virtual String8 getMIMEType() const; - - virtual status_t reconnectAtOffset(off64_t offset); - - static status_t UpdateProxyConfig( - const char *host, int32_t port, const char *exclusionList); - -protected: - virtual ~ChromiumHTTPDataSource(); - -private: - friend struct SfDelegate; - - enum State { - DISCONNECTED, - CONNECTING, - CONNECTED, - READING, - DISCONNECTING - }; - - const uint32_t mFlags; - - mutable Mutex mLock; - Condition mCondition; - - State mState; - - SfDelegate *mDelegate; - - AString mURI; - KeyedVector<String8, String8> mHeaders; - - off64_t mCurrentOffset; - - // Any connection error or the result of a read operation - // (for the lattter this is the number of bytes read, if successful). 
- ssize_t mIOResult; - - int64_t mContentSize; - - String8 mContentType; - - sp<DecryptHandle> mDecryptHandle; - DrmManagerClient *mDrmManagerClient; - - void disconnect_l(); - - status_t connect_l( - const char *uri, - const KeyedVector<String8, String8> *headers, - off64_t offset); - - static void InitiateRead( - ChromiumHTTPDataSource *me, void *data, size_t size); - - void initiateRead(void *data, size_t size); - - void onConnectionEstablished( - int64_t contentSize, const char *contentType); - - void onConnectionFailed(status_t err); - void onReadCompleted(ssize_t size); - void onDisconnectComplete(); - void onRedirect(const char *url); - - void clearDRMState_l(); - - DISALLOW_EVIL_CONSTRUCTORS(ChromiumHTTPDataSource); -}; - -} // namespace android - -#endif // CHROME_HTTP_DATA_SOURCE_H_ diff --git a/media/libstagefright/include/FragmentedMP4Parser.h b/media/libstagefright/include/FragmentedMP4Parser.h deleted file mode 100644 index dbe02b8..0000000 --- a/media/libstagefright/include/FragmentedMP4Parser.h +++ /dev/null @@ -1,274 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef PARSER_H_ - -#define PARSER_H_ - -#include <media/stagefright/foundation/AHandler.h> -#include <media/stagefright/DataSource.h> -#include <utils/Vector.h> - -namespace android { - -struct ABuffer; - -struct FragmentedMP4Parser : public AHandler { - struct Source : public RefBase { - Source() {} - - virtual ssize_t readAt(off64_t offset, void *data, size_t size) = 0; - virtual bool isSeekable() = 0; - - protected: - virtual ~Source() {} - - private: - DISALLOW_EVIL_CONSTRUCTORS(Source); - }; - - FragmentedMP4Parser(); - - void start(const char *filename); - void start(const sp<Source> &source); - void start(sp<DataSource> &source); - - sp<AMessage> getFormat(bool audio, bool synchronous = false); - status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit, bool synchronous = false); - status_t seekTo(bool audio, int64_t timeUs); - bool isSeekable() const; - - virtual void onMessageReceived(const sp<AMessage> &msg); - -protected: - virtual ~FragmentedMP4Parser(); - -private: - enum { - kWhatStart, - kWhatProceed, - kWhatReadMore, - kWhatGetFormat, - kWhatDequeueAccessUnit, - kWhatSeekTo, - }; - - struct TrackFragment; - struct DynamicTrackFragment; - struct StaticTrackFragment; - - struct DispatchEntry { - uint32_t mType; - uint32_t mParentType; - status_t (FragmentedMP4Parser::*mHandler)(uint32_t, size_t, uint64_t); - }; - - struct Container { - uint64_t mOffset; - uint64_t mBytesRemaining; - uint32_t mType; - bool mExtendsToEOF; - }; - - struct SampleDescription { - uint32_t mType; - uint16_t mDataRefIndex; - - sp<AMessage> mFormat; - }; - - struct SampleInfo { - off64_t mOffset; - size_t mSize; - uint32_t mPresentationTime; - size_t mSampleDescIndex; - uint32_t mFlags; - }; - - struct MediaDataInfo { - sp<ABuffer> mBuffer; - off64_t mOffset; - }; - - struct SidxEntry { - size_t mSize; - uint32_t mDurationUs; - }; - - struct TrackInfo { - enum Flags { - kTrackEnabled = 0x01, - kTrackInMovie = 0x02, - kTrackInPreview = 0x04, - }; - - uint32_t 
mTrackID; - uint32_t mFlags; - uint32_t mDuration; // This is the duration in terms of movie timescale! - uint64_t mSidxDuration; // usec, from sidx box, which can use a different timescale - - uint32_t mMediaTimeScale; - - uint32_t mMediaHandlerType; - Vector<SampleDescription> mSampleDescs; - - // from track extends: - uint32_t mDefaultSampleDescriptionIndex; - uint32_t mDefaultSampleDuration; - uint32_t mDefaultSampleSize; - uint32_t mDefaultSampleFlags; - - uint32_t mDecodingTime; - - Vector<SidxEntry> mSidx; - sp<StaticTrackFragment> mStaticFragment; - List<sp<TrackFragment> > mFragments; - }; - - struct TrackFragmentHeaderInfo { - enum Flags { - kBaseDataOffsetPresent = 0x01, - kSampleDescriptionIndexPresent = 0x02, - kDefaultSampleDurationPresent = 0x08, - kDefaultSampleSizePresent = 0x10, - kDefaultSampleFlagsPresent = 0x20, - kDurationIsEmpty = 0x10000, - }; - - uint32_t mTrackID; - uint32_t mFlags; - uint64_t mBaseDataOffset; - uint32_t mSampleDescriptionIndex; - uint32_t mDefaultSampleDuration; - uint32_t mDefaultSampleSize; - uint32_t mDefaultSampleFlags; - - uint64_t mDataOffset; - }; - - static const DispatchEntry kDispatchTable[]; - - sp<Source> mSource; - off_t mBufferPos; - bool mSuspended; - bool mDoneWithMoov; - off_t mFirstMoofOffset; // used as the starting point for offsets calculated from the sidx box - sp<ABuffer> mBuffer; - Vector<Container> mStack; - KeyedVector<uint32_t, TrackInfo> mTracks; // TrackInfo by trackID - Vector<MediaDataInfo> mMediaData; - - uint32_t mCurrentTrackID; - - status_t mFinalResult; - - TrackFragmentHeaderInfo mTrackFragmentHeaderInfo; - - status_t onProceed(); - status_t onDequeueAccessUnit(size_t trackIndex, sp<ABuffer> *accessUnit); - status_t onSeekTo(bool wantAudio, int64_t position); - - void enter(off64_t offset, uint32_t type, uint64_t size); - - uint16_t readU16(size_t offset); - uint32_t readU32(size_t offset); - uint64_t readU64(size_t offset); - void skip(off_t distance); - status_t need(size_t size); - 
bool fitsContainer(uint64_t size) const; - - status_t parseTrackHeader( - uint32_t type, size_t offset, uint64_t size); - - status_t parseMediaHeader( - uint32_t type, size_t offset, uint64_t size); - - status_t parseMediaHandler( - uint32_t type, size_t offset, uint64_t size); - - status_t parseTrackExtends( - uint32_t type, size_t offset, uint64_t size); - - status_t parseTrackFragmentHeader( - uint32_t type, size_t offset, uint64_t size); - - status_t parseTrackFragmentRun( - uint32_t type, size_t offset, uint64_t size); - - status_t parseVisualSampleEntry( - uint32_t type, size_t offset, uint64_t size); - - status_t parseAudioSampleEntry( - uint32_t type, size_t offset, uint64_t size); - - status_t parseSampleSizes( - uint32_t type, size_t offset, uint64_t size); - - status_t parseCompactSampleSizes( - uint32_t type, size_t offset, uint64_t size); - - status_t parseSampleToChunk( - uint32_t type, size_t offset, uint64_t size); - - status_t parseChunkOffsets( - uint32_t type, size_t offset, uint64_t size); - - status_t parseChunkOffsets64( - uint32_t type, size_t offset, uint64_t size); - - status_t parseAVCCodecSpecificData( - uint32_t type, size_t offset, uint64_t size); - - status_t parseESDSCodecSpecificData( - uint32_t type, size_t offset, uint64_t size); - - status_t parseMediaData( - uint32_t type, size_t offset, uint64_t size); - - status_t parseSegmentIndex( - uint32_t type, size_t offset, uint64_t size); - - TrackInfo *editTrack(uint32_t trackID, bool createIfNecessary = false); - - ssize_t findTrack(bool wantAudio) const; - - status_t makeAccessUnit( - TrackInfo *info, - const SampleInfo &sample, - const MediaDataInfo &mdatInfo, - sp<ABuffer> *accessUnit); - - status_t getSample( - TrackInfo *info, - sp<TrackFragment> *fragment, - SampleInfo *sampleInfo); - - static int CompareSampleLocation( - const SampleInfo &sample, const MediaDataInfo &mdatInfo); - - void resumeIfNecessary(); - - void copyBuffer( - sp<ABuffer> *dst, - size_t offset, uint64_t 
size) const; - - DISALLOW_EVIL_CONSTRUCTORS(FragmentedMP4Parser); -}; - -} // namespace android - -#endif // PARSER_H_ - diff --git a/media/libstagefright/include/HTTPBase.h b/media/libstagefright/include/HTTPBase.h index d4b7f9f..1c3cd5e 100644 --- a/media/libstagefright/include/HTTPBase.h +++ b/media/libstagefright/include/HTTPBase.h @@ -48,14 +48,6 @@ struct HTTPBase : public DataSource { virtual status_t setBandwidthStatCollectFreq(int32_t freqMs); - static status_t UpdateProxyConfig( - const char *host, int32_t port, const char *exclusionList); - - void setUID(uid_t uid); - bool getUID(uid_t *uid) const; - - static sp<HTTPBase> Create(uint32_t flags = 0); - static void RegisterSocketUserTag(int sockfd, uid_t uid, uint32_t kTag); static void UnRegisterSocketUserTag(int sockfd); @@ -87,9 +79,6 @@ private: int32_t mPrevEstimatedBandWidthKbps; int32_t mBandWidthCollectFreqMs; - bool mUIDValid; - uid_t mUID; - DISALLOW_EVIL_CONSTRUCTORS(HTTPBase); }; diff --git a/media/libstagefright/include/SDPLoader.h b/media/libstagefright/include/SDPLoader.h index ca59dc0..2c4f543 100644 --- a/media/libstagefright/include/SDPLoader.h +++ b/media/libstagefright/include/SDPLoader.h @@ -25,6 +25,7 @@ namespace android { struct HTTPBase; +struct IMediaHTTPService; struct SDPLoader : public AHandler { enum Flags { @@ -34,7 +35,10 @@ struct SDPLoader : public AHandler { enum { kWhatSDPLoaded = 'sdpl' }; - SDPLoader(const sp<AMessage> ¬ify, uint32_t flags = 0, bool uidValid = false, uid_t uid = 0); + SDPLoader( + const sp<AMessage> ¬ify, + uint32_t flags, + const sp<IMediaHTTPService> &httpService); void load(const char* url, const KeyedVector<String8, String8> *headers); @@ -55,8 +59,6 @@ private: sp<AMessage> mNotify; const char* mUrl; uint32_t mFlags; - bool mUIDValid; - uid_t mUID; sp<ALooper> mNetLooper; bool mCancelled; diff --git a/media/libstagefright/include/StagefrightMetadataRetriever.h b/media/libstagefright/include/StagefrightMetadataRetriever.h index b02ed0e..6632c27 
100644 --- a/media/libstagefright/include/StagefrightMetadataRetriever.h +++ b/media/libstagefright/include/StagefrightMetadataRetriever.h @@ -33,6 +33,7 @@ struct StagefrightMetadataRetriever : public MediaMetadataRetrieverInterface { virtual ~StagefrightMetadataRetriever(); virtual status_t setDataSource( + const sp<IMediaHTTPService> &httpService, const char *url, const KeyedVector<String8, String8> *headers); diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp index 0e4dd2b..d7bec59 100644 --- a/media/libstagefright/matroska/MatroskaExtractor.cpp +++ b/media/libstagefright/matroska/MatroskaExtractor.cpp @@ -315,7 +315,7 @@ void BlockIterator::seek( *actualFrameTimeUs = -1ll; - const int64_t seekTimeNs = seekTimeUs * 1000ll; + const int64_t seekTimeNs = seekTimeUs * 1000ll - mExtractor->mSeekPreRollNs; mkvparser::Segment* const pSegment = mExtractor->mSegment; @@ -630,7 +630,8 @@ MatroskaExtractor::MatroskaExtractor(const sp<DataSource> &source) mReader(new DataSourceReader(mDataSource)), mSegment(NULL), mExtractedThumbnails(false), - mIsWebm(false) { + mIsWebm(false), + mSeekPreRollNs(0) { off64_t size; mIsLiveStreaming = (mDataSource->flags() @@ -656,14 +657,22 @@ MatroskaExtractor::MatroskaExtractor(const sp<DataSource> &source) return; } + // from mkvparser::Segment::Load(), but stop at first cluster ret = mSegment->ParseHeaders(); - CHECK_EQ(ret, 0); - - long len; - ret = mSegment->LoadCluster(pos, len); - CHECK_EQ(ret, 0); + if (ret == 0) { + long len; + ret = mSegment->LoadCluster(pos, len); + if (ret >= 1) { + // no more clusters + ret = 0; + } + } else if (ret > 0) { + ret = mkvparser::E_BUFFER_NOT_FULL; + } if (ret < 0) { + ALOGW("Corrupt %s source: %s", mIsWebm ? 
"webm" : "matroska", + uriDebugString(mDataSource->getUri()).c_str()); delete mSegment; mSegment = NULL; return; @@ -921,6 +930,12 @@ void MatroskaExtractor::addTracks() { err = addVorbisCodecInfo( meta, codecPrivate, codecPrivateSize); + } else if (!strcmp("A_OPUS", codecID)) { + meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_OPUS); + meta->setData(kKeyOpusHeader, 0, codecPrivate, codecPrivateSize); + meta->setInt64(kKeyOpusCodecDelay, track->GetCodecDelay()); + meta->setInt64(kKeyOpusSeekPreRoll, track->GetSeekPreRoll()); + mSeekPreRollNs = track->GetSeekPreRoll(); } else if (!strcmp("A_MPEG/L3", codecID)) { meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG); } else { diff --git a/media/libstagefright/matroska/MatroskaExtractor.h b/media/libstagefright/matroska/MatroskaExtractor.h index 1294b4f..cf200f3 100644 --- a/media/libstagefright/matroska/MatroskaExtractor.h +++ b/media/libstagefright/matroska/MatroskaExtractor.h @@ -69,6 +69,7 @@ private: bool mExtractedThumbnails; bool mIsLiveStreaming; bool mIsWebm; + int64_t mSeekPreRollNs; void addTracks(); void findThumbnails(); diff --git a/media/libstagefright/mp4/FragmentedMP4Parser.cpp b/media/libstagefright/mp4/FragmentedMP4Parser.cpp deleted file mode 100644 index 0102656..0000000 --- a/media/libstagefright/mp4/FragmentedMP4Parser.cpp +++ /dev/null @@ -1,1993 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "FragmentedMP4Parser" -#include <utils/Log.h> - -#include "include/avc_utils.h" -#include "include/ESDS.h" -#include "include/FragmentedMP4Parser.h" -#include "TrackFragment.h" - - -#include <media/stagefright/foundation/ABuffer.h> -#include <media/stagefright/foundation/ADebug.h> -#include <media/stagefright/foundation/AMessage.h> -#include <media/stagefright/foundation/hexdump.h> -#include <media/stagefright/MediaDefs.h> -#include <media/stagefright/MediaErrors.h> -#include <media/stagefright/Utils.h> - - -namespace android { - -static const char *Fourcc2String(uint32_t fourcc) { - static char buffer[5]; - buffer[4] = '\0'; - buffer[0] = fourcc >> 24; - buffer[1] = (fourcc >> 16) & 0xff; - buffer[2] = (fourcc >> 8) & 0xff; - buffer[3] = fourcc & 0xff; - - return buffer; -} - -static const char *IndentString(size_t n) { - static const char kSpace[] = " "; - return kSpace + sizeof(kSpace) - 2 * n - 1; -} - -// static -const FragmentedMP4Parser::DispatchEntry FragmentedMP4Parser::kDispatchTable[] = { - { FOURCC('m', 'o', 'o', 'v'), 0, NULL }, - { FOURCC('t', 'r', 'a', 'k'), FOURCC('m', 'o', 'o', 'v'), NULL }, - { FOURCC('u', 'd', 't', 'a'), FOURCC('t', 'r', 'a', 'k'), NULL }, - { FOURCC('u', 'd', 't', 'a'), FOURCC('m', 'o', 'o', 'v'), NULL }, - { FOURCC('m', 'e', 't', 'a'), FOURCC('u', 'd', 't', 'a'), NULL }, - { FOURCC('i', 'l', 's', 't'), FOURCC('m', 'e', 't', 'a'), NULL }, - - { FOURCC('t', 'k', 'h', 'd'), FOURCC('t', 'r', 'a', 'k'), - &FragmentedMP4Parser::parseTrackHeader - }, - - { FOURCC('m', 'v', 'e', 'x'), FOURCC('m', 'o', 'o', 'v'), NULL }, - - { FOURCC('t', 'r', 'e', 'x'), FOURCC('m', 'v', 'e', 'x'), - &FragmentedMP4Parser::parseTrackExtends - }, - - { FOURCC('e', 'd', 't', 's'), FOURCC('t', 'r', 'a', 'k'), NULL }, - { FOURCC('m', 'd', 'i', 'a'), FOURCC('t', 'r', 'a', 'k'), NULL }, - - { FOURCC('m', 'd', 'h', 'd'), FOURCC('m', 'd', 'i', 'a'), - &FragmentedMP4Parser::parseMediaHeader - }, - - { FOURCC('h', 
'd', 'l', 'r'), FOURCC('m', 'd', 'i', 'a'), - &FragmentedMP4Parser::parseMediaHandler - }, - - { FOURCC('m', 'i', 'n', 'f'), FOURCC('m', 'd', 'i', 'a'), NULL }, - { FOURCC('d', 'i', 'n', 'f'), FOURCC('m', 'i', 'n', 'f'), NULL }, - { FOURCC('s', 't', 'b', 'l'), FOURCC('m', 'i', 'n', 'f'), NULL }, - { FOURCC('s', 't', 's', 'd'), FOURCC('s', 't', 'b', 'l'), NULL }, - - { FOURCC('s', 't', 's', 'z'), FOURCC('s', 't', 'b', 'l'), - &FragmentedMP4Parser::parseSampleSizes }, - - { FOURCC('s', 't', 'z', '2'), FOURCC('s', 't', 'b', 'l'), - &FragmentedMP4Parser::parseCompactSampleSizes }, - - { FOURCC('s', 't', 's', 'c'), FOURCC('s', 't', 'b', 'l'), - &FragmentedMP4Parser::parseSampleToChunk }, - - { FOURCC('s', 't', 'c', 'o'), FOURCC('s', 't', 'b', 'l'), - &FragmentedMP4Parser::parseChunkOffsets }, - - { FOURCC('c', 'o', '6', '4'), FOURCC('s', 't', 'b', 'l'), - &FragmentedMP4Parser::parseChunkOffsets64 }, - - { FOURCC('a', 'v', 'c', 'C'), FOURCC('a', 'v', 'c', '1'), - &FragmentedMP4Parser::parseAVCCodecSpecificData }, - - { FOURCC('e', 's', 'd', 's'), FOURCC('m', 'p', '4', 'a'), - &FragmentedMP4Parser::parseESDSCodecSpecificData }, - - { FOURCC('e', 's', 'd', 's'), FOURCC('m', 'p', '4', 'v'), - &FragmentedMP4Parser::parseESDSCodecSpecificData }, - - { FOURCC('m', 'd', 'a', 't'), 0, &FragmentedMP4Parser::parseMediaData }, - - { FOURCC('m', 'o', 'o', 'f'), 0, NULL }, - { FOURCC('t', 'r', 'a', 'f'), FOURCC('m', 'o', 'o', 'f'), NULL }, - - { FOURCC('t', 'f', 'h', 'd'), FOURCC('t', 'r', 'a', 'f'), - &FragmentedMP4Parser::parseTrackFragmentHeader - }, - { FOURCC('t', 'r', 'u', 'n'), FOURCC('t', 'r', 'a', 'f'), - &FragmentedMP4Parser::parseTrackFragmentRun - }, - - { FOURCC('m', 'f', 'r', 'a'), 0, NULL }, - - { FOURCC('s', 'i', 'd', 'x'), 0, &FragmentedMP4Parser::parseSegmentIndex }, -}; - -struct FileSource : public FragmentedMP4Parser::Source { - FileSource(const char *filename) - : mFile(fopen(filename, "rb")) { - CHECK(mFile != NULL); - } - - virtual ~FileSource() { - 
fclose(mFile); - } - - virtual ssize_t readAt(off64_t offset, void *data, size_t size) { - fseek(mFile, offset, SEEK_SET); - return fread(data, 1, size, mFile); - } - - virtual bool isSeekable() { - return true; - } - - private: - FILE *mFile; - - DISALLOW_EVIL_CONSTRUCTORS(FileSource); -}; - -struct ReadTracker : public RefBase { - ReadTracker(off64_t size) { - allocSize = 1 + size / 8192; // 1 bit per kilobyte - bitmap = (char*) calloc(1, allocSize); - } - virtual ~ReadTracker() { - dumpToLog(); - free(bitmap); - } - void mark(off64_t offset, size_t size) { - int firstbit = offset / 1024; - int lastbit = (offset + size - 1) / 1024; - for (int i = firstbit; i <= lastbit; i++) { - bitmap[i/8] |= (0x80 >> (i & 7)); - } - } - - private: - void dumpToLog() { - // 96 chars per line, each char represents one kilobyte, 1 kb per bit - int numlines = allocSize / 12; - char buf[97]; - char *cur = bitmap; - for (int i = 0; i < numlines; i++ && cur) { - for (int j = 0; j < 12; j++) { - for (int k = 0; k < 8; k++) { - buf[(j * 8) + k] = (*cur & (0x80 >> k)) ? 
'X' : '.'; - } - cur++; - } - buf[96] = '\0'; - ALOGI("%5dk: %s", i * 96, buf); - } - } - - size_t allocSize; - char *bitmap; -}; - -struct DataSourceSource : public FragmentedMP4Parser::Source { - DataSourceSource(sp<DataSource> &source) - : mDataSource(source) { - CHECK(mDataSource != NULL); -#if 0 - off64_t size; - if (source->getSize(&size) == OK) { - mReadTracker = new ReadTracker(size); - } else { - ALOGE("couldn't get data source size"); - } -#endif - } - - virtual ssize_t readAt(off64_t offset, void *data, size_t size) { - if (mReadTracker != NULL) { - mReadTracker->mark(offset, size); - } - return mDataSource->readAt(offset, data, size); - } - - virtual bool isSeekable() { - return true; - } - - private: - sp<DataSource> mDataSource; - sp<ReadTracker> mReadTracker; - - DISALLOW_EVIL_CONSTRUCTORS(DataSourceSource); -}; - -FragmentedMP4Parser::FragmentedMP4Parser() - : mBufferPos(0), - mSuspended(false), - mDoneWithMoov(false), - mFirstMoofOffset(0), - mFinalResult(OK) { -} - -FragmentedMP4Parser::~FragmentedMP4Parser() { -} - -void FragmentedMP4Parser::start(const char *filename) { - sp<AMessage> msg = new AMessage(kWhatStart, id()); - msg->setObject("source", new FileSource(filename)); - msg->post(); - ALOGV("Parser::start(%s)", filename); -} - -void FragmentedMP4Parser::start(const sp<Source> &source) { - sp<AMessage> msg = new AMessage(kWhatStart, id()); - msg->setObject("source", source); - msg->post(); - ALOGV("Parser::start(Source)"); -} - -void FragmentedMP4Parser::start(sp<DataSource> &source) { - sp<AMessage> msg = new AMessage(kWhatStart, id()); - msg->setObject("source", new DataSourceSource(source)); - msg->post(); - ALOGV("Parser::start(DataSource)"); -} - -sp<AMessage> FragmentedMP4Parser::getFormat(bool audio, bool synchronous) { - - while (true) { - bool moovDone = mDoneWithMoov; - sp<AMessage> msg = new AMessage(kWhatGetFormat, id()); - msg->setInt32("audio", audio); - - sp<AMessage> response; - status_t err = 
msg->postAndAwaitResponse(&response); - - if (err != OK) { - ALOGV("getFormat post failed: %d", err); - return NULL; - } - - if (response->findInt32("err", &err) && err != OK) { - if (synchronous && err == -EWOULDBLOCK && !moovDone) { - resumeIfNecessary(); - ALOGV("@getFormat parser not ready yet, retrying"); - usleep(10000); - continue; - } - ALOGV("getFormat failed: %d", err); - return NULL; - } - - sp<AMessage> format; - CHECK(response->findMessage("format", &format)); - - ALOGV("returning format %s", format->debugString().c_str()); - return format; - } -} - -status_t FragmentedMP4Parser::seekTo(bool wantAudio, int64_t timeUs) { - sp<AMessage> msg = new AMessage(kWhatSeekTo, id()); - msg->setInt32("audio", wantAudio); - msg->setInt64("position", timeUs); - - sp<AMessage> response; - status_t err = msg->postAndAwaitResponse(&response); - return err; -} - -bool FragmentedMP4Parser::isSeekable() const { - while (mFirstMoofOffset == 0 && mFinalResult == OK) { - usleep(10000); - } - bool seekable = mSource->isSeekable(); - for (size_t i = 0; seekable && i < mTracks.size(); i++) { - const TrackInfo *info = &mTracks.valueAt(i); - seekable &= !info->mSidx.empty(); - } - return seekable; -} - -status_t FragmentedMP4Parser::onSeekTo(bool wantAudio, int64_t position) { - status_t err = -EINVAL; - ssize_t trackIndex = findTrack(wantAudio); - if (trackIndex < 0) { - err = trackIndex; - } else { - TrackInfo *info = &mTracks.editValueAt(trackIndex); - - int numSidxEntries = info->mSidx.size(); - int64_t totalTime = 0; - off_t totalOffset = mFirstMoofOffset; - for (int i = 0; i < numSidxEntries; i++) { - const SidxEntry *se = &info->mSidx[i]; - if (totalTime + se->mDurationUs > position) { - mBuffer->setRange(0,0); - mBufferPos = totalOffset; - if (mFinalResult == ERROR_END_OF_STREAM) { - mFinalResult = OK; - mSuspended = true; // force resume - resumeIfNecessary(); - } - info->mFragments.clear(); - info->mDecodingTime = totalTime * info->mMediaTimeScale / 1000000ll; - return 
OK; - } - totalTime += se->mDurationUs; - totalOffset += se->mSize; - } - } - ALOGV("seekTo out of range"); - return err; -} - -status_t FragmentedMP4Parser::dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit, - bool synchronous) { - - while (true) { - sp<AMessage> msg = new AMessage(kWhatDequeueAccessUnit, id()); - msg->setInt32("audio", audio); - - sp<AMessage> response; - status_t err = msg->postAndAwaitResponse(&response); - - if (err != OK) { - ALOGV("dequeue fail 1: %d", err); - return err; - } - - if (response->findInt32("err", &err) && err != OK) { - if (synchronous && err == -EWOULDBLOCK) { - resumeIfNecessary(); - ALOGV("Parser not ready yet, retrying"); - usleep(10000); - continue; - } - ALOGV("dequeue fail 2: %d, %d", err, synchronous); - return err; - } - - CHECK(response->findBuffer("accessUnit", accessUnit)); - - return OK; - } -} - -ssize_t FragmentedMP4Parser::findTrack(bool wantAudio) const { - for (size_t i = 0; i < mTracks.size(); ++i) { - const TrackInfo *info = &mTracks.valueAt(i); - - bool isAudio = - info->mMediaHandlerType == FOURCC('s', 'o', 'u', 'n'); - - bool isVideo = - info->mMediaHandlerType == FOURCC('v', 'i', 'd', 'e'); - - if ((wantAudio && isAudio) || (!wantAudio && !isAudio)) { - if (info->mSampleDescs.empty()) { - break; - } - - return i; - } - } - - return -EWOULDBLOCK; -} - -void FragmentedMP4Parser::onMessageReceived(const sp<AMessage> &msg) { - switch (msg->what()) { - case kWhatStart: - { - sp<RefBase> obj; - CHECK(msg->findObject("source", &obj)); - - mSource = static_cast<Source *>(obj.get()); - - mBuffer = new ABuffer(512 * 1024); - mBuffer->setRange(0, 0); - - enter(0ll, 0, 0); - - (new AMessage(kWhatProceed, id()))->post(); - break; - } - - case kWhatProceed: - { - CHECK(!mSuspended); - - status_t err = onProceed(); - - if (err == OK) { - if (!mSuspended) { - msg->post(); - } - } else if (err != -EAGAIN) { - ALOGE("onProceed returned error %d", err); - } - - break; - } - - case kWhatReadMore: - { - size_t needed; - 
CHECK(msg->findSize("needed", &needed)); - - memmove(mBuffer->base(), mBuffer->data(), mBuffer->size()); - mBufferPos += mBuffer->offset(); - mBuffer->setRange(0, mBuffer->size()); - - size_t maxBytesToRead = mBuffer->capacity() - mBuffer->size(); - - if (maxBytesToRead < needed) { - ALOGV("resizing buffer."); - - sp<ABuffer> newBuffer = - new ABuffer((mBuffer->size() + needed + 1023) & ~1023); - memcpy(newBuffer->data(), mBuffer->data(), mBuffer->size()); - newBuffer->setRange(0, mBuffer->size()); - - mBuffer = newBuffer; - maxBytesToRead = mBuffer->capacity() - mBuffer->size(); - } - - CHECK_GE(maxBytesToRead, needed); - - ssize_t n = mSource->readAt( - mBufferPos + mBuffer->size(), - mBuffer->data() + mBuffer->size(), needed); - - if (n < (ssize_t)needed) { - ALOGV("Reached EOF when reading %d @ %d + %d", needed, mBufferPos, mBuffer->size()); - if (n < 0) { - mFinalResult = n; - } else if (n == 0) { - mFinalResult = ERROR_END_OF_STREAM; - } else { - mFinalResult = ERROR_IO; - } - } else { - mBuffer->setRange(0, mBuffer->size() + n); - (new AMessage(kWhatProceed, id()))->post(); - } - - break; - } - - case kWhatGetFormat: - { - int32_t wantAudio; - CHECK(msg->findInt32("audio", &wantAudio)); - - status_t err = -EWOULDBLOCK; - sp<AMessage> response = new AMessage; - - ssize_t trackIndex = findTrack(wantAudio); - - if (trackIndex < 0) { - err = trackIndex; - } else { - TrackInfo *info = &mTracks.editValueAt(trackIndex); - - sp<AMessage> format = info->mSampleDescs.itemAt(0).mFormat; - if (info->mSidxDuration) { - format->setInt64("durationUs", info->mSidxDuration); - } else { - // this is probably going to be zero. Oh well... 
- format->setInt64("durationUs", - 1000000ll * info->mDuration / info->mMediaTimeScale); - } - response->setMessage( - "format", format); - - err = OK; - } - - response->setInt32("err", err); - - uint32_t replyID; - CHECK(msg->senderAwaitsResponse(&replyID)); - - response->postReply(replyID); - break; - } - - case kWhatDequeueAccessUnit: - { - int32_t wantAudio; - CHECK(msg->findInt32("audio", &wantAudio)); - - status_t err = -EWOULDBLOCK; - sp<AMessage> response = new AMessage; - - ssize_t trackIndex = findTrack(wantAudio); - - if (trackIndex < 0) { - err = trackIndex; - } else { - sp<ABuffer> accessUnit; - err = onDequeueAccessUnit(trackIndex, &accessUnit); - - if (err == OK) { - response->setBuffer("accessUnit", accessUnit); - } - } - - response->setInt32("err", err); - - uint32_t replyID; - CHECK(msg->senderAwaitsResponse(&replyID)); - - response->postReply(replyID); - break; - } - - case kWhatSeekTo: - { - ALOGV("kWhatSeekTo"); - int32_t wantAudio; - CHECK(msg->findInt32("audio", &wantAudio)); - int64_t position; - CHECK(msg->findInt64("position", &position)); - - status_t err = -EWOULDBLOCK; - sp<AMessage> response = new AMessage; - - ssize_t trackIndex = findTrack(wantAudio); - - if (trackIndex < 0) { - err = trackIndex; - } else { - err = onSeekTo(wantAudio, position); - } - response->setInt32("err", err); - uint32_t replyID; - CHECK(msg->senderAwaitsResponse(&replyID)); - response->postReply(replyID); - break; - } - default: - TRESPASS(); - } -} - -status_t FragmentedMP4Parser::onProceed() { - status_t err; - - if ((err = need(8)) != OK) { - return err; - } - - uint64_t size = readU32(0); - uint32_t type = readU32(4); - - size_t offset = 8; - - if (size == 1) { - if ((err = need(16)) != OK) { - return err; - } - - size = readU64(offset); - offset += 8; - } - - uint8_t userType[16]; - - if (type == FOURCC('u', 'u', 'i', 'd')) { - if ((err = need(offset + 16)) != OK) { - return err; - } - - memcpy(userType, mBuffer->data() + offset, 16); - offset += 16; - } 
- - CHECK(!mStack.isEmpty()); - uint32_t ptype = mStack.itemAt(mStack.size() - 1).mType; - - static const size_t kNumDispatchers = - sizeof(kDispatchTable) / sizeof(kDispatchTable[0]); - - size_t i; - for (i = 0; i < kNumDispatchers; ++i) { - if (kDispatchTable[i].mType == type - && kDispatchTable[i].mParentType == ptype) { - break; - } - } - - // SampleEntry boxes are container boxes that start with a variable - // amount of data depending on the media handler type. - // We don't look inside 'hint' type SampleEntry boxes. - - bool isSampleEntryBox = - (ptype == FOURCC('s', 't', 's', 'd')) - && editTrack(mCurrentTrackID)->mMediaHandlerType - != FOURCC('h', 'i', 'n', 't'); - - if ((i < kNumDispatchers && kDispatchTable[i].mHandler == 0) - || isSampleEntryBox || ptype == FOURCC('i', 'l', 's', 't')) { - // This is a container box. - if (type == FOURCC('m', 'o', 'o', 'f')) { - if (mFirstMoofOffset == 0) { - ALOGV("first moof @ %08x", mBufferPos + offset); - mFirstMoofOffset = mBufferPos + offset - 8; // point at the size - } - } - if (type == FOURCC('m', 'e', 't', 'a')) { - if ((err = need(offset + 4)) < OK) { - return err; - } - - if (readU32(offset) != 0) { - return -EINVAL; - } - - offset += 4; - } else if (type == FOURCC('s', 't', 's', 'd')) { - if ((err = need(offset + 8)) < OK) { - return err; - } - - if (readU32(offset) != 0) { - return -EINVAL; - } - - if (readU32(offset + 4) == 0) { - // We need at least some entries. 
- return -EINVAL; - } - - offset += 8; - } else if (isSampleEntryBox) { - size_t headerSize; - - switch (editTrack(mCurrentTrackID)->mMediaHandlerType) { - case FOURCC('v', 'i', 'd', 'e'): - { - // 8 bytes SampleEntry + 70 bytes VisualSampleEntry - headerSize = 78; - break; - } - - case FOURCC('s', 'o', 'u', 'n'): - { - // 8 bytes SampleEntry + 20 bytes AudioSampleEntry - headerSize = 28; - break; - } - - case FOURCC('m', 'e', 't', 'a'): - { - headerSize = 8; // 8 bytes SampleEntry - break; - } - - default: - TRESPASS(); - } - - if (offset + headerSize > size) { - return -EINVAL; - } - - if ((err = need(offset + headerSize)) != OK) { - return err; - } - - switch (editTrack(mCurrentTrackID)->mMediaHandlerType) { - case FOURCC('v', 'i', 'd', 'e'): - { - err = parseVisualSampleEntry( - type, offset, offset + headerSize); - break; - } - - case FOURCC('s', 'o', 'u', 'n'): - { - err = parseAudioSampleEntry( - type, offset, offset + headerSize); - break; - } - - case FOURCC('m', 'e', 't', 'a'): - { - err = OK; - break; - } - - default: - TRESPASS(); - } - - if (err != OK) { - return err; - } - - offset += headerSize; - } - - skip(offset); - - ALOGV("%sentering box of type '%s'", - IndentString(mStack.size()), Fourcc2String(type)); - - enter(mBufferPos - offset, type, size - offset); - } else { - if (!fitsContainer(size)) { - return -EINVAL; - } - - if (i < kNumDispatchers && kDispatchTable[i].mHandler != 0) { - // We have a handler for this box type. 
- - if ((err = need(size)) != OK) { - return err; - } - - ALOGV("%sparsing box of type '%s'", - IndentString(mStack.size()), Fourcc2String(type)); - - if ((err = (this->*kDispatchTable[i].mHandler)( - type, offset, size)) != OK) { - return err; - } - } else { - // Unknown box type - - ALOGV("%sskipping box of type '%s', size %llu", - IndentString(mStack.size()), - Fourcc2String(type), size); - - } - - skip(size); - } - - return OK; -} - -// static -int FragmentedMP4Parser::CompareSampleLocation( - const SampleInfo &sample, const MediaDataInfo &mdatInfo) { - if (sample.mOffset + sample.mSize < mdatInfo.mOffset) { - return -1; - } - - if (sample.mOffset >= mdatInfo.mOffset + mdatInfo.mBuffer->size()) { - return 1; - } - - // Otherwise make sure the sample is completely contained within this - // media data block. - - CHECK_GE(sample.mOffset, mdatInfo.mOffset); - - CHECK_LE(sample.mOffset + sample.mSize, - mdatInfo.mOffset + mdatInfo.mBuffer->size()); - - return 0; -} - -void FragmentedMP4Parser::resumeIfNecessary() { - if (!mSuspended) { - return; - } - - ALOGV("resuming."); - - mSuspended = false; - (new AMessage(kWhatProceed, id()))->post(); -} - -status_t FragmentedMP4Parser::getSample( - TrackInfo *info, sp<TrackFragment> *fragment, SampleInfo *sampleInfo) { - for (;;) { - if (info->mFragments.empty()) { - if (mFinalResult != OK) { - return mFinalResult; - } - - resumeIfNecessary(); - return -EWOULDBLOCK; - } - - *fragment = *info->mFragments.begin(); - - status_t err = (*fragment)->getSample(sampleInfo); - - if (err == OK) { - return OK; - } else if (err != ERROR_END_OF_STREAM) { - return err; - } - - // Really, end of this fragment... 
- - info->mFragments.erase(info->mFragments.begin()); - } -} - -status_t FragmentedMP4Parser::onDequeueAccessUnit( - size_t trackIndex, sp<ABuffer> *accessUnit) { - TrackInfo *info = &mTracks.editValueAt(trackIndex); - - sp<TrackFragment> fragment; - SampleInfo sampleInfo; - status_t err = getSample(info, &fragment, &sampleInfo); - - if (err == -EWOULDBLOCK) { - resumeIfNecessary(); - return err; - } else if (err != OK) { - return err; - } - - err = -EWOULDBLOCK; - - bool checkDroppable = false; - - for (size_t i = 0; i < mMediaData.size(); ++i) { - const MediaDataInfo &mdatInfo = mMediaData.itemAt(i); - - int cmp = CompareSampleLocation(sampleInfo, mdatInfo); - - if (cmp < 0 && !mSource->isSeekable()) { - return -EPIPE; - } else if (cmp == 0) { - if (i > 0) { - checkDroppable = true; - } - - err = makeAccessUnit(info, sampleInfo, mdatInfo, accessUnit); - break; - } - } - - if (err != OK) { - return err; - } - - fragment->advance(); - - if (!mMediaData.empty() && checkDroppable) { - size_t numDroppable = 0; - bool done = false; - - // XXX FIXME: if one of the tracks is not advanced (e.g. 
if you play an audio+video - // file with sf2), then mMediaData will not be pruned and keeps growing - for (size_t i = 0; !done && i < mMediaData.size(); ++i) { - const MediaDataInfo &mdatInfo = mMediaData.itemAt(i); - - for (size_t j = 0; j < mTracks.size(); ++j) { - TrackInfo *info = &mTracks.editValueAt(j); - - sp<TrackFragment> fragment; - SampleInfo sampleInfo; - err = getSample(info, &fragment, &sampleInfo); - - if (err != OK) { - done = true; - break; - } - - int cmp = CompareSampleLocation(sampleInfo, mdatInfo); - - if (cmp <= 0) { - done = true; - break; - } - } - - if (!done) { - ++numDroppable; - } - } - - if (numDroppable > 0) { - mMediaData.removeItemsAt(0, numDroppable); - - if (mMediaData.size() < 5) { - resumeIfNecessary(); - } - } - } - - return err; -} - -static size_t parseNALSize(size_t nalLengthSize, const uint8_t *data) { - switch (nalLengthSize) { - case 1: - return *data; - case 2: - return U16_AT(data); - case 3: - return ((size_t)data[0] << 16) | U16_AT(&data[1]); - case 4: - return U32_AT(data); - } - - // This cannot happen, mNALLengthSize springs to life by adding 1 to - // a 2-bit integer. 
- TRESPASS(); - - return 0; -} - -status_t FragmentedMP4Parser::makeAccessUnit( - TrackInfo *info, - const SampleInfo &sample, - const MediaDataInfo &mdatInfo, - sp<ABuffer> *accessUnit) { - if (sample.mSampleDescIndex < 1 - || sample.mSampleDescIndex > info->mSampleDescs.size()) { - return ERROR_MALFORMED; - } - - int64_t presentationTimeUs = - 1000000ll * sample.mPresentationTime / info->mMediaTimeScale; - - const SampleDescription &sampleDesc = - info->mSampleDescs.itemAt(sample.mSampleDescIndex - 1); - - size_t nalLengthSize; - if (!sampleDesc.mFormat->findSize("nal-length-size", &nalLengthSize)) { - *accessUnit = new ABuffer(sample.mSize); - - memcpy((*accessUnit)->data(), - mdatInfo.mBuffer->data() + (sample.mOffset - mdatInfo.mOffset), - sample.mSize); - - (*accessUnit)->meta()->setInt64("timeUs", presentationTimeUs); - if (IsIDR(*accessUnit)) { - (*accessUnit)->meta()->setInt32("is-sync-frame", 1); - } - - return OK; - } - - const uint8_t *srcPtr = - mdatInfo.mBuffer->data() + (sample.mOffset - mdatInfo.mOffset); - - for (int i = 0; i < 2 ; ++i) { - size_t srcOffset = 0; - size_t dstOffset = 0; - - while (srcOffset < sample.mSize) { - if (srcOffset + nalLengthSize > sample.mSize) { - return ERROR_MALFORMED; - } - - size_t nalSize = parseNALSize(nalLengthSize, &srcPtr[srcOffset]); - srcOffset += nalLengthSize; - - if (srcOffset + nalSize > sample.mSize) { - return ERROR_MALFORMED; - } - - if (i == 1) { - memcpy((*accessUnit)->data() + dstOffset, - "\x00\x00\x00\x01", - 4); - - memcpy((*accessUnit)->data() + dstOffset + 4, - srcPtr + srcOffset, - nalSize); - } - - srcOffset += nalSize; - dstOffset += nalSize + 4; - } - - if (i == 0) { - (*accessUnit) = new ABuffer(dstOffset); - (*accessUnit)->meta()->setInt64( - "timeUs", presentationTimeUs); - } - } - if (IsIDR(*accessUnit)) { - (*accessUnit)->meta()->setInt32("is-sync-frame", 1); - } - - return OK; -} - -status_t FragmentedMP4Parser::need(size_t size) { - if (!fitsContainer(size)) { - return -EINVAL; - } - 
- if (size <= mBuffer->size()) { - return OK; - } - - sp<AMessage> msg = new AMessage(kWhatReadMore, id()); - msg->setSize("needed", size - mBuffer->size()); - msg->post(); - - // ALOGV("need(%d) returning -EAGAIN, only have %d", size, mBuffer->size()); - - return -EAGAIN; -} - -void FragmentedMP4Parser::enter(off64_t offset, uint32_t type, uint64_t size) { - Container container; - container.mOffset = offset; - container.mType = type; - container.mExtendsToEOF = (size == 0); - container.mBytesRemaining = size; - - mStack.push(container); -} - -bool FragmentedMP4Parser::fitsContainer(uint64_t size) const { - CHECK(!mStack.isEmpty()); - const Container &container = mStack.itemAt(mStack.size() - 1); - - return container.mExtendsToEOF || size <= container.mBytesRemaining; -} - -uint16_t FragmentedMP4Parser::readU16(size_t offset) { - CHECK_LE(offset + 2, mBuffer->size()); - - const uint8_t *ptr = mBuffer->data() + offset; - return (ptr[0] << 8) | ptr[1]; -} - -uint32_t FragmentedMP4Parser::readU32(size_t offset) { - CHECK_LE(offset + 4, mBuffer->size()); - - const uint8_t *ptr = mBuffer->data() + offset; - return (ptr[0] << 24) | (ptr[1] << 16) | (ptr[2] << 8) | ptr[3]; -} - -uint64_t FragmentedMP4Parser::readU64(size_t offset) { - return (((uint64_t)readU32(offset)) << 32) | readU32(offset + 4); -} - -void FragmentedMP4Parser::skip(off_t distance) { - CHECK(!mStack.isEmpty()); - for (size_t i = mStack.size(); i-- > 0;) { - Container *container = &mStack.editItemAt(i); - if (!container->mExtendsToEOF) { - CHECK_LE(distance, (off_t)container->mBytesRemaining); - - container->mBytesRemaining -= distance; - - if (container->mBytesRemaining == 0) { - ALOGV("%sleaving box of type '%s'", - IndentString(mStack.size() - 1), - Fourcc2String(container->mType)); - -#if 0 - if (container->mType == FOURCC('s', 't', 's', 'd')) { - TrackInfo *trackInfo = editTrack(mCurrentTrackID); - for (size_t i = 0; - i < trackInfo->mSampleDescs.size(); ++i) { - ALOGI("format #%d: %s", - i, - 
trackInfo->mSampleDescs.itemAt(i) - .mFormat->debugString().c_str()); - } - } -#endif - - if (container->mType == FOURCC('s', 't', 'b', 'l')) { - TrackInfo *trackInfo = editTrack(mCurrentTrackID); - - trackInfo->mStaticFragment->signalCompletion(); - - CHECK(trackInfo->mFragments.empty()); - trackInfo->mFragments.push_back(trackInfo->mStaticFragment); - trackInfo->mStaticFragment.clear(); - } else if (container->mType == FOURCC('t', 'r', 'a', 'f')) { - TrackInfo *trackInfo = - editTrack(mTrackFragmentHeaderInfo.mTrackID); - - const sp<TrackFragment> &fragment = - *--trackInfo->mFragments.end(); - - static_cast<DynamicTrackFragment *>( - fragment.get())->signalCompletion(); - } else if (container->mType == FOURCC('m', 'o', 'o', 'v')) { - mDoneWithMoov = true; - } - - container = NULL; - mStack.removeItemsAt(i); - } - } - } - - if (distance < (off_t)mBuffer->size()) { - mBuffer->setRange(mBuffer->offset() + distance, mBuffer->size() - distance); - mBufferPos += distance; - return; - } - - mBuffer->setRange(0, 0); - mBufferPos += distance; -} - -status_t FragmentedMP4Parser::parseTrackHeader( - uint32_t type, size_t offset, uint64_t size) { - if (offset + 4 > size) { - return -EINVAL; - } - - uint32_t flags = readU32(offset); - - uint32_t version = flags >> 24; - flags &= 0xffffff; - - uint32_t trackID; - uint64_t duration; - - if (version == 1) { - if (offset + 36 > size) { - return -EINVAL; - } - - trackID = readU32(offset + 20); - duration = readU64(offset + 28); - - offset += 36; - } else if (version == 0) { - if (offset + 24 > size) { - return -EINVAL; - } - - trackID = readU32(offset + 12); - duration = readU32(offset + 20); - - offset += 24; - } else { - return -EINVAL; - } - - TrackInfo *info = editTrack(trackID, true /* createIfNecessary */); - info->mFlags = flags; - info->mDuration = duration; - if (info->mDuration == 0xffffffff) { - // ffmpeg sets this to -1, which is incorrect. 
- info->mDuration = 0; - } - - info->mStaticFragment = new StaticTrackFragment; - - mCurrentTrackID = trackID; - - return OK; -} - -status_t FragmentedMP4Parser::parseMediaHeader( - uint32_t type, size_t offset, uint64_t size) { - if (offset + 4 > size) { - return -EINVAL; - } - - uint32_t versionAndFlags = readU32(offset); - - if (versionAndFlags & 0xffffff) { - return ERROR_MALFORMED; - } - - uint32_t version = versionAndFlags >> 24; - - TrackInfo *info = editTrack(mCurrentTrackID); - - if (version == 1) { - if (offset + 4 + 32 > size) { - return -EINVAL; - } - info->mMediaTimeScale = U32_AT(mBuffer->data() + offset + 20); - } else if (version == 0) { - if (offset + 4 + 20 > size) { - return -EINVAL; - } - info->mMediaTimeScale = U32_AT(mBuffer->data() + offset + 12); - } else { - return ERROR_MALFORMED; - } - - return OK; -} - -status_t FragmentedMP4Parser::parseMediaHandler( - uint32_t type, size_t offset, uint64_t size) { - if (offset + 12 > size) { - return -EINVAL; - } - - if (readU32(offset) != 0) { - return -EINVAL; - } - - uint32_t handlerType = readU32(offset + 8); - - switch (handlerType) { - case FOURCC('v', 'i', 'd', 'e'): - case FOURCC('s', 'o', 'u', 'n'): - case FOURCC('h', 'i', 'n', 't'): - case FOURCC('m', 'e', 't', 'a'): - break; - - default: - return -EINVAL; - } - - editTrack(mCurrentTrackID)->mMediaHandlerType = handlerType; - - return OK; -} - -status_t FragmentedMP4Parser::parseVisualSampleEntry( - uint32_t type, size_t offset, uint64_t size) { - if (offset + 78 > size) { - return -EINVAL; - } - - TrackInfo *trackInfo = editTrack(mCurrentTrackID); - - trackInfo->mSampleDescs.push(); - SampleDescription *sampleDesc = - &trackInfo->mSampleDescs.editItemAt( - trackInfo->mSampleDescs.size() - 1); - - sampleDesc->mType = type; - sampleDesc->mDataRefIndex = readU16(offset + 6); - - sp<AMessage> format = new AMessage; - - switch (type) { - case FOURCC('a', 'v', 'c', '1'): - format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC); - break; - case 
FOURCC('m', 'p', '4', 'v'): - format->setString("mime", MEDIA_MIMETYPE_VIDEO_MPEG4); - break; - case FOURCC('s', '2', '6', '3'): - case FOURCC('h', '2', '6', '3'): - case FOURCC('H', '2', '6', '3'): - format->setString("mime", MEDIA_MIMETYPE_VIDEO_H263); - break; - default: - format->setString("mime", "application/octet-stream"); - break; - } - - format->setInt32("width", readU16(offset + 8 + 16)); - format->setInt32("height", readU16(offset + 8 + 18)); - - sampleDesc->mFormat = format; - - return OK; -} - -status_t FragmentedMP4Parser::parseAudioSampleEntry( - uint32_t type, size_t offset, uint64_t size) { - if (offset + 28 > size) { - return -EINVAL; - } - - TrackInfo *trackInfo = editTrack(mCurrentTrackID); - - trackInfo->mSampleDescs.push(); - SampleDescription *sampleDesc = - &trackInfo->mSampleDescs.editItemAt( - trackInfo->mSampleDescs.size() - 1); - - sampleDesc->mType = type; - sampleDesc->mDataRefIndex = readU16(offset + 6); - - sp<AMessage> format = new AMessage; - - format->setInt32("channel-count", readU16(offset + 8 + 8)); - format->setInt32("sample-size", readU16(offset + 8 + 10)); - format->setInt32("sample-rate", readU32(offset + 8 + 16) / 65536.0f); - - switch (type) { - case FOURCC('m', 'p', '4', 'a'): - format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); - break; - - case FOURCC('s', 'a', 'm', 'r'): - format->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); - format->setInt32("channel-count", 1); - format->setInt32("sample-rate", 8000); - break; - - case FOURCC('s', 'a', 'w', 'b'): - format->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); - format->setInt32("channel-count", 1); - format->setInt32("sample-rate", 16000); - break; - default: - format->setString("mime", "application/octet-stream"); - break; - } - - sampleDesc->mFormat = format; - - return OK; -} - -static void addCodecSpecificData( - const sp<AMessage> &format, int32_t index, - const void *data, size_t size, - bool insertStartCode = false) { - sp<ABuffer> csd = new 
ABuffer(insertStartCode ? size + 4 : size); - - memcpy(csd->data() + (insertStartCode ? 4 : 0), data, size); - - if (insertStartCode) { - memcpy(csd->data(), "\x00\x00\x00\x01", 4); - } - - csd->meta()->setInt32("csd", true); - csd->meta()->setInt64("timeUs", 0ll); - - format->setBuffer(StringPrintf("csd-%d", index).c_str(), csd); -} - -status_t FragmentedMP4Parser::parseSampleSizes( - uint32_t type, size_t offset, uint64_t size) { - return editTrack(mCurrentTrackID)->mStaticFragment->parseSampleSizes( - this, type, offset, size); -} - -status_t FragmentedMP4Parser::parseCompactSampleSizes( - uint32_t type, size_t offset, uint64_t size) { - return editTrack(mCurrentTrackID)->mStaticFragment->parseCompactSampleSizes( - this, type, offset, size); -} - -status_t FragmentedMP4Parser::parseSampleToChunk( - uint32_t type, size_t offset, uint64_t size) { - return editTrack(mCurrentTrackID)->mStaticFragment->parseSampleToChunk( - this, type, offset, size); -} - -status_t FragmentedMP4Parser::parseChunkOffsets( - uint32_t type, size_t offset, uint64_t size) { - return editTrack(mCurrentTrackID)->mStaticFragment->parseChunkOffsets( - this, type, offset, size); -} - -status_t FragmentedMP4Parser::parseChunkOffsets64( - uint32_t type, size_t offset, uint64_t size) { - return editTrack(mCurrentTrackID)->mStaticFragment->parseChunkOffsets64( - this, type, offset, size); -} - -status_t FragmentedMP4Parser::parseAVCCodecSpecificData( - uint32_t type, size_t offset, uint64_t size) { - TrackInfo *trackInfo = editTrack(mCurrentTrackID); - - SampleDescription *sampleDesc = - &trackInfo->mSampleDescs.editItemAt( - trackInfo->mSampleDescs.size() - 1); - - if (sampleDesc->mType != FOURCC('a', 'v', 'c', '1')) { - return -EINVAL; - } - - const uint8_t *ptr = mBuffer->data() + offset; - - size -= offset; - offset = 0; - - if (size < 7 || ptr[0] != 0x01) { - return ERROR_MALFORMED; - } - - sampleDesc->mFormat->setSize("nal-length-size", 1 + (ptr[4] & 3)); - - size_t numSPS = ptr[5] & 31; - - 
ptr += 6; - size -= 6; - - for (size_t i = 0; i < numSPS; ++i) { - if (size < 2) { - return ERROR_MALFORMED; - } - - size_t length = U16_AT(ptr); - - ptr += 2; - size -= 2; - - if (size < length) { - return ERROR_MALFORMED; - } - - addCodecSpecificData( - sampleDesc->mFormat, i, ptr, length, - true /* insertStartCode */); - - ptr += length; - size -= length; - } - - if (size < 1) { - return ERROR_MALFORMED; - } - - size_t numPPS = *ptr; - ++ptr; - --size; - - for (size_t i = 0; i < numPPS; ++i) { - if (size < 2) { - return ERROR_MALFORMED; - } - - size_t length = U16_AT(ptr); - - ptr += 2; - size -= 2; - - if (size < length) { - return ERROR_MALFORMED; - } - - addCodecSpecificData( - sampleDesc->mFormat, numSPS + i, ptr, length, - true /* insertStartCode */); - - ptr += length; - size -= length; - } - - return OK; -} - -status_t FragmentedMP4Parser::parseESDSCodecSpecificData( - uint32_t type, size_t offset, uint64_t size) { - TrackInfo *trackInfo = editTrack(mCurrentTrackID); - - SampleDescription *sampleDesc = - &trackInfo->mSampleDescs.editItemAt( - trackInfo->mSampleDescs.size() - 1); - - if (sampleDesc->mType != FOURCC('m', 'p', '4', 'a') - && sampleDesc->mType != FOURCC('m', 'p', '4', 'v')) { - return -EINVAL; - } - - const uint8_t *ptr = mBuffer->data() + offset; - - size -= offset; - offset = 0; - - if (size < 4) { - return -EINVAL; - } - - if (U32_AT(ptr) != 0) { - return -EINVAL; - } - - ptr += 4; - size -=4; - - ESDS esds(ptr, size); - - uint8_t objectTypeIndication; - if (esds.getObjectTypeIndication(&objectTypeIndication) != OK) { - return ERROR_MALFORMED; - } - - const uint8_t *csd; - size_t csd_size; - if (esds.getCodecSpecificInfo( - (const void **)&csd, &csd_size) != OK) { - return ERROR_MALFORMED; - } - - addCodecSpecificData(sampleDesc->mFormat, 0, csd, csd_size); - - if (sampleDesc->mType != FOURCC('m', 'p', '4', 'a')) { - return OK; - } - - if (csd_size == 0) { - // There's no further information, i.e. 
no codec specific data - // Let's assume that the information provided in the mpeg4 headers - // is accurate and hope for the best. - - return OK; - } - - if (csd_size < 2) { - return ERROR_MALFORMED; - } - - uint32_t objectType = csd[0] >> 3; - - if (objectType == 31) { - return ERROR_UNSUPPORTED; - } - - uint32_t freqIndex = (csd[0] & 7) << 1 | (csd[1] >> 7); - int32_t sampleRate = 0; - int32_t numChannels = 0; - if (freqIndex == 15) { - if (csd_size < 5) { - return ERROR_MALFORMED; - } - - sampleRate = (csd[1] & 0x7f) << 17 - | csd[2] << 9 - | csd[3] << 1 - | (csd[4] >> 7); - - numChannels = (csd[4] >> 3) & 15; - } else { - static uint32_t kSamplingRate[] = { - 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, - 16000, 12000, 11025, 8000, 7350 - }; - - if (freqIndex == 13 || freqIndex == 14) { - return ERROR_MALFORMED; - } - - sampleRate = kSamplingRate[freqIndex]; - numChannels = (csd[1] >> 3) & 15; - } - - if (numChannels == 0) { - return ERROR_UNSUPPORTED; - } - - sampleDesc->mFormat->setInt32("sample-rate", sampleRate); - sampleDesc->mFormat->setInt32("channel-count", numChannels); - - return OK; -} - -status_t FragmentedMP4Parser::parseMediaData( - uint32_t type, size_t offset, uint64_t size) { - ALOGV("skipping 'mdat' chunk at offsets 0x%08lx-0x%08llx.", - mBufferPos + offset, mBufferPos + size); - - sp<ABuffer> buffer = new ABuffer(size - offset); - memcpy(buffer->data(), mBuffer->data() + offset, size - offset); - - mMediaData.push(); - MediaDataInfo *info = &mMediaData.editItemAt(mMediaData.size() - 1); - info->mBuffer = buffer; - info->mOffset = mBufferPos + offset; - - if (mMediaData.size() > 10) { - ALOGV("suspending for now."); - mSuspended = true; - } - - return OK; -} - -status_t FragmentedMP4Parser::parseSegmentIndex( - uint32_t type, size_t offset, uint64_t size) { - ALOGV("sidx box type %d, offset %d, size %d", type, int(offset), int(size)); -// AString sidxstr; -// hexdump(mBuffer->data() + offset, size, 0 /* indent */, &sidxstr); -// 
ALOGV("raw sidx:"); -// ALOGV("%s", sidxstr.c_str()); - if (offset + 12 > size) { - return -EINVAL; - } - - uint32_t flags = readU32(offset); - - uint32_t version = flags >> 24; - flags &= 0xffffff; - - ALOGV("sidx version %d", version); - - uint32_t referenceId = readU32(offset + 4); - uint32_t timeScale = readU32(offset + 8); - ALOGV("sidx refid/timescale: %d/%d", referenceId, timeScale); - - uint64_t earliestPresentationTime; - uint64_t firstOffset; - - offset += 12; - - if (version == 0) { - if (offset + 8 > size) { - return -EINVAL; - } - earliestPresentationTime = readU32(offset); - firstOffset = readU32(offset + 4); - offset += 8; - } else { - if (offset + 16 > size) { - return -EINVAL; - } - earliestPresentationTime = readU64(offset); - firstOffset = readU64(offset + 8); - offset += 16; - } - ALOGV("sidx pres/off: %Ld/%Ld", earliestPresentationTime, firstOffset); - - if (offset + 4 > size) { - return -EINVAL; - } - if (readU16(offset) != 0) { // reserved - return -EINVAL; - } - int32_t referenceCount = readU16(offset + 2); - offset += 4; - ALOGV("refcount: %d", referenceCount); - - if (offset + referenceCount * 12 > size) { - return -EINVAL; - } - - TrackInfo *info = editTrack(mCurrentTrackID); - uint64_t total_duration = 0; - for (int i = 0; i < referenceCount; i++) { - uint32_t d1 = readU32(offset); - uint32_t d2 = readU32(offset + 4); - uint32_t d3 = readU32(offset + 8); - - if (d1 & 0x80000000) { - ALOGW("sub-sidx boxes not supported yet"); - } - bool sap = d3 & 0x80000000; - bool saptype = d3 >> 28; - if (!sap || saptype > 2) { - ALOGW("not a stream access point, or unsupported type"); - } - total_duration += d2; - offset += 12; - ALOGV(" item %d, %08x %08x %08x", i, d1, d2, d3); - SidxEntry se; - se.mSize = d1 & 0x7fffffff; - se.mDurationUs = 1000000LL * d2 / timeScale; - info->mSidx.add(se); - } - - info->mSidxDuration = total_duration * 1000000 / timeScale; - ALOGV("duration: %lld", info->mSidxDuration); - return OK; -} - -status_t 
FragmentedMP4Parser::parseTrackExtends( - uint32_t type, size_t offset, uint64_t size) { - if (offset + 24 > size) { - return -EINVAL; - } - - if (readU32(offset) != 0) { - return -EINVAL; - } - - uint32_t trackID = readU32(offset + 4); - - TrackInfo *info = editTrack(trackID, true /* createIfNecessary */); - info->mDefaultSampleDescriptionIndex = readU32(offset + 8); - info->mDefaultSampleDuration = readU32(offset + 12); - info->mDefaultSampleSize = readU32(offset + 16); - info->mDefaultSampleFlags = readU32(offset + 20); - - return OK; -} - -FragmentedMP4Parser::TrackInfo *FragmentedMP4Parser::editTrack( - uint32_t trackID, bool createIfNecessary) { - ssize_t i = mTracks.indexOfKey(trackID); - - if (i >= 0) { - return &mTracks.editValueAt(i); - } - - if (!createIfNecessary) { - return NULL; - } - - TrackInfo info; - info.mTrackID = trackID; - info.mFlags = 0; - info.mDuration = 0xffffffff; - info.mSidxDuration = 0; - info.mMediaTimeScale = 0; - info.mMediaHandlerType = 0; - info.mDefaultSampleDescriptionIndex = 0; - info.mDefaultSampleDuration = 0; - info.mDefaultSampleSize = 0; - info.mDefaultSampleFlags = 0; - - info.mDecodingTime = 0; - - mTracks.add(trackID, info); - return &mTracks.editValueAt(mTracks.indexOfKey(trackID)); -} - -status_t FragmentedMP4Parser::parseTrackFragmentHeader( - uint32_t type, size_t offset, uint64_t size) { - if (offset + 8 > size) { - return -EINVAL; - } - - uint32_t flags = readU32(offset); - - if (flags & 0xff000000) { - return -EINVAL; - } - - mTrackFragmentHeaderInfo.mFlags = flags; - - mTrackFragmentHeaderInfo.mTrackID = readU32(offset + 4); - offset += 8; - - if (flags & TrackFragmentHeaderInfo::kBaseDataOffsetPresent) { - if (offset + 8 > size) { - return -EINVAL; - } - - mTrackFragmentHeaderInfo.mBaseDataOffset = readU64(offset); - offset += 8; - } - - if (flags & TrackFragmentHeaderInfo::kSampleDescriptionIndexPresent) { - if (offset + 4 > size) { - return -EINVAL; - } - - mTrackFragmentHeaderInfo.mSampleDescriptionIndex = 
readU32(offset); - offset += 4; - } - - if (flags & TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) { - if (offset + 4 > size) { - return -EINVAL; - } - - mTrackFragmentHeaderInfo.mDefaultSampleDuration = readU32(offset); - offset += 4; - } - - if (flags & TrackFragmentHeaderInfo::kDefaultSampleSizePresent) { - if (offset + 4 > size) { - return -EINVAL; - } - - mTrackFragmentHeaderInfo.mDefaultSampleSize = readU32(offset); - offset += 4; - } - - if (flags & TrackFragmentHeaderInfo::kDefaultSampleFlagsPresent) { - if (offset + 4 > size) { - return -EINVAL; - } - - mTrackFragmentHeaderInfo.mDefaultSampleFlags = readU32(offset); - offset += 4; - } - - if (!(flags & TrackFragmentHeaderInfo::kBaseDataOffsetPresent)) { - // This should point to the position of the first byte of the - // enclosing 'moof' container for the first track and - // the end of the data of the preceding fragment for subsequent - // tracks. - - CHECK_GE(mStack.size(), 2u); - - mTrackFragmentHeaderInfo.mBaseDataOffset = - mStack.itemAt(mStack.size() - 2).mOffset; - - // XXX TODO: This does not do the right thing for the 2nd and - // subsequent tracks yet. 
- } - - mTrackFragmentHeaderInfo.mDataOffset = - mTrackFragmentHeaderInfo.mBaseDataOffset; - - TrackInfo *trackInfo = editTrack(mTrackFragmentHeaderInfo.mTrackID); - - if (trackInfo->mFragments.empty() - || (*trackInfo->mFragments.begin())->complete()) { - trackInfo->mFragments.push_back(new DynamicTrackFragment); - } - - return OK; -} - -status_t FragmentedMP4Parser::parseTrackFragmentRun( - uint32_t type, size_t offset, uint64_t size) { - if (offset + 8 > size) { - return -EINVAL; - } - - enum { - kDataOffsetPresent = 0x01, - kFirstSampleFlagsPresent = 0x04, - kSampleDurationPresent = 0x100, - kSampleSizePresent = 0x200, - kSampleFlagsPresent = 0x400, - kSampleCompositionTimeOffsetPresent = 0x800, - }; - - uint32_t flags = readU32(offset); - - if (flags & 0xff000000) { - return -EINVAL; - } - - if ((flags & kFirstSampleFlagsPresent) && (flags & kSampleFlagsPresent)) { - // These two shall not be used together. - return -EINVAL; - } - - uint32_t sampleCount = readU32(offset + 4); - offset += 8; - - uint64_t dataOffset = mTrackFragmentHeaderInfo.mDataOffset; - - uint32_t firstSampleFlags = 0; - - if (flags & kDataOffsetPresent) { - if (offset + 4 > size) { - return -EINVAL; - } - - int32_t dataOffsetDelta = (int32_t)readU32(offset); - - dataOffset = mTrackFragmentHeaderInfo.mBaseDataOffset + dataOffsetDelta; - - offset += 4; - } - - if (flags & kFirstSampleFlagsPresent) { - if (offset + 4 > size) { - return -EINVAL; - } - - firstSampleFlags = readU32(offset); - offset += 4; - } - - TrackInfo *info = editTrack(mTrackFragmentHeaderInfo.mTrackID); - - if (info == NULL) { - return -EINVAL; - } - - uint32_t sampleDuration = 0, sampleSize = 0, sampleFlags = 0, - sampleCtsOffset = 0; - - size_t bytesPerSample = 0; - if (flags & kSampleDurationPresent) { - bytesPerSample += 4; - } else if (mTrackFragmentHeaderInfo.mFlags - & TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) { - sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration; - } else { - 
sampleDuration = info->mDefaultSampleDuration; - } - - if (flags & kSampleSizePresent) { - bytesPerSample += 4; - } else if (mTrackFragmentHeaderInfo.mFlags - & TrackFragmentHeaderInfo::kDefaultSampleSizePresent) { - sampleSize = mTrackFragmentHeaderInfo.mDefaultSampleSize; - } else { - sampleSize = info->mDefaultSampleSize; - } - - if (flags & kSampleFlagsPresent) { - bytesPerSample += 4; - } else if (mTrackFragmentHeaderInfo.mFlags - & TrackFragmentHeaderInfo::kDefaultSampleFlagsPresent) { - sampleFlags = mTrackFragmentHeaderInfo.mDefaultSampleFlags; - } else { - sampleFlags = info->mDefaultSampleFlags; - } - - if (flags & kSampleCompositionTimeOffsetPresent) { - bytesPerSample += 4; - } else { - sampleCtsOffset = 0; - } - - if (offset + sampleCount * bytesPerSample > size) { - return -EINVAL; - } - - uint32_t sampleDescIndex = - (mTrackFragmentHeaderInfo.mFlags - & TrackFragmentHeaderInfo::kSampleDescriptionIndexPresent) - ? mTrackFragmentHeaderInfo.mSampleDescriptionIndex - : info->mDefaultSampleDescriptionIndex; - - for (uint32_t i = 0; i < sampleCount; ++i) { - if (flags & kSampleDurationPresent) { - sampleDuration = readU32(offset); - offset += 4; - } - - if (flags & kSampleSizePresent) { - sampleSize = readU32(offset); - offset += 4; - } - - if (flags & kSampleFlagsPresent) { - sampleFlags = readU32(offset); - offset += 4; - } - - if (flags & kSampleCompositionTimeOffsetPresent) { - sampleCtsOffset = readU32(offset); - offset += 4; - } - - ALOGV("adding sample at offset 0x%08llx, size %u, duration %u, " - "sampleDescIndex=%u, flags 0x%08x", - dataOffset, sampleSize, sampleDuration, - sampleDescIndex, - (flags & kFirstSampleFlagsPresent) && i == 0 - ? 
firstSampleFlags : sampleFlags); - - const sp<TrackFragment> &fragment = *--info->mFragments.end(); - - uint32_t decodingTime = info->mDecodingTime; - info->mDecodingTime += sampleDuration; - uint32_t presentationTime = decodingTime + sampleCtsOffset; - - static_cast<DynamicTrackFragment *>( - fragment.get())->addSample( - dataOffset, - sampleSize, - presentationTime, - sampleDescIndex, - ((flags & kFirstSampleFlagsPresent) && i == 0) - ? firstSampleFlags : sampleFlags); - - dataOffset += sampleSize; - } - - mTrackFragmentHeaderInfo.mDataOffset = dataOffset; - - return OK; -} - -void FragmentedMP4Parser::copyBuffer( - sp<ABuffer> *dst, size_t offset, uint64_t size) const { - sp<ABuffer> buf = new ABuffer(size); - memcpy(buf->data(), mBuffer->data() + offset, size); - - *dst = buf; -} - -} // namespace android diff --git a/media/libstagefright/mp4/TrackFragment.cpp b/media/libstagefright/mp4/TrackFragment.cpp deleted file mode 100644 index 3699038..0000000 --- a/media/libstagefright/mp4/TrackFragment.cpp +++ /dev/null @@ -1,364 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "TrackFragment" -#include <utils/Log.h> - -#include "TrackFragment.h" - -#include <media/stagefright/MediaErrors.h> -#include <media/stagefright/Utils.h> -#include <media/stagefright/foundation/ABuffer.h> -#include <media/stagefright/foundation/ADebug.h> -#include <media/stagefright/foundation/hexdump.h> - -namespace android { - -FragmentedMP4Parser::DynamicTrackFragment::DynamicTrackFragment() - : mComplete(false), - mSampleIndex(0) { -} - -FragmentedMP4Parser::DynamicTrackFragment::~DynamicTrackFragment() { -} - -status_t FragmentedMP4Parser::DynamicTrackFragment::getSample(SampleInfo *info) { - if (mSampleIndex >= mSamples.size()) { - return mComplete ? ERROR_END_OF_STREAM : -EWOULDBLOCK; - } - - *info = mSamples.itemAt(mSampleIndex); - - return OK; -} - -void FragmentedMP4Parser::DynamicTrackFragment::advance() { - ++mSampleIndex; -} - -void FragmentedMP4Parser::DynamicTrackFragment::addSample( - off64_t dataOffset, size_t sampleSize, - uint32_t presentationTime, - size_t sampleDescIndex, - uint32_t flags) { - mSamples.push(); - SampleInfo *sampleInfo = &mSamples.editItemAt(mSamples.size() - 1); - - sampleInfo->mOffset = dataOffset; - sampleInfo->mSize = sampleSize; - sampleInfo->mPresentationTime = presentationTime; - sampleInfo->mSampleDescIndex = sampleDescIndex; - sampleInfo->mFlags = flags; -} - -status_t FragmentedMP4Parser::DynamicTrackFragment::signalCompletion() { - mComplete = true; - - return OK; -} - -bool FragmentedMP4Parser::DynamicTrackFragment::complete() const { - return mComplete; -} - -//////////////////////////////////////////////////////////////////////////////// - -FragmentedMP4Parser::StaticTrackFragment::StaticTrackFragment() - : mSampleIndex(0), - mSampleCount(0), - mChunkIndex(0), - mSampleToChunkIndex(-1), - mSampleToChunkRemaining(0), - mPrevChunkIndex(0xffffffff), - mNextSampleOffset(0) { -} - -FragmentedMP4Parser::StaticTrackFragment::~StaticTrackFragment() { -} - -status_t 
FragmentedMP4Parser::StaticTrackFragment::getSample(SampleInfo *info) { - if (mSampleIndex >= mSampleCount) { - return ERROR_END_OF_STREAM; - } - - *info = mSampleInfo; - - ALOGV("returning sample %d at [0x%08llx, 0x%08llx)", - mSampleIndex, - info->mOffset, info->mOffset + info->mSize); - - return OK; -} - -void FragmentedMP4Parser::StaticTrackFragment::updateSampleInfo() { - if (mSampleIndex >= mSampleCount) { - return; - } - - if (mSampleSizes != NULL) { - uint32_t defaultSampleSize = U32_AT(mSampleSizes->data() + 4); - if (defaultSampleSize > 0) { - mSampleInfo.mSize = defaultSampleSize; - } else { - mSampleInfo.mSize= U32_AT(mSampleSizes->data() + 12 + 4 * mSampleIndex); - } - } else { - CHECK(mCompactSampleSizes != NULL); - - uint32_t fieldSize = U32_AT(mCompactSampleSizes->data() + 4); - - switch (fieldSize) { - case 4: - { - unsigned byte = mCompactSampleSizes->data()[12 + mSampleIndex / 2]; - mSampleInfo.mSize = (mSampleIndex & 1) ? byte & 0x0f : byte >> 4; - break; - } - - case 8: - { - mSampleInfo.mSize = mCompactSampleSizes->data()[12 + mSampleIndex]; - break; - } - - default: - { - CHECK_EQ(fieldSize, 16); - mSampleInfo.mSize = - U16_AT(mCompactSampleSizes->data() + 12 + mSampleIndex * 2); - break; - } - } - } - - CHECK_GT(mSampleToChunkRemaining, 0); - - // The sample desc index is 1-based... 
XXX - mSampleInfo.mSampleDescIndex = - U32_AT(mSampleToChunk->data() + 8 + 12 * mSampleToChunkIndex + 8); - - if (mChunkIndex != mPrevChunkIndex) { - mPrevChunkIndex = mChunkIndex; - - if (mChunkOffsets != NULL) { - uint32_t entryCount = U32_AT(mChunkOffsets->data() + 4); - - if (mChunkIndex >= entryCount) { - mSampleIndex = mSampleCount; - return; - } - - mNextSampleOffset = - U32_AT(mChunkOffsets->data() + 8 + 4 * mChunkIndex); - } else { - CHECK(mChunkOffsets64 != NULL); - - uint32_t entryCount = U32_AT(mChunkOffsets64->data() + 4); - - if (mChunkIndex >= entryCount) { - mSampleIndex = mSampleCount; - return; - } - - mNextSampleOffset = - U64_AT(mChunkOffsets64->data() + 8 + 8 * mChunkIndex); - } - } - - mSampleInfo.mOffset = mNextSampleOffset; - - mSampleInfo.mPresentationTime = 0; - mSampleInfo.mFlags = 0; -} - -void FragmentedMP4Parser::StaticTrackFragment::advance() { - mNextSampleOffset += mSampleInfo.mSize; - - ++mSampleIndex; - if (--mSampleToChunkRemaining == 0) { - ++mChunkIndex; - - uint32_t entryCount = U32_AT(mSampleToChunk->data() + 4); - - // If this is the last entry in the sample to chunk table, we will - // stay on this entry. - if ((uint32_t)(mSampleToChunkIndex + 1) < entryCount) { - uint32_t nextChunkIndex = - U32_AT(mSampleToChunk->data() + 8 + 12 * (mSampleToChunkIndex + 1)); - - CHECK_GE(nextChunkIndex, 1u); - --nextChunkIndex; - - if (mChunkIndex >= nextChunkIndex) { - CHECK_EQ(mChunkIndex, nextChunkIndex); - ++mSampleToChunkIndex; - } - } - - mSampleToChunkRemaining = - U32_AT(mSampleToChunk->data() + 8 + 12 * mSampleToChunkIndex + 4); - } - - updateSampleInfo(); -} - -static void setU32At(uint8_t *ptr, uint32_t x) { - ptr[0] = x >> 24; - ptr[1] = (x >> 16) & 0xff; - ptr[2] = (x >> 8) & 0xff; - ptr[3] = x & 0xff; -} - -status_t FragmentedMP4Parser::StaticTrackFragment::signalCompletion() { - mSampleToChunkIndex = 0; - - mSampleToChunkRemaining = - (mSampleToChunk == NULL) - ? 
0 - : U32_AT(mSampleToChunk->data() + 8 + 12 * mSampleToChunkIndex + 4); - - updateSampleInfo(); - - return OK; -} - -bool FragmentedMP4Parser::StaticTrackFragment::complete() const { - return true; -} - -status_t FragmentedMP4Parser::StaticTrackFragment::parseSampleSizes( - FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) { - if (offset + 12 > size) { - return ERROR_MALFORMED; - } - - if (parser->readU32(offset) != 0) { - return ERROR_MALFORMED; - } - - uint32_t sampleSize = parser->readU32(offset + 4); - uint32_t sampleCount = parser->readU32(offset + 8); - - if (sampleSize == 0 && offset + 12 + sampleCount * 4 != size) { - return ERROR_MALFORMED; - } - - parser->copyBuffer(&mSampleSizes, offset, size); - - mSampleCount = sampleCount; - - return OK; -} - -status_t FragmentedMP4Parser::StaticTrackFragment::parseCompactSampleSizes( - FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) { - if (offset + 12 > size) { - return ERROR_MALFORMED; - } - - if (parser->readU32(offset) != 0) { - return ERROR_MALFORMED; - } - - uint32_t fieldSize = parser->readU32(offset + 4); - - if (fieldSize != 4 && fieldSize != 8 && fieldSize != 16) { - return ERROR_MALFORMED; - } - - uint32_t sampleCount = parser->readU32(offset + 8); - - if (offset + 12 + (sampleCount * fieldSize + 4) / 8 != size) { - return ERROR_MALFORMED; - } - - parser->copyBuffer(&mCompactSampleSizes, offset, size); - - mSampleCount = sampleCount; - - return OK; -} - -status_t FragmentedMP4Parser::StaticTrackFragment::parseSampleToChunk( - FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) { - if (offset + 8 > size) { - return ERROR_MALFORMED; - } - - if (parser->readU32(offset) != 0) { - return ERROR_MALFORMED; - } - - uint32_t entryCount = parser->readU32(offset + 4); - - if (entryCount == 0) { - return OK; - } - - if (offset + 8 + entryCount * 12 != size) { - return ERROR_MALFORMED; - } - - parser->copyBuffer(&mSampleToChunk, offset, size); - - 
return OK; -} - -status_t FragmentedMP4Parser::StaticTrackFragment::parseChunkOffsets( - FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) { - if (offset + 8 > size) { - return ERROR_MALFORMED; - } - - if (parser->readU32(offset) != 0) { - return ERROR_MALFORMED; - } - - uint32_t entryCount = parser->readU32(offset + 4); - - if (offset + 8 + entryCount * 4 != size) { - return ERROR_MALFORMED; - } - - parser->copyBuffer(&mChunkOffsets, offset, size); - - return OK; -} - -status_t FragmentedMP4Parser::StaticTrackFragment::parseChunkOffsets64( - FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) { - if (offset + 8 > size) { - return ERROR_MALFORMED; - } - - if (parser->readU32(offset) != 0) { - return ERROR_MALFORMED; - } - - uint32_t entryCount = parser->readU32(offset + 4); - - if (offset + 8 + entryCount * 8 != size) { - return ERROR_MALFORMED; - } - - parser->copyBuffer(&mChunkOffsets64, offset, size); - - return OK; -} - -} // namespace android - diff --git a/media/libstagefright/mp4/TrackFragment.h b/media/libstagefright/mp4/TrackFragment.h deleted file mode 100644 index e1ad46e..0000000 --- a/media/libstagefright/mp4/TrackFragment.h +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef TRACK_FRAGMENT_H_ - -#define TRACK_FRAGMENT_H_ - -#include "include/FragmentedMP4Parser.h" - -namespace android { - -struct FragmentedMP4Parser::TrackFragment : public RefBase { - TrackFragment() {} - - virtual status_t getSample(SampleInfo *info) = 0; - virtual void advance() = 0; - - virtual status_t signalCompletion() = 0; - virtual bool complete() const = 0; - -protected: - virtual ~TrackFragment() {} - -private: - DISALLOW_EVIL_CONSTRUCTORS(TrackFragment); -}; - -struct FragmentedMP4Parser::DynamicTrackFragment : public FragmentedMP4Parser::TrackFragment { - DynamicTrackFragment(); - - virtual status_t getSample(SampleInfo *info); - virtual void advance(); - - void addSample( - off64_t dataOffset, size_t sampleSize, - uint32_t presentationTime, - size_t sampleDescIndex, - uint32_t flags); - - // No more samples will be added to this fragment. - virtual status_t signalCompletion(); - - virtual bool complete() const; - -protected: - virtual ~DynamicTrackFragment(); - -private: - bool mComplete; - size_t mSampleIndex; - Vector<SampleInfo> mSamples; - - DISALLOW_EVIL_CONSTRUCTORS(DynamicTrackFragment); -}; - -struct FragmentedMP4Parser::StaticTrackFragment : public FragmentedMP4Parser::TrackFragment { - StaticTrackFragment(); - - virtual status_t getSample(SampleInfo *info); - virtual void advance(); - - virtual status_t signalCompletion(); - virtual bool complete() const; - - status_t parseSampleSizes( - FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size); - - status_t parseCompactSampleSizes( - FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size); - - status_t parseSampleToChunk( - FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size); - - status_t parseChunkOffsets( - FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size); - - status_t parseChunkOffsets64( - FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size); - -protected: - virtual 
~StaticTrackFragment(); - -private: - size_t mSampleIndex; - size_t mSampleCount; - uint32_t mChunkIndex; - - SampleInfo mSampleInfo; - - sp<ABuffer> mSampleSizes; - sp<ABuffer> mCompactSampleSizes; - - sp<ABuffer> mSampleToChunk; - ssize_t mSampleToChunkIndex; - size_t mSampleToChunkRemaining; - - sp<ABuffer> mChunkOffsets; - sp<ABuffer> mChunkOffsets64; - uint32_t mPrevChunkIndex; - uint64_t mNextSampleOffset; - - void updateSampleInfo(); - - DISALLOW_EVIL_CONSTRUCTORS(StaticTrackFragment); -}; - -} // namespace android - -#endif // TRACK_FRAGMENT_H_ diff --git a/media/libstagefright/mpeg2ts/Android.mk b/media/libstagefright/mpeg2ts/Android.mk index c1a7a9d..c17a0b7 100644 --- a/media/libstagefright/mpeg2ts/Android.mk +++ b/media/libstagefright/mpeg2ts/Android.mk @@ -13,6 +13,8 @@ LOCAL_C_INCLUDES:= \ $(TOP)/frameworks/av/media/libstagefright \ $(TOP)/frameworks/native/include/media/openmax +LOCAL_CFLAGS += -Werror + LOCAL_MODULE:= libstagefright_mpeg2ts ifeq ($(TARGET_ARCH),arm) diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp index 6dfaa94..021b640 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp @@ -26,6 +26,8 @@ #include <media/stagefright/MetaData.h> #include <utils/Vector.h> +#include <inttypes.h> + namespace android { const int64_t kNearEOSMarkUs = 2000000ll; // 2 secs @@ -186,7 +188,7 @@ void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) { int64_t lastQueuedTimeUs; CHECK(buffer->meta()->findInt64("timeUs", &lastQueuedTimeUs)); mLastQueuedTimeUs = lastQueuedTimeUs; - ALOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6); + ALOGV("queueAccessUnit timeUs=%" PRIi64 " us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6); Mutex::Autolock autoLock(mLock); mBuffers.push_back(buffer); diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp 
b/media/libstagefright/mpeg2ts/ESQueue.cpp index e9252cc..f7abf01 100644 --- a/media/libstagefright/mpeg2ts/ESQueue.cpp +++ b/media/libstagefright/mpeg2ts/ESQueue.cpp @@ -265,7 +265,7 @@ status_t ElementaryStreamQueue::appendData( if (startOffset > 0) { ALOGI("found something resembling an H.264/MPEG syncword " - "at offset %d", + "at offset %zd", startOffset); } @@ -359,7 +359,7 @@ status_t ElementaryStreamQueue::appendData( if (startOffset > 0) { ALOGI("found something resembling an AC3 syncword at " - "offset %d", + "offset %zd", startOffset); } diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp index 3fe9c23..16f6c58 100644 --- a/media/libstagefright/omx/GraphicBufferSource.cpp +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -29,6 +29,8 @@ #include <media/hardware/MetadataBufferType.h> #include <ui/GraphicBuffer.h> +#include <inttypes.h> + namespace android { static const bool EXTRA_CHECK = true; @@ -43,16 +45,21 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, mNumFramesAvailable(0), mEndOfStream(false), mEndOfStreamSent(false), - mRepeatAfterUs(-1ll), mMaxTimestampGapUs(-1ll), mPrevOriginalTimeUs(-1ll), mPrevModifiedTimeUs(-1ll), + mSkipFramesBeforeNs(-1ll), + mRepeatAfterUs(-1ll), mRepeatLastFrameGeneration(0), mRepeatLastFrameTimestamp(-1ll), mLatestSubmittedBufferId(-1), mLatestSubmittedBufferFrameNum(0), mLatestSubmittedBufferUseCount(0), - mRepeatBufferDeferred(false) { + mRepeatBufferDeferred(false), + mTimePerCaptureUs(-1ll), + mTimePerFrameUs(-1ll), + mPrevCaptureUs(-1ll), + mPrevFrameUs(-1ll) { ALOGV("GraphicBufferSource w=%u h=%u c=%u", bufferWidth, bufferHeight, bufferCount); @@ -65,13 +72,13 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, String8 name("GraphicBufferSource"); - mBufferQueue = new BufferQueue(); - mBufferQueue->setConsumerName(name); - mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight); - 
mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER | + BufferQueue::createBufferQueue(&mProducer, &mConsumer); + mConsumer->setConsumerName(name); + mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight); + mConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_HW_TEXTURE); - mInitCheck = mBufferQueue->setMaxAcquiredBufferCount(bufferCount); + mInitCheck = mConsumer->setMaxAcquiredBufferCount(bufferCount); if (mInitCheck != NO_ERROR) { ALOGE("Unable to set BQ max acquired buffer count to %u: %d", bufferCount, mInitCheck); @@ -85,7 +92,7 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, wp<BufferQueue::ConsumerListener> listener = static_cast<BufferQueue::ConsumerListener*>(this); sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener); - mInitCheck = mBufferQueue->consumerConnect(proxy, false); + mInitCheck = mConsumer->consumerConnect(proxy, false); if (mInitCheck != NO_ERROR) { ALOGE("Error connecting to BufferQueue: %s (%d)", strerror(-mInitCheck), mInitCheck); @@ -97,8 +104,8 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, GraphicBufferSource::~GraphicBufferSource() { ALOGV("~GraphicBufferSource"); - if (mBufferQueue != NULL) { - status_t err = mBufferQueue->consumerDisconnect(); + if (mConsumer != NULL) { + status_t err = mConsumer->consumerDisconnect(); if (err != NO_ERROR) { ALOGW("consumerDisconnect failed: %d", err); } @@ -270,7 +277,7 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { if (id == mLatestSubmittedBufferId) { CHECK_GT(mLatestSubmittedBufferUseCount--, 0); } else { - mBufferQueue->releaseBuffer(id, codecBuffer.mFrameNumber, + mConsumer->releaseBuffer(id, codecBuffer.mFrameNumber, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); } } else { @@ -339,7 +346,7 @@ void GraphicBufferSource::suspend(bool suspend) { while (mNumFramesAvailable > 0) { BufferQueue::BufferItem item; - status_t 
err = mBufferQueue->acquireBuffer(&item, 0); + status_t err = mConsumer->acquireBuffer(&item, 0); if (err == BufferQueue::NO_BUFFER_AVAILABLE) { // shouldn't happen. @@ -352,7 +359,7 @@ void GraphicBufferSource::suspend(bool suspend) { --mNumFramesAvailable; - mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber, + mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence); } return; @@ -389,7 +396,7 @@ bool GraphicBufferSource::fillCodecBuffer_l() { ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%d", mNumFramesAvailable); BufferQueue::BufferItem item; - status_t err = mBufferQueue->acquireBuffer(&item, 0); + status_t err = mConsumer->acquireBuffer(&item, 0); if (err == BufferQueue::NO_BUFFER_AVAILABLE) { // shouldn't happen ALOGW("fillCodecBuffer_l: frame was not available"); @@ -416,10 +423,21 @@ bool GraphicBufferSource::fillCodecBuffer_l() { mBufferSlot[item.mBuf] = item.mGraphicBuffer; } - err = submitBuffer_l(item, cbi); + err = UNKNOWN_ERROR; + + // only submit sample if start time is unspecified, or sample + // is queued after the specified start time + if (mSkipFramesBeforeNs < 0ll || item.mTimestamp >= mSkipFramesBeforeNs) { + // if start time is set, offset time stamp by start time + if (mSkipFramesBeforeNs > 0) { + item.mTimestamp -= mSkipFramesBeforeNs; + } + err = submitBuffer_l(item, cbi); + } + if (err != OK) { ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf); - mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber, + mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); } else { ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi); @@ -442,7 +460,7 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() { // // To be on the safe side we try to release the buffer. 
ALOGD("repeatLatestSubmittedBuffer_l: slot was NULL"); - mBufferQueue->releaseBuffer( + mConsumer->releaseBuffer( mLatestSubmittedBufferId, mLatestSubmittedBufferFrameNum, EGL_NO_DISPLAY, @@ -496,7 +514,7 @@ void GraphicBufferSource::setLatestSubmittedBuffer_l( if (mLatestSubmittedBufferId >= 0) { if (mLatestSubmittedBufferUseCount == 0) { - mBufferQueue->releaseBuffer( + mConsumer->releaseBuffer( mLatestSubmittedBufferId, mLatestSubmittedBufferFrameNum, EGL_NO_DISPLAY, @@ -550,7 +568,30 @@ status_t GraphicBufferSource::signalEndOfInputStream() { int64_t GraphicBufferSource::getTimestamp(const BufferQueue::BufferItem &item) { int64_t timeUs = item.mTimestamp / 1000; - if (mMaxTimestampGapUs > 0ll) { + if (mTimePerCaptureUs > 0ll) { + // Time lapse or slow motion mode + if (mPrevCaptureUs < 0ll) { + // first capture + mPrevCaptureUs = timeUs; + mPrevFrameUs = timeUs; + } else { + // snap to nearest capture point + int64_t nFrames = (timeUs + mTimePerCaptureUs / 2 - mPrevCaptureUs) + / mTimePerCaptureUs; + if (nFrames <= 0) { + // skip this frame as it's too close to previous capture + ALOGV("skipping frame, timeUs %lld", timeUs); + return -1; + } + mPrevCaptureUs = mPrevCaptureUs + nFrames * mTimePerCaptureUs; + mPrevFrameUs += mTimePerFrameUs * nFrames; + } + + ALOGV("timeUs %lld, captureUs %lld, frameUs %lld", + timeUs, mPrevCaptureUs, mPrevFrameUs); + + return mPrevFrameUs; + } else if (mMaxTimestampGapUs > 0ll) { /* Cap timestamp gap between adjacent frames to specified max * * In the scenario of cast mirroring, encoding could be suspended for @@ -696,15 +737,15 @@ void GraphicBufferSource::onFrameAvailable() { } BufferQueue::BufferItem item; - status_t err = mBufferQueue->acquireBuffer(&item, 0); + status_t err = mConsumer->acquireBuffer(&item, 0); if (err == OK) { // If this is the first time we're seeing this buffer, add it to our // slot table. 
if (item.mGraphicBuffer != NULL) { - ALOGV("fillCodecBuffer_l: setting mBufferSlot %d", item.mBuf); + ALOGV("onFrameAvailable: setting mBufferSlot %d", item.mBuf); mBufferSlot[item.mBuf] = item.mGraphicBuffer; } - mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber, + mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence); } return; @@ -724,13 +765,13 @@ void GraphicBufferSource::onFrameAvailable() { void GraphicBufferSource::onBuffersReleased() { Mutex::Autolock lock(mMutex); - uint32_t slotMask; - if (mBufferQueue->getReleasedBuffers(&slotMask) != NO_ERROR) { + uint64_t slotMask; + if (mConsumer->getReleasedBuffers(&slotMask) != NO_ERROR) { ALOGW("onBuffersReleased: unable to get released buffer set"); - slotMask = 0xffffffff; + slotMask = 0xffffffffffffffffULL; } - ALOGV("onBuffersReleased: 0x%08x", slotMask); + ALOGV("onBuffersReleased: 0x%016" PRIx64, slotMask); for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) { if ((slotMask & 0x01) != 0) { @@ -740,6 +781,11 @@ void GraphicBufferSource::onBuffersReleased() { } } +// BufferQueue::ConsumerListener callback +void GraphicBufferSource::onSidebandStreamChanged() { + ALOG_ASSERT(false, "GraphicBufferSource can't consume sideband streams"); +} + status_t GraphicBufferSource::setRepeatPreviousFrameDelayUs( int64_t repeatAfterUs) { Mutex::Autolock autoLock(mMutex); @@ -764,6 +810,27 @@ status_t GraphicBufferSource::setMaxTimestampGapUs(int64_t maxGapUs) { return OK; } + +void GraphicBufferSource::setSkipFramesBeforeUs(int64_t skipFramesBeforeUs) { + Mutex::Autolock autoLock(mMutex); + + mSkipFramesBeforeNs = + (skipFramesBeforeUs > 0) ? 
(skipFramesBeforeUs * 1000) : -1ll; +} + +status_t GraphicBufferSource::setTimeLapseUs(int64_t* data) { + Mutex::Autolock autoLock(mMutex); + + if (mExecuting || data[0] <= 0ll || data[1] <= 0ll) { + return INVALID_OPERATION; + } + + mTimePerFrameUs = data[0]; + mTimePerCaptureUs = data[1]; + + return OK; +} + void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) { switch (msg->what()) { case kWhatRepeatLastFrame: diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h index 3b0e454..a70cc1a 100644 --- a/media/libstagefright/omx/GraphicBufferSource.h +++ b/media/libstagefright/omx/GraphicBufferSource.h @@ -61,7 +61,7 @@ public: // Returns the handle to the producer side of the BufferQueue. Buffers // queued on this will be received by GraphicBufferSource. sp<IGraphicBufferProducer> getIGraphicBufferProducer() const { - return mBufferQueue; + return mProducer; } // This is called when OMX transitions to OMX_StateExecuting, which means @@ -118,6 +118,17 @@ public: // of suspension on input. status_t setMaxTimestampGapUs(int64_t maxGapUs); + // Sets the time lapse (or slow motion) parameters. + // data[0] is the time (us) between two frames for playback + // data[1] is the time (us) between two frames for capture + // When set, the sample's timestamp will be modified to playback framerate, + // and capture timestamp will be modified to capture rate. + status_t setTimeLapseUs(int64_t* data); + + // Sets the start time us (in system time), samples before which should + // be dropped and not submitted to encoder + void setSkipFramesBeforeUs(int64_t startTimeUs); + protected: // BufferQueue::ConsumerListener interface, called when a new frame of // data is available. If we're executing and a codec buffer is @@ -132,6 +143,11 @@ protected: // set of mBufferSlot entries. 
virtual void onBuffersReleased(); + // BufferQueue::ConsumerListener interface, called when the client has + // changed the sideband stream. GraphicBufferSource doesn't handle sideband + // streams so this is a no-op (and should never be called). + virtual void onSidebandStreamChanged(); + private: // Keep track of codec input buffers. They may either be available // (mGraphicBuffer == NULL) or in use by the codec. @@ -194,8 +210,11 @@ private: bool mSuspended; - // We consume graphic buffers from this. - sp<BufferQueue> mBufferQueue; + // Our BufferQueue interfaces. mProducer is passed to the producer through + // getIGraphicBufferProducer, and mConsumer is used internally to retrieve + // the buffers queued by the producer. + sp<IGraphicBufferProducer> mProducer; + sp<IGraphicBufferConsumer> mConsumer; // Number of frames pending in BufferQueue that haven't yet been // forwarded to the codec. @@ -223,16 +242,17 @@ private: enum { kRepeatLastFrameCount = 10, }; - int64_t mRepeatAfterUs; - int64_t mMaxTimestampGapUs; KeyedVector<int64_t, int64_t> mOriginalTimeUs; + int64_t mMaxTimestampGapUs; int64_t mPrevOriginalTimeUs; int64_t mPrevModifiedTimeUs; + int64_t mSkipFramesBeforeNs; sp<ALooper> mLooper; sp<AHandlerReflector<GraphicBufferSource> > mReflector; + int64_t mRepeatAfterUs; int32_t mRepeatLastFrameGeneration; int64_t mRepeatLastFrameTimestamp; int32_t mRepeatLastFrameCount; @@ -245,6 +265,12 @@ private: // no codec buffer was available at the time. 
bool mRepeatBufferDeferred; + // Time lapse / slow motion configuration + int64_t mTimePerCaptureUs; + int64_t mTimePerFrameUs; + int64_t mPrevCaptureUs; + int64_t mPrevFrameUs; + void onMessageReceived(const sp<AMessage> &msg); DISALLOW_EVIL_CONSTRUCTORS(GraphicBufferSource); diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index 8391290..0fb38fa 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -850,6 +850,8 @@ status_t OMXNodeInstance::setInternalOption( case IOMX::INTERNAL_OPTION_SUSPEND: case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY: case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP: + case IOMX::INTERNAL_OPTION_START_TIME: + case IOMX::INTERNAL_OPTION_TIME_LAPSE: { const sp<GraphicBufferSource> &bufferSource = getGraphicBufferSource(); @@ -874,7 +876,8 @@ status_t OMXNodeInstance::setInternalOption( int64_t delayUs = *(int64_t *)data; return bufferSource->setRepeatPreviousFrameDelayUs(delayUs); - } else { + } else if (type == + IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP){ if (size != sizeof(int64_t)) { return INVALID_OPERATION; } @@ -882,6 +885,20 @@ status_t OMXNodeInstance::setInternalOption( int64_t maxGapUs = *(int64_t *)data; return bufferSource->setMaxTimestampGapUs(maxGapUs); + } else if (type == IOMX::INTERNAL_OPTION_START_TIME) { + if (size != sizeof(int64_t)) { + return INVALID_OPERATION; + } + + int64_t skipFramesBeforeUs = *(int64_t *)data; + + bufferSource->setSkipFramesBeforeUs(skipFramesBeforeUs); + } else { // IOMX::INTERNAL_OPTION_TIME_LAPSE + if (size != sizeof(int64_t) * 2) { + return INVALID_OPERATION; + } + + bufferSource->setTimeLapseUs((int64_t *)data); } return OK; diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp index d49e50b..65f5404 100644 --- a/media/libstagefright/omx/SoftOMXPlugin.cpp +++ b/media/libstagefright/omx/SoftOMXPlugin.cpp @@ -50,6 +50,7 @@ static 
const struct { { "OMX.google.mpeg4.encoder", "mpeg4enc", "video_encoder.mpeg4" }, { "OMX.google.mp3.decoder", "mp3dec", "audio_decoder.mp3" }, { "OMX.google.vorbis.decoder", "vorbisdec", "audio_decoder.vorbis" }, + { "OMX.google.opus.decoder", "opusdec", "audio_decoder.opus" }, { "OMX.google.vp8.decoder", "vpxdec", "video_decoder.vp8" }, { "OMX.google.vp9.decoder", "vpxdec", "video_decoder.vp9" }, { "OMX.google.vp8.encoder", "vpxenc", "video_encoder.vp8" }, diff --git a/media/libstagefright/omx/tests/Android.mk b/media/libstagefright/omx/tests/Android.mk index e368134..447b29e 100644 --- a/media/libstagefright/omx/tests/Android.mk +++ b/media/libstagefright/omx/tests/Android.mk @@ -11,6 +11,8 @@ LOCAL_C_INCLUDES := \ $(TOP)/frameworks/av/media/libstagefright \ $(TOP)/frameworks/native/include/media/openmax +LOCAL_CFLAGS += -Werror + LOCAL_MODULE := omx_tests LOCAL_MODULE_TAGS := tests diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp index 44e4f9d..f4dfd6b 100644 --- a/media/libstagefright/omx/tests/OMXHarness.cpp +++ b/media/libstagefright/omx/tests/OMXHarness.cpp @@ -26,6 +26,7 @@ #include <binder/ProcessState.h> #include <binder/IServiceManager.h> #include <binder/MemoryDealer.h> +#include <media/IMediaHTTPService.h> #include <media/IMediaPlayerService.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/ALooper.h> @@ -242,7 +243,8 @@ private: }; static sp<MediaExtractor> CreateExtractorFromURI(const char *uri) { - sp<DataSource> source = DataSource::CreateFromURI(uri); + sp<DataSource> source = + DataSource::CreateFromURI(NULL /* httpService */, uri); if (source == NULL) { return NULL; @@ -461,6 +463,7 @@ static const char *GetMimeFromComponentRole(const char *componentRole) { { "audio_decoder.aac", "audio/mp4a-latm" }, { "audio_decoder.mp3", "audio/mpeg" }, { "audio_decoder.vorbis", "audio/vorbis" }, + { "audio_decoder.opus", "audio/opus" }, { 
"audio_decoder.g711alaw", MEDIA_MIMETYPE_AUDIO_G711_ALAW }, { "audio_decoder.g711mlaw", MEDIA_MIMETYPE_AUDIO_G711_MLAW }, }; @@ -493,6 +496,7 @@ static const char *GetURLForMime(const char *mime) { { "audio/mpeg", "file:///sdcard/media_api/music/MP3_48KHz_128kbps_s_1_17_CBR.mp3" }, { "audio/vorbis", NULL }, + { "audio/opus", NULL }, { "video/x-vnd.on2.vp8", "file:///sdcard/media_api/video/big-buck-bunny_trailer.webm" }, { MEDIA_MIMETYPE_AUDIO_G711_ALAW, "file:///sdcard/M1F1-Alaw-AFsp.wav" }, diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp index 462c384..09f52bc 100644 --- a/media/libstagefright/rtsp/APacketSource.cpp +++ b/media/libstagefright/rtsp/APacketSource.cpp @@ -23,7 +23,7 @@ #include "ARawAudioAssembler.h" #include "ASessionDescription.h" -#include "avc_utils.h" +#include "include/avc_utils.h" #include <ctype.h> diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp index 492bd4a..f25539c 100644 --- a/media/libstagefright/rtsp/ARTSPConnection.cpp +++ b/media/libstagefright/rtsp/ARTSPConnection.cpp @@ -33,7 +33,7 @@ #include <openssl/md5.h> #include <sys/socket.h> -#include "HTTPBase.h" +#include "include/HTTPBase.h" namespace android { @@ -239,7 +239,7 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) { // right here, since we currently have no way of asking the user // for this information. 
- ALOGE("Malformed rtsp url <URL suppressed>"); + ALOGE("Malformed rtsp url %s", uriDebugString(url).c_str()); reply->setInt32("result", ERROR_MALFORMED); reply->post(); diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk index e77c69c..39eedc0 100644 --- a/media/libstagefright/rtsp/Android.mk +++ b/media/libstagefright/rtsp/Android.mk @@ -20,7 +20,7 @@ LOCAL_SRC_FILES:= \ SDPLoader.cpp \ LOCAL_C_INCLUDES:= \ - $(TOP)/frameworks/av/media/libstagefright/include \ + $(TOP)/frameworks/av/media/libstagefright \ $(TOP)/frameworks/native/include/media/openmax \ $(TOP)/external/openssl/include @@ -30,6 +30,8 @@ ifeq ($(TARGET_ARCH),arm) LOCAL_CFLAGS += -Wno-psabi endif +LOCAL_CFLAGS += -Werror + include $(BUILD_STATIC_LIBRARY) ################################################################################ diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index e7580c2..f3dfc59 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -19,7 +19,11 @@ #define MY_HANDLER_H_ //#define LOG_NDEBUG 0 + +#ifndef LOG_TAG #define LOG_TAG "MyHandler" +#endif + #include <utils/Log.h> #include "APacketSource.h" @@ -42,6 +46,12 @@ #include "HTTPBase.h" +#if LOG_NDEBUG +#define UNUSED_UNLESS_VERBOSE(x) (void)(x) +#else +#define UNUSED_UNLESS_VERBOSE(x) +#endif + // If no access units are received within 5 secs, assume that the rtp // stream has ended and signal end of stream. 
static int64_t kAccessUnitTimeoutUs = 10000000ll; @@ -178,7 +188,7 @@ struct MyHandler : public AHandler { mConn->connect(mOriginalSessionURL.c_str(), reply); } - AString getControlURL(sp<ASessionDescription> desc) { + AString getControlURL() { AString sessionLevelControlURL; if (mSessionDesc->findAttribute( 0, @@ -556,7 +566,7 @@ struct MyHandler : public AHandler { mBaseURL = tmp; } - mControlURL = getControlURL(mSessionDesc); + mControlURL = getControlURL(); if (mSessionDesc->countTracks() < 2) { // There's no actual tracks in this session. @@ -602,7 +612,7 @@ struct MyHandler : public AHandler { mSeekable = !isLiveStream(mSessionDesc); - mControlURL = getControlURL(mSessionDesc); + mControlURL = getControlURL(); if (mSessionDesc->countTracks() < 2) { // There's no actual tracks in this session. @@ -1816,6 +1826,8 @@ private: bool addMediaTimestamp( int32_t trackIndex, const TrackInfo *track, const sp<ABuffer> &accessUnit) { + UNUSED_UNLESS_VERBOSE(trackIndex); + uint32_t rtpTime; CHECK(accessUnit->meta()->findInt32( "rtp-time", (int32_t *)&rtpTime)); diff --git a/media/libstagefright/rtsp/SDPLoader.cpp b/media/libstagefright/rtsp/SDPLoader.cpp index 89ff17d..424badf 100644 --- a/media/libstagefright/rtsp/SDPLoader.cpp +++ b/media/libstagefright/rtsp/SDPLoader.cpp @@ -18,34 +18,30 @@ #define LOG_TAG "SDPLoader" #include <utils/Log.h> -#include "SDPLoader.h" +#include "include/SDPLoader.h" #include "ASessionDescription.h" -#include "HTTPBase.h" +#include <media/IMediaHTTPConnection.h> +#include <media/IMediaHTTPService.h> +#include <media/stagefright/MediaHTTP.h> #include <media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/Utils.h> #define DEFAULT_SDP_SIZE 100000 namespace android { -SDPLoader::SDPLoader(const sp<AMessage> ¬ify, uint32_t flags, bool uidValid, uid_t uid) +SDPLoader::SDPLoader( + const sp<AMessage> ¬ify, + uint32_t flags, + const sp<IMediaHTTPService> &httpService) : 
mNotify(notify), mFlags(flags), - mUIDValid(uidValid), - mUID(uid), mNetLooper(new ALooper), mCancelled(false), - mHTTPDataSource( - HTTPBase::Create( - (mFlags & kFlagIncognito) - ? HTTPBase::kFlagIncognito - : 0)) { - if (mUIDValid) { - mHTTPDataSource->setUID(mUID); - } - + mHTTPDataSource(new MediaHTTP(httpService->makeHTTPConnection())) { mNetLooper->setName("sdp net"); mNetLooper->start(false /* runOnCallingThread */, false /* canCallJava */, @@ -94,11 +90,7 @@ void SDPLoader::onLoad(const sp<AMessage> &msg) { KeyedVector<String8, String8> *headers = NULL; msg->findPointer("headers", (void **)&headers); - if (!(mFlags & kFlagIncognito)) { - ALOGV("onLoad '%s'", url.c_str()); - } else { - ALOGI("onLoad <URL suppressed>"); - } + ALOGV("onLoad %s", uriDebugString(url, mFlags & kFlagIncognito).c_str()); if (!mCancelled) { err = mHTTPDataSource->connect(url.c_str(), headers); diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp index 49ffcd6..fd889f9 100644 --- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp +++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp @@ -35,7 +35,6 @@ #include <gui/SurfaceComposerClient.h> #include <binder/ProcessState.h> -#include <ui/FramebufferNativeWindow.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/MediaBufferGroup.h> @@ -110,7 +109,7 @@ protected: } else { ALOGV("No actual display. 
Choosing EGLSurface based on SurfaceMediaSource"); sp<IGraphicBufferProducer> sms = (new SurfaceMediaSource( - getSurfaceWidth(), getSurfaceHeight()))->getBufferQueue(); + getSurfaceWidth(), getSurfaceHeight()))->getProducer(); sp<Surface> stc = new Surface(sms); sp<ANativeWindow> window = stc; @@ -361,9 +360,7 @@ protected: virtual void SetUp() { android::ProcessState::self()->startThreadPool(); mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight); - - // Manual cast is required to avoid constructor ambiguity - mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue())); + mSTC = new Surface(mSMS->getProducer()); mANW = mSTC; } @@ -398,7 +395,7 @@ protected: ALOGV("SMS-GLTest::SetUp()"); android::ProcessState::self()->startThreadPool(); mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight); - mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue())); + mSTC = new Surface(mSMS->getProducer()); mANW = mSTC; // Doing the setup related to the GL Side @@ -527,7 +524,8 @@ void SurfaceMediaSourceTest::oneBufferPass(int width, int height ) { } // Dequeuing and queuing the buffer without really filling it in. 
-void SurfaceMediaSourceTest::oneBufferPassNoFill(int width, int height ) { +void SurfaceMediaSourceTest::oneBufferPassNoFill( + int /* width */, int /* height */) { ANativeWindowBuffer* anb; ASSERT_EQ(NO_ERROR, native_window_dequeue_buffer_and_wait(mANW.get(), &anb)); ASSERT_TRUE(anb != NULL); @@ -746,9 +744,8 @@ TEST_F(SurfaceMediaSourceTest, DISABLED_EncodingFromCpuYV12BufferNpotWriteMediaS CHECK(fd >= 0); sp<MediaRecorder> mr = SurfaceMediaSourceGLTest::setUpMediaRecorder(fd, - VIDEO_SOURCE_GRALLOC_BUFFER, - OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth, - mYuvTexHeight, 30); + VIDEO_SOURCE_SURFACE, OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, + mYuvTexWidth, mYuvTexHeight, 30); // get the reference to the surfacemediasource living in // mediaserver that is created by stagefrightrecorder sp<IGraphicBufferProducer> iST = mr->querySurfaceMediaSourceFromMediaServer(); @@ -783,7 +780,7 @@ TEST_F(SurfaceMediaSourceGLTest, ChooseAndroidRecordableEGLConfigDummyWriter) { ALOGV("Verify creating a surface w/ right config + dummy writer*********"); mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight); - mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue())); + mSTC = new Surface(mSMS->getProducer()); mANW = mSTC; DummyRecorder writer(mSMS); @@ -880,7 +877,7 @@ TEST_F(SurfaceMediaSourceGLTest, EncodingFromGLRgbaSameImageEachBufNpotWrite) { } CHECK(fd >= 0); - sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_GRALLOC_BUFFER, + sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_SURFACE, OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth, mYuvTexHeight, 30); // get the reference to the surfacemediasource living in @@ -923,7 +920,7 @@ TEST_F(SurfaceMediaSourceGLTest, EncodingFromGLRgbaDiffImageEachBufNpotWrite) { } CHECK(fd >= 0); - sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_GRALLOC_BUFFER, + sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_SURFACE, OUTPUT_FORMAT_MPEG_4, 
VIDEO_ENCODER_H264, mYuvTexWidth, mYuvTexHeight, 30); // get the reference to the surfacemediasource living in diff --git a/media/libstagefright/timedtext/TimedTextDriver.cpp b/media/libstagefright/timedtext/TimedTextDriver.cpp index 12fd7f4..71aa21e 100644 --- a/media/libstagefright/timedtext/TimedTextDriver.cpp +++ b/media/libstagefright/timedtext/TimedTextDriver.cpp @@ -20,6 +20,7 @@ #include <binder/IPCThreadState.h> +#include <media/IMediaHTTPService.h> #include <media/mediaplayer.h> #include <media/MediaPlayerInterface.h> #include <media/stagefright/DataSource.h> @@ -40,9 +41,11 @@ namespace android { TimedTextDriver::TimedTextDriver( - const wp<MediaPlayerBase> &listener) + const wp<MediaPlayerBase> &listener, + const sp<IMediaHTTPService> &httpService) : mLooper(new ALooper), mListener(listener), + mHTTPService(httpService), mState(UNINITIALIZED), mCurrentTrackIndex(UINT_MAX) { mLooper->setName("TimedTextDriver"); @@ -207,7 +210,7 @@ status_t TimedTextDriver::addOutOfBandTextSource( } sp<DataSource> dataSource = - DataSource::CreateFromURI(uri); + DataSource::CreateFromURI(mHTTPService, uri); return createOutOfBandTextSource(trackIndex, mimeType, dataSource); } diff --git a/media/libstagefright/timedtext/test/Android.mk b/media/libstagefright/timedtext/test/Android.mk index a5e7ba2..9a9fde2 100644 --- a/media/libstagefright/timedtext/test/Android.mk +++ b/media/libstagefright/timedtext/test/Android.mk @@ -2,7 +2,6 @@ LOCAL_PATH:= $(call my-dir) # ================================================================ # Unit tests for libstagefright_timedtext -# See also /development/testrunner/test_defs.xml # ================================================================ # ================================================================ @@ -18,10 +17,13 @@ LOCAL_SRC_FILES := TimedTextSRTSource_test.cpp LOCAL_C_INCLUDES := \ $(TOP)/external/expat/lib \ - $(TOP)/frameworks/base/media/libstagefright/timedtext + $(TOP)/frameworks/av/media/libstagefright/timedtext 
LOCAL_SHARED_LIBRARIES := \ + libbinder \ libexpat \ - libstagefright + libstagefright \ + libstagefright_foundation \ + libutils include $(BUILD_NATIVE_TEST) diff --git a/media/libstagefright/webm/Android.mk b/media/libstagefright/webm/Android.mk new file mode 100644 index 0000000..7081463 --- /dev/null +++ b/media/libstagefright/webm/Android.mk @@ -0,0 +1,23 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_CPPFLAGS += -D__STDINT_LIMITS \ + -Werror + +LOCAL_SRC_FILES:= EbmlUtil.cpp \ + WebmElement.cpp \ + WebmFrame.cpp \ + WebmFrameThread.cpp \ + WebmWriter.cpp + + +LOCAL_C_INCLUDES += $(TOP)/frameworks/av/include + +LOCAL_SHARED_LIBRARIES += libstagefright_foundation \ + libstagefright \ + libutils \ + liblog + +LOCAL_MODULE:= libstagefright_webm + +include $(BUILD_STATIC_LIBRARY) diff --git a/media/libstagefright/webm/EbmlUtil.cpp b/media/libstagefright/webm/EbmlUtil.cpp new file mode 100644 index 0000000..449fec6 --- /dev/null +++ b/media/libstagefright/webm/EbmlUtil.cpp @@ -0,0 +1,108 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <stdint.h> + +namespace { + +// Table for Seal's algorithm for Number of Trailing Zeros. Hacker's Delight +// online, Figure 5-18 (http://www.hackersdelight.org/revisions.pdf) +// The entries whose value is -1 are never referenced. 
+int NTZ_TABLE[] = { + 32, 0, 1, 12, 2, 6, -1, 13, 3, -1, 7, -1, -1, -1, -1, 14, + 10, 4, -1, -1, 8, -1, -1, 25, -1, -1, -1, -1, -1, 21, 27, 15, + 31, 11, 5, -1, -1, -1, -1, -1, 9, -1, -1, 24, -1, -1, 20, 26, + 30, -1, -1, -1, -1, 23, -1, 19, 29, -1, 22, 18, 28, 17, 16, -1 +}; + +int numberOfTrailingZeros32(int32_t i) { + uint32_t u = (i & -i) * 0x0450FBAF; + return NTZ_TABLE[(u) >> 26]; +} + +uint64_t highestOneBit(uint64_t n) { + n |= (n >> 1); + n |= (n >> 2); + n |= (n >> 4); + n |= (n >> 8); + n |= (n >> 16); + n |= (n >> 32); + return n - (n >> 1); +} + +uint64_t _powerOf2(uint64_t u) { + uint64_t powerOf2 = highestOneBit(u); + return powerOf2 ? powerOf2 : 1; +} + +// Based on Long.numberOfTrailingZeros in Long.java +int numberOfTrailingZeros(uint64_t u) { + int32_t low = u; + return low !=0 ? numberOfTrailingZeros32(low) + : 32 + numberOfTrailingZeros32((int32_t) (u >> 32)); +} +} + +namespace webm { + +// Encode the id and/or size of an EBML element bytes by setting a leading length descriptor bit: +// +// 1xxxxxxx - 1-byte values +// 01xxxxxx xxxxxxxx - +// 001xxxxx xxxxxxxx xxxxxxxx - +// 0001xxxx xxxxxxxx xxxxxxxx xxxxxxxx - ... +// 00001xxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx - +// 000001xx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx - +// 0000001x xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx - +// 00000001 xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx - 8-byte values +// +// This function uses the least the number of bytes possible. +uint64_t encodeUnsigned(uint64_t u) { + uint64_t powerOf2 = _powerOf2(u); + if (u + 1 == powerOf2 << 1) + powerOf2 <<= 1; + int shiftWidth = (7 + numberOfTrailingZeros(powerOf2)) / 7 * 7; + long lengthDescriptor = 1 << shiftWidth; + return lengthDescriptor | u; +} + +// Like above but pads the input value with leading zeros up to the specified width. The length +// descriptor is calculated based on width. 
+uint64_t encodeUnsigned(uint64_t u, int width) { + int shiftWidth = 7 * width; + uint64_t lengthDescriptor = 1; + lengthDescriptor <<= shiftWidth; + return lengthDescriptor | u; +} + +// Calculate the length of an EBML coded id or size from its length descriptor. +int sizeOf(uint64_t u) { + uint64_t powerOf2 = _powerOf2(u); + int unsignedLength = numberOfTrailingZeros(powerOf2) / 8 + 1; + return unsignedLength; +} + +// Serialize an EBML coded id or size in big-endian order. +int serializeCodedUnsigned(uint64_t u, uint8_t* bary) { + int unsignedLength = sizeOf(u); + for (int i = unsignedLength - 1; i >= 0; i--) { + bary[i] = u & 0xff; + u >>= 8; + } + return unsignedLength; +} + +} diff --git a/media/libstagefright/webm/EbmlUtil.h b/media/libstagefright/webm/EbmlUtil.h new file mode 100644 index 0000000..eb9c37c --- /dev/null +++ b/media/libstagefright/webm/EbmlUtil.h @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef EBMLUTIL_H_ +#define EBMLUTIL_H_ + +#include <stdint.h> + +namespace webm { + +// Encode the id and/or size of an EBML element bytes by setting a leading length descriptor bit: +// +// 1xxxxxxx - 1-byte values +// 01xxxxxx xxxxxxxx - +// 001xxxxx xxxxxxxx xxxxxxxx - +// 0001xxxx xxxxxxxx xxxxxxxx xxxxxxxx - ... 
+// 00001xxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx - +// 000001xx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx - +// 0000001x xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx - +// 00000001 xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx - 8-byte values +// +// This function uses the least the number of bytes possible. +uint64_t encodeUnsigned(uint64_t u); + +// Like above but pads the input value with leading zeros up to the specified width. The length +// descriptor is calculated based on width. +uint64_t encodeUnsigned(uint64_t u, int width); + +// Serialize an EBML coded id or size in big-endian order. +int serializeCodedUnsigned(uint64_t u, uint8_t* bary); + +// Calculate the length of an EBML coded id or size from its length descriptor. +int sizeOf(uint64_t u); + +} + +#endif /* EBMLUTIL_H_ */ diff --git a/media/libstagefright/webm/LinkedBlockingQueue.h b/media/libstagefright/webm/LinkedBlockingQueue.h new file mode 100644 index 0000000..0b6a9a1 --- /dev/null +++ b/media/libstagefright/webm/LinkedBlockingQueue.h @@ -0,0 +1,79 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef LINKEDBLOCKINGQUEUE_H_ +#define LINKEDBLOCKINGQUEUE_H_ + +#include <utils/List.h> +#include <utils/Mutex.h> +#include <utils/Condition.h> + +namespace android { + +template<typename T> +class LinkedBlockingQueue { + List<T> mList; + Mutex mLock; + Condition mContentAvailableCondition; + + T front(bool remove) { + Mutex::Autolock autolock(mLock); + while (mList.empty()) { + mContentAvailableCondition.wait(mLock); + } + T e = *(mList.begin()); + if (remove) { + mList.erase(mList.begin()); + } + return e; + } + + DISALLOW_EVIL_CONSTRUCTORS(LinkedBlockingQueue); + +public: + LinkedBlockingQueue() { + } + + ~LinkedBlockingQueue() { + } + + bool empty() { + Mutex::Autolock autolock(mLock); + return mList.empty(); + } + + void clear() { + Mutex::Autolock autolock(mLock); + mList.clear(); + } + + T peek() { + return front(false); + } + + T take() { + return front(true); + } + + void push(T e) { + Mutex::Autolock autolock(mLock); + mList.push_back(e); + mContentAvailableCondition.signal(); + } +}; + +} /* namespace android */ +#endif /* LINKEDBLOCKINGQUEUE_H_ */ diff --git a/media/libstagefright/webm/WebmConstants.h b/media/libstagefright/webm/WebmConstants.h new file mode 100644 index 0000000..c53f458 --- /dev/null +++ b/media/libstagefright/webm/WebmConstants.h @@ -0,0 +1,133 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef WEBMCONSTANTS_H_ +#define WEBMCONSTANTS_H_ + +#include <stdint.h> + +namespace webm { + +const int kMinEbmlVoidSize = 2; +const int64_t kMaxMetaSeekSize = 64; +const int64_t kMkvUnknownLength = 0x01ffffffffffffffl; + +// EBML element id's from http://matroska.org/technical/specs/index.html +enum Mkv { + kMkvEbml = 0x1A45DFA3, + kMkvEbmlVersion = 0x4286, + kMkvEbmlReadVersion = 0x42F7, + kMkvEbmlMaxIdlength = 0x42F2, + kMkvEbmlMaxSizeLength = 0x42F3, + kMkvDocType = 0x4282, + kMkvDocTypeVersion = 0x4287, + kMkvDocTypeReadVersion = 0x4285, + kMkvVoid = 0xEC, + kMkvSignatureSlot = 0x1B538667, + kMkvSignatureAlgo = 0x7E8A, + kMkvSignatureHash = 0x7E9A, + kMkvSignaturePublicKey = 0x7EA5, + kMkvSignature = 0x7EB5, + kMkvSignatureElements = 0x7E5B, + kMkvSignatureElementList = 0x7E7B, + kMkvSignedElement = 0x6532, + kMkvSegment = 0x18538067, + kMkvSeekHead = 0x114D9B74, + kMkvSeek = 0x4DBB, + kMkvSeekId = 0x53AB, + kMkvSeekPosition = 0x53AC, + kMkvInfo = 0x1549A966, + kMkvTimecodeScale = 0x2AD7B1, + kMkvSegmentDuration = 0x4489, + kMkvDateUtc = 0x4461, + kMkvMuxingApp = 0x4D80, + kMkvWritingApp = 0x5741, + kMkvCluster = 0x1F43B675, + kMkvTimecode = 0xE7, + kMkvPrevSize = 0xAB, + kMkvBlockGroup = 0xA0, + kMkvBlock = 0xA1, + kMkvBlockAdditions = 0x75A1, + kMkvBlockMore = 0xA6, + kMkvBlockAddId = 0xEE, + kMkvBlockAdditional = 0xA5, + kMkvBlockDuration = 0x9B, + kMkvReferenceBlock = 0xFB, + kMkvLaceNumber = 0xCC, + kMkvSimpleBlock = 0xA3, + kMkvTracks = 0x1654AE6B, + kMkvTrackEntry = 0xAE, + kMkvTrackNumber = 0xD7, + kMkvTrackUid = 0x73C5, + kMkvTrackType = 0x83, + kMkvFlagEnabled = 0xB9, + kMkvFlagDefault = 0x88, + kMkvFlagForced = 0x55AA, + kMkvFlagLacing = 0x9C, + kMkvDefaultDuration = 0x23E383, + kMkvMaxBlockAdditionId = 0x55EE, + kMkvName = 0x536E, + kMkvLanguage = 0x22B59C, + kMkvCodecId = 0x86, + kMkvCodecPrivate = 0x63A2, + kMkvCodecName = 0x258688, + kMkvVideo = 0xE0, + kMkvFlagInterlaced = 0x9A, + kMkvStereoMode = 0x53B8, + kMkvAlphaMode = 0x53C0, + 
kMkvPixelWidth = 0xB0, + kMkvPixelHeight = 0xBA, + kMkvPixelCropBottom = 0x54AA, + kMkvPixelCropTop = 0x54BB, + kMkvPixelCropLeft = 0x54CC, + kMkvPixelCropRight = 0x54DD, + kMkvDisplayWidth = 0x54B0, + kMkvDisplayHeight = 0x54BA, + kMkvDisplayUnit = 0x54B2, + kMkvAspectRatioType = 0x54B3, + kMkvFrameRate = 0x2383E3, + kMkvAudio = 0xE1, + kMkvSamplingFrequency = 0xB5, + kMkvOutputSamplingFrequency = 0x78B5, + kMkvChannels = 0x9F, + kMkvBitDepth = 0x6264, + kMkvCues = 0x1C53BB6B, + kMkvCuePoint = 0xBB, + kMkvCueTime = 0xB3, + kMkvCueTrackPositions = 0xB7, + kMkvCueTrack = 0xF7, + kMkvCueClusterPosition = 0xF1, + kMkvCueBlockNumber = 0x5378 +}; + +enum TrackTypes { + kInvalidType = -1, + kVideoType = 0x1, + kAudioType = 0x2, + kComplexType = 0x3, + kLogoType = 0x10, + kSubtitleType = 0x11, + kButtonsType = 0x12, + kControlType = 0x20 +}; + +enum TrackNum { + kVideoTrackNum = 0x1, + kAudioTrackNum = 0x2 +}; +} + +#endif /* WEBMCONSTANTS_H_ */ diff --git a/media/libstagefright/webm/WebmElement.cpp b/media/libstagefright/webm/WebmElement.cpp new file mode 100644 index 0000000..a008cab --- /dev/null +++ b/media/libstagefright/webm/WebmElement.cpp @@ -0,0 +1,367 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// #define LOG_NDEBUG 0 +#define LOG_TAG "WebmElement" + +#include "EbmlUtil.h" +#include "WebmElement.h" +#include "WebmConstants.h" + +#include <media/stagefright/foundation/ADebug.h> +#include <utils/Log.h> + +#include <string.h> +#include <unistd.h> +#include <errno.h> +#include <fcntl.h> +#include <sys/mman.h> + +using namespace android; +using namespace webm; + +namespace { + +int64_t voidSize(int64_t totalSize) { + if (totalSize < 2) { + return -1; + } + if (totalSize < 9) { + return totalSize - 2; + } + return totalSize - 9; +} + +uint64_t childrenSum(const List<sp<WebmElement> >& children) { + uint64_t total = 0; + for (List<sp<WebmElement> >::const_iterator it = children.begin(); + it != children.end(); ++it) { + total += (*it)->totalSize(); + } + return total; +} + +void populateCommonTrackEntries( + int num, + uint64_t uid, + bool lacing, + const char *lang, + const char *codec, + TrackTypes type, + List<sp<WebmElement> > &ls) { + ls.push_back(new WebmUnsigned(kMkvTrackNumber, num)); + ls.push_back(new WebmUnsigned(kMkvTrackUid, uid)); + ls.push_back(new WebmUnsigned(kMkvFlagLacing, lacing)); + ls.push_back(new WebmString(kMkvLanguage, lang)); + ls.push_back(new WebmString(kMkvCodecId, codec)); + ls.push_back(new WebmUnsigned(kMkvTrackType, type)); +} +} + +namespace android { + +WebmElement::WebmElement(uint64_t id, uint64_t size) + : mId(id), mSize(size) { +} + +WebmElement::~WebmElement() { +} + +int WebmElement::serializePayloadSize(uint8_t *buf) { + return serializeCodedUnsigned(encodeUnsigned(mSize), buf); +} + +uint64_t WebmElement::serializeInto(uint8_t *buf) { + uint8_t *cur = buf; + int head = serializeCodedUnsigned(mId, cur); + cur += head; + int neck = serializePayloadSize(cur); + cur += neck; + serializePayload(cur); + cur += mSize; + return cur - buf; +} + +uint64_t WebmElement::totalSize() { + uint8_t buf[8]; + //............... 
+ sizeOf(encodeUnsigned(size)) + return sizeOf(mId) + serializePayloadSize(buf) + mSize; +} + +uint8_t *WebmElement::serialize(uint64_t& size) { + size = totalSize(); + uint8_t *buf = new uint8_t[size]; + serializeInto(buf); + return buf; +} + +int WebmElement::write(int fd, uint64_t& size) { + uint8_t buf[8]; + size = totalSize(); + off64_t off = ::lseek64(fd, (size - 1), SEEK_CUR) - (size - 1); + ::write(fd, buf, 1); // extend file + + off64_t curOff = off + size; + off64_t alignedOff = off & ~(::sysconf(_SC_PAGE_SIZE) - 1); + off64_t mapSize = curOff - alignedOff; + off64_t pageOff = off - alignedOff; + void *dst = ::mmap64(NULL, mapSize, PROT_WRITE, MAP_SHARED, fd, alignedOff); + if (dst == MAP_FAILED) { + ALOGE("mmap64 failed; errno = %d", errno); + ALOGE("fd %d; flags: %o", fd, ::fcntl(fd, F_GETFL, 0)); + return errno; + } else { + serializeInto((uint8_t*) dst + pageOff); + ::msync(dst, mapSize, MS_SYNC); + return ::munmap(dst, mapSize); + } +} + +//================================================================================================= + +WebmUnsigned::WebmUnsigned(uint64_t id, uint64_t value) + : WebmElement(id, sizeOf(value)), mValue(value) { +} + +void WebmUnsigned::serializePayload(uint8_t *buf) { + serializeCodedUnsigned(mValue, buf); +} + +//================================================================================================= + +WebmFloat::WebmFloat(uint64_t id, double value) + : WebmElement(id, sizeof(double)), mValue(value) { +} + +WebmFloat::WebmFloat(uint64_t id, float value) + : WebmElement(id, sizeof(float)), mValue(value) { +} + +void WebmFloat::serializePayload(uint8_t *buf) { + uint64_t data; + if (mSize == sizeof(float)) { + float f = mValue; + data = *reinterpret_cast<const uint32_t*>(&f); + } else { + data = *reinterpret_cast<const uint64_t*>(&mValue); + } + for (int i = mSize - 1; i >= 0; --i) { + buf[i] = data & 0xff; + data >>= 8; + } +} + 
+//================================================================================================= + +WebmBinary::WebmBinary(uint64_t id, const sp<ABuffer> &ref) + : WebmElement(id, ref->size()), mRef(ref) { +} + +void WebmBinary::serializePayload(uint8_t *buf) { + memcpy(buf, mRef->data(), mRef->size()); +} + +//================================================================================================= + +WebmString::WebmString(uint64_t id, const char *str) + : WebmElement(id, strlen(str)), mStr(str) { +} + +void WebmString::serializePayload(uint8_t *buf) { + memcpy(buf, mStr, strlen(mStr)); +} + +//================================================================================================= + +WebmSimpleBlock::WebmSimpleBlock( + int trackNum, + int16_t relTimecode, + bool key, + const sp<ABuffer>& orig) + // ............................ trackNum*1 + timecode*2 + flags*1 + // ^^^ + // Only the least significant byte of trackNum is encoded + : WebmElement(kMkvSimpleBlock, orig->size() + 4), + mTrackNum(trackNum), + mRelTimecode(relTimecode), + mKey(key), + mRef(orig) { +} + +void WebmSimpleBlock::serializePayload(uint8_t *buf) { + serializeCodedUnsigned(encodeUnsigned(mTrackNum), buf); + buf[1] = (mRelTimecode & 0xff00) >> 8; + buf[2] = mRelTimecode & 0xff; + buf[3] = mKey ? 
0x80 : 0; + memcpy(buf + 4, mRef->data(), mSize - 4); +} + +//================================================================================================= + +EbmlVoid::EbmlVoid(uint64_t totalSize) + : WebmElement(kMkvVoid, voidSize(totalSize)), + mSizeWidth(totalSize - sizeOf(kMkvVoid) - voidSize(totalSize)) { + CHECK_GE(voidSize(totalSize), 0); +} + +int EbmlVoid::serializePayloadSize(uint8_t *buf) { + return serializeCodedUnsigned(encodeUnsigned(mSize, mSizeWidth), buf); +} + +void EbmlVoid::serializePayload(uint8_t *buf) { + ::memset(buf, 0, mSize); + return; +} + +//================================================================================================= + +WebmMaster::WebmMaster(uint64_t id, const List<sp<WebmElement> >& children) + : WebmElement(id, childrenSum(children)), mChildren(children) { +} + +WebmMaster::WebmMaster(uint64_t id) + : WebmElement(id, 0) { +} + +int WebmMaster::serializePayloadSize(uint8_t *buf) { + if (mSize == 0){ + return serializeCodedUnsigned(kMkvUnknownLength, buf); + } + return WebmElement::serializePayloadSize(buf); +} + +void WebmMaster::serializePayload(uint8_t *buf) { + uint64_t off = 0; + for (List<sp<WebmElement> >::const_iterator it = mChildren.begin(); it != mChildren.end(); + ++it) { + sp<WebmElement> child = (*it); + child->serializeInto(buf + off); + off += child->totalSize(); + } +} + +//================================================================================================= + +sp<WebmElement> WebmElement::CuePointEntry(uint64_t time, int track, uint64_t off) { + List<sp<WebmElement> > cuePointEntryFields; + cuePointEntryFields.push_back(new WebmUnsigned(kMkvCueTrack, track)); + cuePointEntryFields.push_back(new WebmUnsigned(kMkvCueClusterPosition, off)); + WebmElement *cueTrackPositions = new WebmMaster(kMkvCueTrackPositions, cuePointEntryFields); + + cuePointEntryFields.clear(); + cuePointEntryFields.push_back(new WebmUnsigned(kMkvCueTime, time)); + 
cuePointEntryFields.push_back(cueTrackPositions); + return new WebmMaster(kMkvCuePoint, cuePointEntryFields); +} + +sp<WebmElement> WebmElement::SeekEntry(uint64_t id, uint64_t off) { + List<sp<WebmElement> > seekEntryFields; + seekEntryFields.push_back(new WebmUnsigned(kMkvSeekId, id)); + seekEntryFields.push_back(new WebmUnsigned(kMkvSeekPosition, off)); + return new WebmMaster(kMkvSeek, seekEntryFields); +} + +sp<WebmElement> WebmElement::EbmlHeader( + int ver, + int readVer, + int maxIdLen, + int maxSizeLen, + int docVer, + int docReadVer) { + List<sp<WebmElement> > headerFields; + headerFields.push_back(new WebmUnsigned(kMkvEbmlVersion, ver)); + headerFields.push_back(new WebmUnsigned(kMkvEbmlReadVersion, readVer)); + headerFields.push_back(new WebmUnsigned(kMkvEbmlMaxIdlength, maxIdLen)); + headerFields.push_back(new WebmUnsigned(kMkvEbmlMaxSizeLength, maxSizeLen)); + headerFields.push_back(new WebmString(kMkvDocType, "webm")); + headerFields.push_back(new WebmUnsigned(kMkvDocTypeVersion, docVer)); + headerFields.push_back(new WebmUnsigned(kMkvDocTypeReadVersion, docReadVer)); + return new WebmMaster(kMkvEbml, headerFields); +} + +sp<WebmElement> WebmElement::SegmentInfo(uint64_t scale, double dur) { + List<sp<WebmElement> > segmentInfo; + // place duration first; easier to patch + segmentInfo.push_back(new WebmFloat(kMkvSegmentDuration, dur)); + segmentInfo.push_back(new WebmUnsigned(kMkvTimecodeScale, scale)); + segmentInfo.push_back(new WebmString(kMkvMuxingApp, "android")); + segmentInfo.push_back(new WebmString(kMkvWritingApp, "android")); + return new WebmMaster(kMkvInfo, segmentInfo); +} + +sp<WebmElement> WebmElement::AudioTrackEntry( + int chans, + double rate, + const sp<ABuffer> &buf, + int bps, + uint64_t uid, + bool lacing, + const char *lang) { + if (uid == 0) { + uid = kAudioTrackNum; + } + + List<sp<WebmElement> > trackEntryFields; + populateCommonTrackEntries( + kAudioTrackNum, + uid, + lacing, + lang, + "A_VORBIS", + kAudioType, + 
trackEntryFields); + + List<sp<WebmElement> > audioInfo; + audioInfo.push_back(new WebmUnsigned(kMkvChannels, chans)); + audioInfo.push_back(new WebmFloat(kMkvSamplingFrequency, rate)); + if (bps) { + WebmElement *bitDepth = new WebmUnsigned(kMkvBitDepth, bps); + audioInfo.push_back(bitDepth); + } + + trackEntryFields.push_back(new WebmMaster(kMkvAudio, audioInfo)); + trackEntryFields.push_back(new WebmBinary(kMkvCodecPrivate, buf)); + return new WebmMaster(kMkvTrackEntry, trackEntryFields); +} + +sp<WebmElement> WebmElement::VideoTrackEntry( + uint64_t width, + uint64_t height, + uint64_t uid, + bool lacing, + const char *lang) { + if (uid == 0) { + uid = kVideoTrackNum; + } + + List<sp<WebmElement> > trackEntryFields; + populateCommonTrackEntries( + kVideoTrackNum, + uid, + lacing, + lang, + "V_VP8", + kVideoType, + trackEntryFields); + + List<sp<WebmElement> > videoInfo; + videoInfo.push_back(new WebmUnsigned(kMkvPixelWidth, width)); + videoInfo.push_back(new WebmUnsigned(kMkvPixelHeight, height)); + + trackEntryFields.push_back(new WebmMaster(kMkvVideo, videoInfo)); + return new WebmMaster(kMkvTrackEntry, trackEntryFields); +} +} /* namespace android */ diff --git a/media/libstagefright/webm/WebmElement.h b/media/libstagefright/webm/WebmElement.h new file mode 100644 index 0000000..f19933e --- /dev/null +++ b/media/libstagefright/webm/WebmElement.h @@ -0,0 +1,127 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef WEBMELEMENT_H_ +#define WEBMELEMENT_H_ + +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/foundation/ABase.h> +#include <media/stagefright/foundation/ABuffer.h> +#include <utils/List.h> + +namespace android { + +struct WebmElement : public LightRefBase<WebmElement> { + const uint64_t mId, mSize; + + WebmElement(uint64_t id, uint64_t size); + virtual ~WebmElement(); + + virtual int serializePayloadSize(uint8_t *buf); + virtual void serializePayload(uint8_t *buf)=0; + uint64_t totalSize(); + uint64_t serializeInto(uint8_t *buf); + uint8_t *serialize(uint64_t& size); + int write(int fd, uint64_t& size); + + static sp<WebmElement> EbmlHeader( + int ver = 1, + int readVer = 1, + int maxIdLen = 4, + int maxSizeLen = 8, + int docVer = 2, + int docReadVer = 2); + + static sp<WebmElement> SegmentInfo(uint64_t scale = 1000000, double dur = 0); + + static sp<WebmElement> AudioTrackEntry( + int chans, + double rate, + const sp<ABuffer> &buf, + int bps = 0, + uint64_t uid = 0, + bool lacing = false, + const char *lang = "und"); + + static sp<WebmElement> VideoTrackEntry( + uint64_t width, + uint64_t height, + uint64_t uid = 0, + bool lacing = false, + const char *lang = "und"); + + static sp<WebmElement> SeekEntry(uint64_t id, uint64_t off); + static sp<WebmElement> CuePointEntry(uint64_t time, int track, uint64_t off); + static sp<WebmElement> SimpleBlock( + int trackNum, + int16_t timecode, + bool key, + const uint8_t *data, + uint64_t dataSize); +}; + +struct WebmUnsigned : public WebmElement { + WebmUnsigned(uint64_t id, uint64_t value); + const uint64_t mValue; + void serializePayload(uint8_t *buf); +}; + +struct WebmFloat : public WebmElement { + const double mValue; + WebmFloat(uint64_t id, float value); + WebmFloat(uint64_t id, double value); + void serializePayload(uint8_t *buf); +}; + +struct WebmBinary : public WebmElement 
{ + const sp<ABuffer> mRef; + WebmBinary(uint64_t id, const sp<ABuffer> &ref); + void serializePayload(uint8_t *buf); +}; + +struct WebmString : public WebmElement { + const char *const mStr; + WebmString(uint64_t id, const char *str); + void serializePayload(uint8_t *buf); +}; + +struct WebmSimpleBlock : public WebmElement { + const int mTrackNum; + const int16_t mRelTimecode; + const bool mKey; + const sp<ABuffer> mRef; + + WebmSimpleBlock(int trackNum, int16_t timecode, bool key, const sp<ABuffer>& orig); + void serializePayload(uint8_t *buf); +}; + +struct EbmlVoid : public WebmElement { + const uint64_t mSizeWidth; + EbmlVoid(uint64_t totalSize); + int serializePayloadSize(uint8_t *buf); + void serializePayload(uint8_t *buf); +}; + +struct WebmMaster : public WebmElement { + const List<sp<WebmElement> > mChildren; + WebmMaster(uint64_t id); + WebmMaster(uint64_t id, const List<sp<WebmElement> > &children); + int serializePayloadSize(uint8_t *buf); + void serializePayload(uint8_t *buf); +}; + +} /* namespace android */ +#endif /* WEBMELEMENT_H_ */ diff --git a/media/libstagefright/webm/WebmFrame.cpp b/media/libstagefright/webm/WebmFrame.cpp new file mode 100644 index 0000000..e5134ed --- /dev/null +++ b/media/libstagefright/webm/WebmFrame.cpp @@ -0,0 +1,83 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "WebmFrame" + +#include "WebmFrame.h" +#include "WebmConstants.h" + +#include <media/stagefright/foundation/ADebug.h> +#include <unistd.h> + +using namespace android; +using namespace webm; + +namespace { +sp<ABuffer> toABuffer(MediaBuffer *mbuf) { + sp<ABuffer> abuf = new ABuffer(mbuf->range_length()); + memcpy(abuf->data(), (uint8_t*) mbuf->data() + mbuf->range_offset(), mbuf->range_length()); + return abuf; +} +} + +namespace android { + +const sp<WebmFrame> WebmFrame::EOS = new WebmFrame(); + +WebmFrame::WebmFrame() + : mType(kInvalidType), + mKey(false), + mAbsTimecode(UINT64_MAX), + mData(new ABuffer(0)), + mEos(true) { +} + +WebmFrame::WebmFrame(int type, bool key, uint64_t absTimecode, MediaBuffer *mbuf) + : mType(type), + mKey(key), + mAbsTimecode(absTimecode), + mData(toABuffer(mbuf)), + mEos(false) { +} + +sp<WebmElement> WebmFrame::SimpleBlock(uint64_t baseTimecode) const { + return new WebmSimpleBlock( + mType == kVideoType ? kVideoTrackNum : kAudioTrackNum, + mAbsTimecode - baseTimecode, + mKey, + mData); +} + +bool WebmFrame::operator<(const WebmFrame &other) const { + if (this->mEos) { + return false; + } + if (other.mEos) { + return true; + } + if (this->mAbsTimecode == other.mAbsTimecode) { + if (this->mType == kAudioType && other.mType == kVideoType) { + return true; + } + if (this->mType == kVideoType && other.mType == kAudioType) { + return false; + } + return false; + } + return this->mAbsTimecode < other.mAbsTimecode; +} +} /* namespace android */ diff --git a/media/libstagefright/webm/WebmFrame.h b/media/libstagefright/webm/WebmFrame.h new file mode 100644 index 0000000..4f0b055 --- /dev/null +++ b/media/libstagefright/webm/WebmFrame.h @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef WEBMFRAME_H_ +#define WEBMFRAME_H_ + +#include "WebmElement.h" + +namespace android { + +struct WebmFrame : LightRefBase<WebmFrame> { +public: + const int mType; + const bool mKey; + const uint64_t mAbsTimecode; + const sp<ABuffer> mData; + const bool mEos; + + WebmFrame(); + WebmFrame(int type, bool key, uint64_t absTimecode, MediaBuffer *buf); + ~WebmFrame() {} + + sp<WebmElement> SimpleBlock(uint64_t baseTimecode) const; + + bool operator<(const WebmFrame &other) const; + + static const sp<WebmFrame> EOS; +private: + DISALLOW_EVIL_CONSTRUCTORS(WebmFrame); +}; + +} /* namespace android */ +#endif /* WEBMFRAME_H_ */ diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp new file mode 100644 index 0000000..a4b8a42 --- /dev/null +++ b/media/libstagefright/webm/WebmFrameThread.cpp @@ -0,0 +1,399 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "WebmFrameThread" + +#include "WebmConstants.h" +#include "WebmFrameThread.h" + +#include <media/stagefright/MetaData.h> +#include <media/stagefright/foundation/ADebug.h> + +#include <utils/Log.h> +#include <inttypes.h> + +using namespace webm; + +namespace android { + +void *WebmFrameThread::wrap(void *arg) { + WebmFrameThread *worker = reinterpret_cast<WebmFrameThread*>(arg); + worker->run(); + return NULL; +} + +status_t WebmFrameThread::start() { + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + pthread_create(&mThread, &attr, WebmFrameThread::wrap, this); + pthread_attr_destroy(&attr); + return OK; +} + +status_t WebmFrameThread::stop() { + void *status; + pthread_join(mThread, &status); + return (status_t)(intptr_t)status; +} + +//================================================================================================= + +WebmFrameSourceThread::WebmFrameSourceThread( + int type, + LinkedBlockingQueue<const sp<WebmFrame> >& sink) + : mType(type), mSink(sink) { +} + +//================================================================================================= + +WebmFrameSinkThread::WebmFrameSinkThread( + const int& fd, + const uint64_t& off, + sp<WebmFrameSourceThread> videoThread, + sp<WebmFrameSourceThread> audioThread, + List<sp<WebmElement> >& cues) + : mFd(fd), + mSegmentDataStart(off), + mVideoFrames(videoThread->mSink), + mAudioFrames(audioThread->mSink), + mCues(cues), + mDone(true) { +} + +WebmFrameSinkThread::WebmFrameSinkThread( + const int& fd, + const uint64_t& off, + LinkedBlockingQueue<const sp<WebmFrame> >& videoSource, + LinkedBlockingQueue<const sp<WebmFrame> >& audioSource, + List<sp<WebmElement> >& cues) + : mFd(fd), + mSegmentDataStart(off), + mVideoFrames(videoSource), + mAudioFrames(audioSource), + mCues(cues), + mDone(true) { +} + +// Initializes a webm cluster with its starting timecode. 
// Initializes a webm cluster with its starting timecode.
//
// frames:
//   sequence of input audio/video frames received from the source.
//
// clusterTimecodeL:
//   the starting timecode of the cluster; this is the timecode of the first
//   frame since frames are ordered by timestamp.
//
// children:
//   list to hold child elements in a webm cluster (start timecode and
//   simple blocks).
//
// static
void WebmFrameSinkThread::initCluster(
    List<const sp<WebmFrame> >& frames,
    uint64_t& clusterTimecodeL,
    List<sp<WebmElement> >& children) {
    CHECK(!frames.empty() && children.empty());

    const sp<WebmFrame> f = *(frames.begin());
    clusterTimecodeL = f->mAbsTimecode;
    WebmUnsigned *clusterTimecode = new WebmUnsigned(kMkvTimecode, clusterTimecodeL);
    // NOTE(review): children is already empty per the CHECK above, so this
    // clear() looks redundant — confirm before removing.
    children.clear();
    children.push_back(clusterTimecode);
}

// Serializes the accumulated cluster children (timecode + simple blocks) as
// one Mkv Cluster element at the current file position, then resets the list.
void WebmFrameSinkThread::writeCluster(List<sp<WebmElement> >& children) {
    // children must contain at least one simpleblock and its timecode
    CHECK_GE(children.size(), 2);

    uint64_t size;
    sp<WebmElement> cluster = new WebmMaster(kMkvCluster, children);
    cluster->write(mFd, size);
    children.clear();
}

// Write out (possibly multiple) webm cluster(s) from frames split on video key frames.
//
// last:
//   current flush is triggered by EOS instead of a second outstanding video key frame.
void WebmFrameSinkThread::flushFrames(List<const sp<WebmFrame> >& frames, bool last) {
    if (frames.empty()) {
        return;
    }

    uint64_t clusterTimecodeL;
    List<sp<WebmElement> > children;
    initCluster(frames, clusterTimecodeL, children);

    // Record where this cluster starts so a CuePoint can reference it.
    uint64_t cueTime = clusterTimecodeL;
    off_t fpos = ::lseek(mFd, 0, SEEK_CUR);
    size_t n = frames.size();
    if (!last) {
        // If we are not flushing the last sequence of outstanding frames, flushFrames
        // must have been called right after we have pushed a second outstanding video key
        // frame (the last frame), which belongs to the next cluster; also hold back on
        // flushing the second to last frame before we check its type. A audio frame
        // should precede the aforementioned video key frame in the next sequence, a video
        // frame should be the last frame in the current (to-be-flushed) sequence.
        CHECK_GE(n, 2);
        n -= 2;
    }

    for (size_t i = 0; i < n; i++) {
        const sp<WebmFrame> f = *(frames.begin());
        if (f->mType == kVideoType && f->mKey) {
            cueTime = f->mAbsTimecode;
        }

        // Block timecodes are signed 16-bit relative offsets; start a new
        // cluster once a frame no longer fits relative to the cluster start.
        if (f->mAbsTimecode - clusterTimecodeL > INT16_MAX) {
            writeCluster(children);
            initCluster(frames, clusterTimecodeL, children);
        }

        frames.erase(frames.begin());
        children.push_back(f->SimpleBlock(clusterTimecodeL));
    }

    // equivalent to last==false
    if (!frames.empty()) {
        // decide whether to write out the second to last frame.
        const sp<WebmFrame> secondLastFrame = *(frames.begin());
        if (secondLastFrame->mType == kVideoType) {
            frames.erase(frames.begin());
            children.push_back(secondLastFrame->SimpleBlock(clusterTimecodeL));
        }
    }

    writeCluster(children);
    sp<WebmElement> cuePoint = WebmElement::CuePointEntry(cueTime, 1, fpos - mSegmentDataStart);
    mCues.push_back(cuePoint);
}

// Marks the sink as running before spawning its thread.
status_t WebmFrameSinkThread::start() {
    mDone = false;
    return WebmFrameThread::start();
}

// Requests shutdown: EOS markers are pushed into both queues so a run() loop
// blocked in peek() wakes up, then the thread is joined.
status_t WebmFrameSinkThread::stop() {
    mDone = true;
    mVideoFrames.push(WebmFrame::EOS);
    mAudioFrames.push(WebmFrame::EOS);
    return WebmFrameThread::stop();
}

// Muxer main loop: merge-sorts the audio and video queues by timestamp into
// |outstandingFrames|, flushing a cluster each time a second video key frame
// arrives (clusters are split on key frames so seeking lands on them).
void WebmFrameSinkThread::run() {
    int numVideoKeyFrames = 0;
    List<const sp<WebmFrame> > outstandingFrames;
    while (!mDone) {
        // peek() blocks until a frame is available; the frame is only
        // removed (take()) from whichever queue wins the timestamp compare.
        ALOGV("wait v frame");
        const sp<WebmFrame> videoFrame = mVideoFrames.peek();
        ALOGV("v frame: %p", videoFrame.get());

        ALOGV("wait a frame");
        const sp<WebmFrame> audioFrame = mAudioFrames.peek();
        ALOGV("a frame: %p", audioFrame.get());

        if (videoFrame->mEos && audioFrame->mEos) {
            break;
        }

        if (*audioFrame < *videoFrame) {
            ALOGV("take a frame");
            mAudioFrames.take();
            outstandingFrames.push_back(audioFrame);
        } else {
            ALOGV("take v frame");
            mVideoFrames.take();
            outstandingFrames.push_back(videoFrame);
            if (videoFrame->mKey)
                numVideoKeyFrames++;
        }

        if (numVideoKeyFrames == 2) {
            // The second key frame belongs to the NEXT cluster; flushFrames
            // holds it (and possibly one preceding audio frame) back.
            flushFrames(outstandingFrames, /* last = */ false);
            numVideoKeyFrames--;
        }
    }
    ALOGV("flushing last cluster (size %zu)", outstandingFrames.size());
    flushFrames(outstandingFrames, /* last = */ true);
    mDone = true;
}

//=================================================================================================

// Extra startup delay used to align a/v start times (see ctor comment below).
static const int64_t kInitialDelayTimeUs = 700000LL;

// Resets all state flags to their pre-start values.
void WebmFrameMediaSourceThread::clearFlags() {
    mDone = false;
    mPaused = false;
    mResumed = false;
    mStarted = false;
    mReachedEOS = false;
}
+ */ + int64_t startTimeOffsetUs = startTimeOffsetMs * 1000LL; + if (startTimeOffsetUs < 0) { // Start time offset was not set + startTimeOffsetUs = kInitialDelayTimeUs; + } + mStartTimeUs += startTimeOffsetUs; + ALOGI("Start time offset: %" PRId64 " us", startTimeOffsetUs); + } +} + +status_t WebmFrameMediaSourceThread::start() { + sp<MetaData> meta = new MetaData; + meta->setInt64(kKeyTime, mStartTimeUs); + status_t err = mSource->start(meta.get()); + if (err != OK) { + mDone = true; + mReachedEOS = true; + return err; + } else { + mStarted = true; + return WebmFrameThread::start(); + } +} + +status_t WebmFrameMediaSourceThread::resume() { + if (!mDone && mPaused) { + mPaused = false; + mResumed = true; + } + return OK; +} + +status_t WebmFrameMediaSourceThread::pause() { + if (mStarted) { + mPaused = true; + } + return OK; +} + +status_t WebmFrameMediaSourceThread::stop() { + if (mStarted) { + mStarted = false; + mDone = true; + mSource->stop(); + return WebmFrameThread::stop(); + } + return OK; +} + +void WebmFrameMediaSourceThread::run() { + int32_t count = 0; + int64_t timestampUs = 0xdeadbeef; + int64_t lastTimestampUs = 0; // Previous sample time stamp + int64_t lastDurationUs = 0; // Previous sample duration + int64_t previousPausedDurationUs = 0; + + const uint64_t kUninitialized = 0xffffffffffffffffL; + mStartTimeUs = kUninitialized; + + status_t err = OK; + MediaBuffer *buffer; + while (!mDone && (err = mSource->read(&buffer, NULL)) == OK) { + if (buffer->range_length() == 0) { + buffer->release(); + buffer = NULL; + continue; + } + + sp<MetaData> md = buffer->meta_data(); + CHECK(md->findInt64(kKeyTime, ×tampUs)); + if (mStartTimeUs == kUninitialized) { + mStartTimeUs = timestampUs; + } + timestampUs -= mStartTimeUs; + + if (mPaused && !mResumed) { + lastDurationUs = timestampUs - lastTimestampUs; + lastTimestampUs = timestampUs; + buffer->release(); + buffer = NULL; + continue; + } + ++count; + + // adjust time-stamps after pause/resume + if 
(mResumed) { + int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs; + CHECK_GE(durExcludingEarlierPausesUs, 0ll); + int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs; + CHECK_GE(pausedDurationUs, lastDurationUs); + previousPausedDurationUs += pausedDurationUs - lastDurationUs; + mResumed = false; + } + timestampUs -= previousPausedDurationUs; + CHECK_GE(timestampUs, 0ll); + + int32_t isSync = false; + md->findInt32(kKeyIsSyncFrame, &isSync); + const sp<WebmFrame> f = new WebmFrame( + mType, + isSync, + timestampUs * 1000 / mTimeCodeScale, + buffer); + mSink.push(f); + + ALOGV( + "%s %s frame at %" PRId64 " size %zu\n", + mType == kVideoType ? "video" : "audio", + isSync ? "I" : "P", + timestampUs * 1000 / mTimeCodeScale, + buffer->range_length()); + + buffer->release(); + buffer = NULL; + + if (timestampUs > mTrackDurationUs) { + mTrackDurationUs = timestampUs; + } + lastDurationUs = timestampUs - lastTimestampUs; + lastTimestampUs = timestampUs; + } + + mTrackDurationUs += lastDurationUs; + mSink.push(WebmFrame::EOS); +} +} diff --git a/media/libstagefright/webm/WebmFrameThread.h b/media/libstagefright/webm/WebmFrameThread.h new file mode 100644 index 0000000..d65d9b7 --- /dev/null +++ b/media/libstagefright/webm/WebmFrameThread.h @@ -0,0 +1,160 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
#ifndef WEBMFRAMETHREAD_H_
#define WEBMFRAMETHREAD_H_

#include "WebmFrame.h"
#include "LinkedBlockingQueue.h"

#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaSource.h>

#include <utils/List.h>
#include <utils/Errors.h>

#include <pthread.h>

namespace android {

// Base class for the writer's worker threads: wraps a joinable pthread whose
// entry point is the virtual run() method.
//
// NOTE(review): the destructor calls the virtual stop() — at that point the
// derived part is already destroyed, so the base stop() runs, not the
// override.  Derived classes appear to rely on being stopped explicitly
// before destruction; confirm callers always do so.
class WebmFrameThread : public LightRefBase<WebmFrameThread> {
public:
    virtual void run() = 0;
    virtual bool running() { return false; }
    virtual status_t start();
    virtual status_t pause() { return OK; }
    virtual status_t resume() { return OK; }
    virtual status_t stop();
    virtual ~WebmFrameThread() { stop(); }
    static void *wrap(void *arg);

protected:
    WebmFrameThread()
        : mThread(0) {
    }

private:
    pthread_t mThread;  // 0 until start() succeeds
    DISALLOW_EVIL_CONSTRUCTORS(WebmFrameThread);
};

//=================================================================================================

class WebmFrameSourceThread;

// Consumes frames from the audio and video queues, merges them by timestamp
// and writes WebM clusters (and collects cue points) to the output fd.
class WebmFrameSinkThread : public WebmFrameThread {
public:
    WebmFrameSinkThread(
        const int& fd,
        const uint64_t& off,
        sp<WebmFrameSourceThread> videoThread,
        sp<WebmFrameSourceThread> audioThread,
        List<sp<WebmElement> >& cues);

    WebmFrameSinkThread(
        const int& fd,
        const uint64_t& off,
        LinkedBlockingQueue<const sp<WebmFrame> >& videoSource,
        LinkedBlockingQueue<const sp<WebmFrame> >& audioSource,
        List<sp<WebmElement> >& cues);

    void run();
    bool running() {
        return !mDone;
    }
    status_t start();
    status_t stop();

private:
    // Held by reference: the owning WebmWriter may update fd/offset after
    // construction and the sink must observe the current values.
    const int& mFd;
    const uint64_t& mSegmentDataStart;
    LinkedBlockingQueue<const sp<WebmFrame> >& mVideoFrames;
    LinkedBlockingQueue<const sp<WebmFrame> >& mAudioFrames;
    List<sp<WebmElement> >& mCues;

    volatile bool mDone;  // set by stop(); checked by the run() loop

    static void initCluster(
        List<const sp<WebmFrame> >& frames,
        uint64_t& clusterTimecodeL,
        List<sp<WebmElement> >& children);
    void writeCluster(List<sp<WebmElement> >& children);
    void flushFrames(List<const sp<WebmFrame> >& frames, bool last);
};

//=================================================================================================

// Base for producers: pushes frames of one type (audio or video) into mSink.
class WebmFrameSourceThread : public WebmFrameThread {
public:
    WebmFrameSourceThread(int type, LinkedBlockingQueue<const sp<WebmFrame> >& sink);
    virtual int64_t getDurationUs() = 0;
protected:
    const int mType;
    LinkedBlockingQueue<const sp<WebmFrame> >& mSink;

    friend class WebmFrameSinkThread;
};

//=================================================================================================

// Placeholder source for a missing track: immediately emits EOS so the sink
// does not block waiting on a queue nobody feeds.
class WebmFrameEmptySourceThread : public WebmFrameSourceThread {
public:
    WebmFrameEmptySourceThread(int type, LinkedBlockingQueue<const sp<WebmFrame> >& sink)
        : WebmFrameSourceThread(type, sink) {
    }
    void run() { mSink.push(WebmFrame::EOS); }
    int64_t getDurationUs() { return 0; }
};

//=================================================================================================

// Real producer: reads buffers from a MediaSource and forwards them as
// WebmFrames, handling start-time rebasing and pause/resume bookkeeping.
class WebmFrameMediaSourceThread: public WebmFrameSourceThread {
public:
    WebmFrameMediaSourceThread(
        const sp<MediaSource>& source,
        int type,
        LinkedBlockingQueue<const sp<WebmFrame> >& sink,
        uint64_t timeCodeScale,
        int64_t startTimeRealUs,
        int32_t startTimeOffsetMs,
        int numPeers,
        bool realTimeRecording);

    void run();
    status_t start();
    status_t resume();
    status_t pause();
    status_t stop();
    int64_t getDurationUs() {
        return mTrackDurationUs;
    }

private:
    const sp<MediaSource> mSource;
    const uint64_t mTimeCodeScale;  // ns per timecode unit
    uint64_t mStartTimeUs;

    // Flags are written by the controlling thread and read by run().
    volatile bool mDone;
    volatile bool mPaused;
    volatile bool mResumed;
    volatile bool mStarted;
    volatile bool mReachedEOS;
    int64_t mTrackDurationUs;

    void clearFlags();
};
} /* namespace android */

#endif /* WEBMFRAMETHREAD_H_ */
namespace {
// Number of lace-code bytes needed to encode a payload of |size| bytes in
// Xiph lacing: one 0xff byte per full 255 bytes, plus the terminating byte.
size_t XiphLaceCodeLen(size_t size) {
    return size / 0xff + 1;
}

// Encodes |size| as a Xiph lace code into |buf| (a run of 0xff bytes
// followed by the remainder).  Returns the number of bytes written.
size_t XiphLaceEnc(uint8_t *buf, size_t size) {
    size_t n = 0;
    while (size >= 0xff) {
        buf[n++] = 0xff;
        size -= 0xff;
    }
    buf[n++] = static_cast<uint8_t>(size);
    return n;
}
}
// Constructs a writer over an already-open fd; the fd is dup()ed so the
// caller keeps ownership of its own descriptor.
WebmWriter::WebmWriter(int fd)
    : mFd(dup(fd)),
      mInitCheck(mFd < 0 ? NO_INIT : OK),
      mTimeCodeScale(1000000),
      mStartTimestampUs(0),
      mStartTimeOffsetMs(0),
      mSegmentOffset(0),
      mSegmentDataStart(0),
      mInfoOffset(0),
      mInfoSize(0),
      mTracksOffset(0),
      mCuesOffset(0),
      mPaused(false),
      mStarted(false),
      mIsFileSizeLimitExplicitlyRequested(false),
      mIsRealTimeRecording(false),
      mStreamableFile(true),
      mEstimatedCuesSize(0) {
    mStreams[kAudioIndex] = WebmStream(kAudioType, "Audio", &WebmWriter::audioTrack);
    mStreams[kVideoIndex] = WebmStream(kVideoType, "Video", &WebmWriter::videoTrack);
    // The sink receives mFd/mSegmentDataStart by reference so it observes
    // the values computed later in start().
    mSinkThread = new WebmFrameSinkThread(
        mFd,
        mSegmentDataStart,
        mStreams[kVideoIndex].mSink,
        mStreams[kAudioIndex].mSink,
        mCuePoints);
}

// Constructs a writer that opens (creating/truncating) the given path.
// mInitCheck stays NO_INIT when the open fails.
WebmWriter::WebmWriter(const char *filename)
    : mInitCheck(NO_INIT),
      mTimeCodeScale(1000000),
      mStartTimestampUs(0),
      mStartTimeOffsetMs(0),
      mSegmentOffset(0),
      mSegmentDataStart(0),
      mInfoOffset(0),
      mInfoSize(0),
      mTracksOffset(0),
      mCuesOffset(0),
      mPaused(false),
      mStarted(false),
      mIsFileSizeLimitExplicitlyRequested(false),
      mIsRealTimeRecording(false),
      mStreamableFile(true),
      mEstimatedCuesSize(0) {
    mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
    if (mFd >= 0) {
        ALOGV("fd %d; flags: %o", mFd, fcntl(mFd, F_GETFL, 0));
        mInitCheck = OK;
    }
    mStreams[kAudioIndex] = WebmStream(kAudioType, "Audio", &WebmWriter::audioTrack);
    mStreams[kVideoIndex] = WebmStream(kVideoType, "Video", &WebmWriter::videoTrack);
    mSinkThread = new WebmFrameSinkThread(
        mFd,
        mSegmentDataStart,
        mStreams[kVideoIndex].mSink,
        mStreams[kAudioIndex].mSink,
        mCuePoints);
}

// Builds the Mkv TrackEntry element for the video stream from its format.
// static
sp<WebmElement> WebmWriter::videoTrack(const sp<MetaData>& md) {
    int32_t width, height;
    CHECK(md->findInt32(kKeyWidth, &width));
    CHECK(md->findInt32(kKeyHeight, &height));
    return WebmElement::VideoTrackEntry(width, height);
}

// Builds the Mkv TrackEntry for the audio (Vorbis) stream.  The CodecPrivate
// payload is the three Vorbis headers in Xiph lacing: a count byte (2),
// lace codes for the first two headers (the third's size is implicit), then
// the header payloads back to back.  headerData2 is a fixed synthetic
// comment header.
// static
sp<WebmElement> WebmWriter::audioTrack(const sp<MetaData>& md) {
    int32_t nChannels, samplerate;
    uint32_t type;
    const void *headerData1;
    const char headerData2[] = { 3, 'v', 'o', 'r', 'b', 'i', 's', 7, 0, 0, 0,
            'a', 'n', 'd', 'r', 'o', 'i', 'd', 0, 0, 0, 0, 1 };
    const void *headerData3;
    size_t headerSize1, headerSize2 = sizeof(headerData2), headerSize3;

    CHECK(md->findInt32(kKeyChannelCount, &nChannels));
    CHECK(md->findInt32(kKeySampleRate, &samplerate));
    CHECK(md->findData(kKeyVorbisInfo, &type, &headerData1, &headerSize1));
    CHECK(md->findData(kKeyVorbisBooks, &type, &headerData3, &headerSize3));

    // 1 count byte + lace codes for headers 1 and 2 + all three payloads.
    size_t codecPrivateSize = 1;
    codecPrivateSize += XiphLaceCodeLen(headerSize1);
    codecPrivateSize += XiphLaceCodeLen(headerSize2);
    codecPrivateSize += headerSize1 + headerSize2 + headerSize3;

    off_t off = 0;
    sp<ABuffer> codecPrivateBuf = new ABuffer(codecPrivateSize);
    uint8_t *codecPrivateData = codecPrivateBuf->data();
    codecPrivateData[off++] = 2;  // number of laced headers minus one

    off += XiphLaceEnc(codecPrivateData + off, headerSize1);
    off += XiphLaceEnc(codecPrivateData + off, headerSize2);

    memcpy(codecPrivateData + off, headerData1, headerSize1);
    off += headerSize1;
    memcpy(codecPrivateData + off, headerData2, headerSize2);
    off += headerSize2;
    memcpy(codecPrivateData + off, headerData3, headerSize3);

    sp<WebmElement> entry = WebmElement::AudioTrackEntry(
            nChannels,
            samplerate,
            codecPrivateBuf);
    return entry;
}

// Counts streams that have had a source added (i.e. a TrackEntry built).
size_t WebmWriter::numTracks() {
    Mutex::Autolock autolock(mLock);

    size_t numTracks = 0;
    for (size_t i = 0; i < kMaxStreams; ++i) {
        if (mStreams[i].mTrackEntry != NULL) {
            numTracks++;
        }
    }

    return numTracks;
}
// Estimates how many bytes to reserve up front for the Cues element so the
// file can be streamable.  Returns the reservation (estimate doubled).
uint64_t WebmWriter::estimateCuesSize(int32_t bitRate) {
    // This implementation is based on estimateMoovBoxSize in MPEG4Writer.
    //
    // Statistical analysis shows that metadata usually accounts
    // for a small portion of the total file size, usually < 0.6%.

    // The default MIN_MOOV_BOX_SIZE is set to 0.6% x 1MB / 2,
    // where 1MB is the common file size limit for MMS application.
    // The default MAX _MOOV_BOX_SIZE value is based on about 3
    // minute video recording with a bit rate about 3 Mbps, because
    // statistics also show that most of the video captured are going
    // to be less than 3 minutes.

    // If the estimation is wrong, we will pay the price of wasting
    // some reserved space. This should not happen so often statistically.
    static const int32_t factor = 2;
    static const int64_t MIN_CUES_SIZE = 3 * 1024;  // 3 KB
    static const int64_t MAX_CUES_SIZE = (180 * 3000000 * 6LL / 8000);
    int64_t size = MIN_CUES_SIZE;

    // Max file size limit is set
    if (mMaxFileSizeLimitBytes != 0 && mIsFileSizeLimitExplicitlyRequested) {
        size = mMaxFileSizeLimitBytes * 6 / 1000;
    }

    // Max file duration limit is set
    if (mMaxFileDurationLimitUs != 0) {
        if (bitRate > 0) {
            int64_t size2 = ((mMaxFileDurationLimitUs * bitRate * 6) / 1000 / 8000000);
            if (mMaxFileSizeLimitBytes != 0 && mIsFileSizeLimitExplicitlyRequested) {
                // When both file size and duration limits are set,
                // we use the smaller limit of the two.
                if (size > size2) {
                    size = size2;
                }
            } else {
                // Only max file duration limit is set
                size = size2;
            }
        }
    }

    // Clamp the estimate into [MIN_CUES_SIZE, MAX_CUES_SIZE].
    if (size < MIN_CUES_SIZE) {
        size = MIN_CUES_SIZE;
    }

    // Any long duration recording will be probably end up with
    // non-streamable webm file.
    if (size > MAX_CUES_SIZE) {
        size = MAX_CUES_SIZE;
    }

    ALOGV("limits: %" PRId64 "/%" PRId64 " bytes/us,"
            " bit rate: %d bps and the estimated cues size %" PRId64 " bytes",
            mMaxFileSizeLimitBytes, mMaxFileDurationLimitUs, bitRate, size);
    return factor * size;
}

// Lazily creates the source thread for stream |idx|: a real media-source
// thread when a source was added, otherwise a dummy thread that just emits
// EOS so the sink doesn't block on the missing track.
void WebmWriter::initStream(size_t idx) {
    if (mStreams[idx].mThread != NULL) {
        return;
    }
    if (mStreams[idx].mSource == NULL) {
        ALOGV("adding dummy source ... ");
        mStreams[idx].mThread = new WebmFrameEmptySourceThread(
                mStreams[idx].mType, mStreams[idx].mSink);
    } else {
        ALOGV("adding source %p", mStreams[idx].mSource.get());
        mStreams[idx].mThread = new WebmFrameMediaSourceThread(
                mStreams[idx].mSource,
                mStreams[idx].mType,
                mStreams[idx].mSink,
                mTimeCodeScale,
                mStartTimestampUs,
                mStartTimeOffsetMs,
                numTracks(),
                mIsRealTimeRecording);
    }
}

// Closes the output file and marks the writer unusable until re-created.
void WebmWriter::release() {
    close(mFd);
    mFd = -1;
    mInitCheck = NO_INIT;
    mStarted = false;
}
// Stops all threads, then patches up the file: writes the Cues (into the
// reserved space when they fit, else appended), fixes the Segment size and
// duration fields, and writes the SeekHead.  Finally closes the file.
status_t WebmWriter::reset() {
    if (mInitCheck != OK) {
        return OK;
    } else {
        if (!mStarted) {
            release();
            return OK;
        }
    }

    // Stop every source thread, remembering the first error and the
    // min/max track durations.
    status_t err = OK;
    int64_t maxDurationUs = 0;
    int64_t minDurationUs = 0x7fffffffffffffffLL;
    for (int i = 0; i < kMaxStreams; ++i) {
        if (mStreams[i].mThread == NULL) {
            continue;
        }

        status_t status = mStreams[i].mThread->stop();
        if (err == OK && status != OK) {
            err = status;
        }

        int64_t durationUs = mStreams[i].mThread->getDurationUs();
        if (durationUs > maxDurationUs) {
            maxDurationUs = durationUs;
        }
        if (durationUs < minDurationUs) {
            minDurationUs = durationUs;
        }
    }

    if (numTracks() > 1) {
        ALOGD("Duration from tracks range is [%" PRId64 ", %" PRId64 "] us", minDurationUs, maxDurationUs);
    }

    mSinkThread->stop();

    // Do not write out movie header on error.
    if (err != OK) {
        release();
        return err;
    }

    sp<WebmElement> cues = new WebmMaster(kMkvCues, mCuePoints);
    uint64_t cuesSize = cues->totalSize();
    // TRICKY Even when the cues do fit in the space we reserved, if they do not fit
    // perfectly, we still need to check if there is enough "extra space" to write an
    // EBML void element.
    if (cuesSize != mEstimatedCuesSize && cuesSize > mEstimatedCuesSize - kMinEbmlVoidSize) {
        // Cues don't fit in the reservation: append them at the end of the
        // file instead and record the new offset.
        mCuesOffset = ::lseek(mFd, 0, SEEK_CUR);
        cues->write(mFd, cuesSize);
    } else {
        // Cues fit: write them into the reserved slot and pad the remainder
        // with an EBML Void element.
        uint64_t spaceSize;
        ::lseek(mFd, mCuesOffset, SEEK_SET);
        cues->write(mFd, cuesSize);
        sp<WebmElement> space = new EbmlVoid(mEstimatedCuesSize - cuesSize);
        space->write(mFd, spaceSize);
    }

    mCuePoints.clear();
    mStreams[kVideoIndex].mSink.clear();
    mStreams[kAudioIndex].mSink.clear();

    // Back-patch the Segment element's size field now that the total file
    // size is known.
    uint8_t bary[sizeof(uint64_t)];
    uint64_t totalSize = ::lseek(mFd, 0, SEEK_END);
    uint64_t segmentSize = totalSize - mSegmentDataStart;
    ::lseek(mFd, mSegmentOffset + sizeOf(kMkvSegment), SEEK_SET);
    uint64_t segmentSizeCoded = encodeUnsigned(segmentSize, sizeOf(kMkvUnknownLength));
    serializeCodedUnsigned(segmentSizeCoded, bary);
    ::write(mFd, bary, sizeOf(kMkvUnknownLength));

    // Back-patch the SegmentInfo duration (in timecode units).
    uint64_t size;
    uint64_t durationOffset = mInfoOffset + sizeOf(kMkvInfo) + sizeOf(mInfoSize)
        + sizeOf(kMkvSegmentDuration) + sizeOf(sizeof(double));
    sp<WebmElement> duration = new WebmFloat(
            kMkvSegmentDuration,
            (double) (maxDurationUs * 1000 / mTimeCodeScale));
    duration->serializePayload(bary);
    ::lseek(mFd, durationOffset, SEEK_SET);
    ::write(mFd, bary, sizeof(double));

    // Write the SeekHead (with offsets relative to the segment data start)
    // into the space reserved at the top of the segment; pad with Void.
    List<sp<WebmElement> > seekEntries;
    seekEntries.push_back(WebmElement::SeekEntry(kMkvInfo, mInfoOffset - mSegmentDataStart));
    seekEntries.push_back(WebmElement::SeekEntry(kMkvTracks, mTracksOffset - mSegmentDataStart));
    seekEntries.push_back(WebmElement::SeekEntry(kMkvCues, mCuesOffset - mSegmentDataStart));
    sp<WebmElement> seekHead = new WebmMaster(kMkvSeekHead, seekEntries);

    uint64_t metaSeekSize;
    ::lseek(mFd, mSegmentDataStart, SEEK_SET);
    seekHead->write(mFd, metaSeekSize);

    uint64_t spaceSize;
    sp<WebmElement> space = new EbmlVoid(kMaxMetaSeekSize - metaSeekSize);
    space->write(mFd, spaceSize);

    release();
    return err;
}
Mutex::Autolock l(mLock); + if (mStarted) { + ALOGE("Attempt to add source AFTER recording is started"); + return UNKNOWN_ERROR; + } + + // At most 2 tracks can be supported. + if (mStreams[kVideoIndex].mTrackEntry != NULL + && mStreams[kAudioIndex].mTrackEntry != NULL) { + ALOGE("Too many tracks (2) to add"); + return ERROR_UNSUPPORTED; + } + + CHECK(source != NULL); + + // A track of type other than video or audio is not supported. + const char *mime; + source->getFormat()->findCString(kKeyMIMEType, &mime); + const char *vp8 = MEDIA_MIMETYPE_VIDEO_VP8; + const char *vorbis = MEDIA_MIMETYPE_AUDIO_VORBIS; + + size_t streamIndex; + if (!strncasecmp(mime, vp8, strlen(vp8))) { + streamIndex = kVideoIndex; + } else if (!strncasecmp(mime, vorbis, strlen(vorbis))) { + streamIndex = kAudioIndex; + } else { + ALOGE("Track (%s) other than %s or %s is not supported", mime, vp8, vorbis); + return ERROR_UNSUPPORTED; + } + + // No more than one video or one audio track is supported. + if (mStreams[streamIndex].mTrackEntry != NULL) { + ALOGE("%s track already exists", mStreams[streamIndex].mName); + return ERROR_UNSUPPORTED; + } + + // This is the first track of either audio or video. + // Go ahead to add the track. 
+ mStreams[streamIndex].mSource = source; + mStreams[streamIndex].mTrackEntry = mStreams[streamIndex].mMakeTrack(source->getFormat()); + + return OK; +} + +status_t WebmWriter::start(MetaData *params) { + if (mInitCheck != OK) { + return UNKNOWN_ERROR; + } + + if (mStreams[kVideoIndex].mTrackEntry == NULL + && mStreams[kAudioIndex].mTrackEntry == NULL) { + ALOGE("No source added"); + return INVALID_OPERATION; + } + + if (mMaxFileSizeLimitBytes != 0) { + mIsFileSizeLimitExplicitlyRequested = true; + } + + if (params) { + int32_t isRealTimeRecording; + params->findInt32(kKeyRealTimeRecording, &isRealTimeRecording); + mIsRealTimeRecording = isRealTimeRecording; + } + + if (mStarted) { + if (mPaused) { + mPaused = false; + mStreams[kAudioIndex].mThread->resume(); + mStreams[kVideoIndex].mThread->resume(); + } + return OK; + } + + if (params) { + int32_t tcsl; + if (params->findInt32(kKeyTimeScale, &tcsl)) { + mTimeCodeScale = tcsl; + } + } + CHECK_GT(mTimeCodeScale, 0); + ALOGV("movie time scale: %" PRIu64, mTimeCodeScale); + + /* + * When the requested file size limit is small, the priority + * is to meet the file size limit requirement, rather than + * to make the file streamable. mStreamableFile does not tell + * whether the actual recorded file is streamable or not. + */ + mStreamableFile = (!mMaxFileSizeLimitBytes) + || (mMaxFileSizeLimitBytes >= kMinStreamableFileSizeInBytes); + + /* + * Write various metadata. 
+ */ + sp<WebmElement> ebml, segment, info, seekHead, tracks, cues; + ebml = WebmElement::EbmlHeader(); + segment = new WebmMaster(kMkvSegment); + seekHead = new EbmlVoid(kMaxMetaSeekSize); + info = WebmElement::SegmentInfo(mTimeCodeScale, 0); + + List<sp<WebmElement> > children; + for (size_t i = 0; i < kMaxStreams; ++i) { + if (mStreams[i].mTrackEntry != NULL) { + children.push_back(mStreams[i].mTrackEntry); + } + } + tracks = new WebmMaster(kMkvTracks, children); + + if (!mStreamableFile) { + cues = NULL; + } else { + int32_t bitRate = -1; + if (params) { + params->findInt32(kKeyBitRate, &bitRate); + } + mEstimatedCuesSize = estimateCuesSize(bitRate); + CHECK_GE(mEstimatedCuesSize, 8); + cues = new EbmlVoid(mEstimatedCuesSize); + } + + sp<WebmElement> elems[] = { ebml, segment, seekHead, info, tracks, cues }; + size_t nElems = sizeof(elems) / sizeof(elems[0]); + uint64_t offsets[nElems]; + uint64_t sizes[nElems]; + for (uint32_t i = 0; i < nElems; i++) { + WebmElement *e = elems[i].get(); + if (!e) { + continue; + } + + uint64_t size; + offsets[i] = ::lseek(mFd, 0, SEEK_CUR); + sizes[i] = e->mSize; + e->write(mFd, size); + } + + mSegmentOffset = offsets[1]; + mSegmentDataStart = offsets[2]; + mInfoOffset = offsets[3]; + mInfoSize = sizes[3]; + mTracksOffset = offsets[4]; + mCuesOffset = offsets[5]; + + // start threads + if (params) { + params->findInt64(kKeyTime, &mStartTimestampUs); + } + + initStream(kAudioIndex); + initStream(kVideoIndex); + + mStreams[kAudioIndex].mThread->start(); + mStreams[kVideoIndex].mThread->start(); + mSinkThread->start(); + + mStarted = true; + return OK; +} + +status_t WebmWriter::pause() { + if (mInitCheck != OK) { + return OK; + } + mPaused = true; + status_t err = OK; + for (int i = 0; i < kMaxStreams; ++i) { + if (mStreams[i].mThread == NULL) { + continue; + } + status_t status = mStreams[i].mThread->pause(); + if (status != OK) { + err = status; + } + } + return err; +} + +status_t WebmWriter::stop() { + return reset(); +} + 
// EOS is reached once the sink thread has drained both queues and exited.
bool WebmWriter::reachedEOS() {
    return !mSinkThread->running();
}
} /* namespace android */

#ifndef WEBMWRITER_H_
#define WEBMWRITER_H_

#include "WebmConstants.h"
#include "WebmFrameThread.h"
#include "LinkedBlockingQueue.h"

#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MediaWriter.h>

#include <utils/Errors.h>
#include <utils/Mutex.h>
#include <utils/StrongPointer.h>

#include <stdint.h>

using namespace webm;

namespace android {

// MediaWriter that muxes up to one VP8 video and one Vorbis audio track
// into a WebM container, using one source thread per track plus a sink
// (muxer) thread.
class WebmWriter : public MediaWriter {
public:
    WebmWriter(int fd);
    WebmWriter(const char *filename);
    ~WebmWriter() { reset(); }


    status_t addSource(const sp<MediaSource> &source);
    status_t start(MetaData *param = NULL);
    status_t stop();
    status_t pause();
    bool reachedEOS();

    void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; }
    int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; }

private:
    int mFd;                 // output file descriptor (owned; dup()ed or opened)
    status_t mInitCheck;     // OK once a valid fd is held

    uint64_t mTimeCodeScale; // ns per timecode unit (default 1000000 = 1ms)
    int64_t mStartTimestampUs;
    int32_t mStartTimeOffsetMs;

    // File offsets recorded while writing the header, needed by reset()
    // to back-patch sizes/durations and write the SeekHead.
    uint64_t mSegmentOffset;
    uint64_t mSegmentDataStart;
    uint64_t mInfoOffset;
    uint64_t mInfoSize;
    uint64_t mTracksOffset;
    uint64_t mCuesOffset;

    bool mPaused;
    bool mStarted;
    bool mIsFileSizeLimitExplicitlyRequested;
    bool mIsRealTimeRecording;
    bool mStreamableFile;        // whether space is reserved for Cues up front
    uint64_t mEstimatedCuesSize;

    Mutex mLock;                 // guards track registration (addSource/numTracks)
    List<sp<WebmElement> > mCuePoints;

    enum {
        kAudioIndex     =  0,
        kVideoIndex     =  1,
        kMaxStreams     =  2,
    };

    // Per-track bundle: media source, its TrackEntry element, the thread
    // that drains it and the queue it feeds the sink through.
    struct WebmStream {
        int mType;
        const char *mName;
        sp<WebmElement> (*mMakeTrack)(const sp<MetaData>&);

        sp<MediaSource> mSource;
        sp<WebmElement> mTrackEntry;
        sp<WebmFrameSourceThread> mThread;
        LinkedBlockingQueue<const sp<WebmFrame> > mSink;

        WebmStream()
            : mType(kInvalidType),
              mName("Invalid"),
              mMakeTrack(NULL) {
        }

        WebmStream(int type, const char *name, sp<WebmElement> (*makeTrack)(const sp<MetaData>&))
            : mType(type),
              mName(name),
              mMakeTrack(makeTrack) {
        }

        // Assignment copies only the identity fields; source/thread/queue
        // members keep their defaults.
        WebmStream &operator=(const WebmStream &other) {
            mType = other.mType;
            mName = other.mName;
            mMakeTrack = other.mMakeTrack;
            return *this;
        }
    };
    WebmStream mStreams[kMaxStreams];

    sp<WebmFrameSinkThread> mSinkThread;

    size_t numTracks();
    uint64_t estimateCuesSize(int32_t bitRate);
    void initStream(size_t idx);
    void release();
    status_t reset();

    static sp<WebmElement> videoTrack(const sp<MetaData>& md);
    static sp<WebmElement> audioTrack(const sp<MetaData>& md);

    DISALLOW_EVIL_CONSTRUCTORS(WebmWriter);
};

} /* namespace android */
#endif /* WEBMWRITER_H_ */
<media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> @@ -749,7 +750,8 @@ status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer( mExtractor = new NuMediaExtractor; - status_t err = mExtractor->setDataSource(mMediaPath.c_str()); + status_t err = mExtractor->setDataSource( + NULL /* httpService */, mMediaPath.c_str()); if (err != OK) { return err; @@ -1053,7 +1055,7 @@ status_t WifiDisplaySource::PlaybackSession::addVideoSource( err = source->setMaxAcquiredBufferCount(numInputBuffers); CHECK_EQ(err, (status_t)OK); - mBufferQueue = source->getBufferQueue(); + mProducer = source->getProducer(); return OK; } @@ -1077,7 +1079,7 @@ status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) { } sp<IGraphicBufferProducer> WifiDisplaySource::PlaybackSession::getSurfaceTexture() { - return mBufferQueue; + return mProducer; } void WifiDisplaySource::PlaybackSession::requestIDRFrame() { diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h index 5c8ee94..2824143 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.h +++ b/media/libstagefright/wifi-display/source/PlaybackSession.h @@ -25,7 +25,6 @@ namespace android { struct ABuffer; -struct BufferQueue; struct IHDCP; struct IGraphicBufferProducer; struct MediaPuller; @@ -111,7 +110,7 @@ private: int64_t mLastLifesignUs; - sp<BufferQueue> mBufferQueue; + sp<IGraphicBufferProducer> mProducer; KeyedVector<size_t, sp<Track> > mTracks; ssize_t mVideoTrackIndex; diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.cpp b/media/libstagefright/wifi-display/source/RepeaterSource.cpp index cc8dee3..59d7e6e 100644 --- a/media/libstagefright/wifi-display/source/RepeaterSource.cpp +++ b/media/libstagefright/wifi-display/source/RepeaterSource.cpp @@ -79,6 +79,8 @@ status_t RepeaterSource::stop() { ALOGV("stopping"); + status_t err = mSource->stop(); + if (mLooper != 
NULL) { mLooper->stop(); mLooper.clear(); @@ -92,7 +94,6 @@ status_t RepeaterSource::stop() { mBuffer = NULL; } - status_t err = mSource->stop(); ALOGV("stopped"); diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk index b3f7b1b..bb86dfc 100644 --- a/media/libstagefright/yuv/Android.mk +++ b/media/libstagefright/yuv/Android.mk @@ -12,5 +12,7 @@ LOCAL_SHARED_LIBRARIES := \ LOCAL_MODULE:= libstagefright_yuv +LOCAL_CFLAGS += -Werror + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp index 7b9000b..bb3e2fd 100644 --- a/media/libstagefright/yuv/YUVImage.cpp +++ b/media/libstagefright/yuv/YUVImage.cpp @@ -226,8 +226,8 @@ void YUVImage::fastCopyRectangle420Planar( &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement); int32_t yDestOffsetIncrement; - int32_t uDestOffsetIncrement; - int32_t vDestOffsetIncrement; + int32_t uDestOffsetIncrement = 0; + int32_t vDestOffsetIncrement = 0; destImage.getOffsetIncrementsPerDataRow( &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement); @@ -309,7 +309,7 @@ void YUVImage::fastCopyRectangle420SemiPlanar( int32_t yDestOffsetIncrement; int32_t uDestOffsetIncrement; - int32_t vDestOffsetIncrement; + int32_t vDestOffsetIncrement = 0; destImage.getOffsetIncrementsPerDataRow( &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement); @@ -393,9 +393,9 @@ bool YUVImage::writeToPPM(const char *filename) const { fprintf(fp, "255\n"); for (int32_t y = 0; y < mHeight; ++y) { for (int32_t x = 0; x < mWidth; ++x) { - uint8_t yValue; - uint8_t uValue; - uint8_t vValue; + uint8_t yValue = 0u; + uint8_t uValue = 0u; + uint8_t vValue = 0u; getPixelValue(x, y, &yValue, &uValue, & vValue); uint8_t rValue; |