content/media/omx/MediaOmxReader.cpp

author:      Michael Schloh von Bennewitz <michael@schloh.com>
date:        Fri, 16 Jan 2015 04:50:19 +0100
branch:      TOR_BUG_9701
changeset:   13:44a2da4a2ab2
permissions: -rw-r--r--

Replace accessor implementation with direct member state manipulation, by
request https://trac.torproject.org/projects/tor/ticket/9701#comment:32
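
For orientation, here is a minimal sketch of the pattern the commit message refers to, assuming the earlier revision routed the DEBUG-only activity flag through an accessor; the names ReaderSketch, SetIdleViaAccessor, SetIdleDirect, and SetIsActive below are illustrative and not taken from the patch history. The revision in this file takes the direct route, as MediaOmxReader::SetIdle() and SetActive() do further down.

    // Illustrative sketch only; hypothetical names, not from the patch history.
    class ReaderSketch {
    public:
      // Earlier shape: the DEBUG-only flag changes only through an accessor.
      void SetIdleViaAccessor() { SetIsActive(false); }

      // Shape requested in ticket 9701 comment:32 and used by
      // MediaOmxReader::SetIdle()/SetActive() below: touch the member directly.
      void SetIdleDirect() {
    #ifdef DEBUG
        mIsActive = false;
    #endif
      }

    private:
      void SetIsActive(bool aActive) {
    #ifdef DEBUG
        mIsActive = aActive;
    #else
        (void)aActive;  // flag only exists in DEBUG builds
    #endif
      }
    #ifdef DEBUG
      bool mIsActive = true;
    #endif
    };

In non-DEBUG builds both forms are no-ops; the difference is only where the #ifdef lives.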

/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaOmxReader.h"

#include "MediaDecoderStateMachine.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/dom/TimeRanges.h"
#include "MediaResource.h"
#include "VideoUtils.h"
#include "MediaOmxDecoder.h"
#include "AbstractMediaDecoder.h"
#include "AudioChannelService.h"
#include "OmxDecoder.h"
#include "MPAPI.h"
#include "gfx2DGlue.h"

#ifdef MOZ_AUDIO_OFFLOAD
#include <stagefright/Utils.h>
#include <cutils/properties.h>
#include <stagefright/MetaData.h>
#endif

#define MAX_DROPPED_FRAMES 25
// Try not to spend more than this much time in a single call to DecodeVideoFrame.
#define MAX_VIDEO_DECODE_SECONDS 0.1

using namespace mozilla::gfx;
using namespace android;

namespace mozilla {

#ifdef PR_LOGGING
extern PRLogModuleInfo* gMediaDecoderLog;
#define DECODER_LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg)
#else
#define DECODER_LOG(type, msg)
#endif

MediaOmxReader::MediaOmxReader(AbstractMediaDecoder *aDecoder)
  : MediaDecoderReader(aDecoder)
  , mHasVideo(false)
  , mHasAudio(false)
  , mVideoSeekTimeUs(-1)
  , mAudioSeekTimeUs(-1)
  , mSkipCount(0)
#ifdef DEBUG
  , mIsActive(true)
#endif
{
#ifdef PR_LOGGING
  if (!gMediaDecoderLog) {
    gMediaDecoderLog = PR_NewLogModule("MediaDecoder");
  }
#endif

  mAudioChannel = dom::AudioChannelService::GetDefaultAudioChannel();
}

MediaOmxReader::~MediaOmxReader()
{
  ReleaseMediaResources();
  ReleaseDecoder();
  mOmxDecoder.clear();
}

nsresult MediaOmxReader::Init(MediaDecoderReader* aCloneDonor)
{
  return NS_OK;
}

bool MediaOmxReader::IsWaitingMediaResources()
{
  if (!mOmxDecoder.get()) {
    return false;
  }
  return mOmxDecoder->IsWaitingMediaResources();
}

bool MediaOmxReader::IsDormantNeeded()
{
  if (!mOmxDecoder.get()) {
    return false;
  }
  return mOmxDecoder->IsDormantNeeded();
}

void MediaOmxReader::ReleaseMediaResources()
{
  ResetDecode();
  // Before freeing a video codec, all video buffers need to be released,
  // even those still held by the graphics pipeline.
  VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
  if (container) {
    container->ClearCurrentFrame();
  }
  if (mOmxDecoder.get()) {
    mOmxDecoder->ReleaseMediaResources();
  }
}

void MediaOmxReader::ReleaseDecoder()
{
  if (mOmxDecoder.get()) {
    mOmxDecoder->ReleaseDecoder();
  }
}

nsresult MediaOmxReader::InitOmxDecoder()
{
  if (!mOmxDecoder.get()) {
    // Register sniffers if they are not already registered in this process.
    DataSource::RegisterDefaultSniffers();
    mDecoder->GetResource()->SetReadMode(MediaCacheStream::MODE_METADATA);

    sp<DataSource> dataSource = new MediaStreamSource(mDecoder->GetResource(), mDecoder);
    dataSource->initCheck();

    mExtractor = MediaExtractor::Create(dataSource);
    if (!mExtractor.get()) {
      return NS_ERROR_FAILURE;
    }
    mOmxDecoder = new OmxDecoder(mDecoder->GetResource(), mDecoder);
    if (!mOmxDecoder->Init(mExtractor)) {
      return NS_ERROR_FAILURE;
    }
  }
  return NS_OK;
}

nsresult MediaOmxReader::ReadMetadata(MediaInfo* aInfo,
                                      MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  MOZ_ASSERT(mIsActive);

  *aTags = nullptr;

  // Initialize the internal OMX Decoder.
  nsresult rv = InitOmxDecoder();
  if (NS_FAILED(rv)) {
    return rv;
  }

  if (!mOmxDecoder->TryLoad()) {
    return NS_ERROR_FAILURE;
  }

#ifdef MOZ_AUDIO_OFFLOAD
  CheckAudioOffload();
#endif

  if (IsWaitingMediaResources()) {
    return NS_OK;
  }

  // Set the total duration (the max of the audio and video track).
  int64_t durationUs;
  mOmxDecoder->GetDuration(&durationUs);
  if (durationUs) {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mDecoder->SetMediaDuration(durationUs);
  }

  // Check the MediaExtractor flags to see whether the source is seekable.
  mDecoder->SetMediaSeekable(mExtractor->flags() & MediaExtractor::CAN_SEEK);

  if (mOmxDecoder->HasVideo()) {
    int32_t displayWidth, displayHeight, width, height;
    mOmxDecoder->GetVideoParameters(&displayWidth, &displayHeight,
                                    &width, &height);
    nsIntRect pictureRect(0, 0, width, height);

    // Validate the container-reported frame and pictureRect sizes. This ensures
    // that our video frame creation code doesn't overflow.
    nsIntSize displaySize(displayWidth, displayHeight);
    nsIntSize frameSize(width, height);
    if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
      return NS_ERROR_FAILURE;
    }

    // Video track's frame sizes will not overflow. Activate the video track.
    mHasVideo = mInfo.mVideo.mHasVideo = true;
    mInfo.mVideo.mDisplay = displaySize;
    mPicture = pictureRect;
    mInitialFrame = frameSize;
    VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
    if (container) {
      container->SetCurrentFrame(gfxIntSize(displaySize.width, displaySize.height),
                                 nullptr,
                                 mozilla::TimeStamp::Now());
    }
  }

  if (mOmxDecoder->HasAudio()) {
    int32_t numChannels, sampleRate;
    mOmxDecoder->GetAudioParameters(&numChannels, &sampleRate);
    mHasAudio = mInfo.mAudio.mHasAudio = true;
    mInfo.mAudio.mChannels = numChannels;
    mInfo.mAudio.mRate = sampleRate;
  }

  *aInfo = mInfo;

  return NS_OK;
}

bool MediaOmxReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                      int64_t aTimeThreshold)
{
  MOZ_ASSERT(mIsActive);

  // Record number of frames decoded and parsed. Automatically update the
  // stats counters using the AutoNotifyDecoded stack-based class.
  uint32_t parsed = 0, decoded = 0;
  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);

  bool doSeek = mVideoSeekTimeUs != -1;
  if (doSeek) {
    aTimeThreshold = mVideoSeekTimeUs;
  }

  TimeStamp start = TimeStamp::Now();

  // Read next frame. Don't let this loop run for too long.
  while ((TimeStamp::Now() - start) < TimeDuration::FromSeconds(MAX_VIDEO_DECODE_SECONDS)) {
    MPAPI::VideoFrame frame;
    frame.mGraphicBuffer = nullptr;
    frame.mShouldSkip = false;
    if (!mOmxDecoder->ReadVideo(&frame, aTimeThreshold, aKeyframeSkip, doSeek)) {
      return false;
    }
    doSeek = false;

    // Ignore empty buffers, which stagefright media reads will sporadically return.
    if (frame.mSize == 0 && !frame.mGraphicBuffer) {
      continue;
    }

    parsed++;
    if (frame.mShouldSkip && mSkipCount < MAX_DROPPED_FRAMES) {
      mSkipCount++;
      continue;
    }

    mSkipCount = 0;

    mVideoSeekTimeUs = -1;
    aKeyframeSkip = false;

    IntRect picture = ToIntRect(mPicture);
    if (frame.Y.mWidth != mInitialFrame.width ||
        frame.Y.mHeight != mInitialFrame.height) {

      // Frame size is different from what the container reports. This is legal,
      // and we will preserve the ratio of the crop rectangle as it
      // was reported relative to the picture size reported by the container.
      picture.x = (mPicture.x * frame.Y.mWidth) / mInitialFrame.width;
      picture.y = (mPicture.y * frame.Y.mHeight) / mInitialFrame.height;
      picture.width = (frame.Y.mWidth * mPicture.width) / mInitialFrame.width;
      picture.height = (frame.Y.mHeight * mPicture.height) / mInitialFrame.height;
    }

    // This is the approximate byte position in the stream.
    int64_t pos = mDecoder->GetResource()->Tell();

    VideoData *v;
    if (!frame.mGraphicBuffer) {

      VideoData::YCbCrBuffer b;
      b.mPlanes[0].mData = static_cast<uint8_t *>(frame.Y.mData);
      b.mPlanes[0].mStride = frame.Y.mStride;
      b.mPlanes[0].mHeight = frame.Y.mHeight;
      b.mPlanes[0].mWidth = frame.Y.mWidth;
      b.mPlanes[0].mOffset = frame.Y.mOffset;
      b.mPlanes[0].mSkip = frame.Y.mSkip;

      b.mPlanes[1].mData = static_cast<uint8_t *>(frame.Cb.mData);
      b.mPlanes[1].mStride = frame.Cb.mStride;
      b.mPlanes[1].mHeight = frame.Cb.mHeight;
      b.mPlanes[1].mWidth = frame.Cb.mWidth;
      b.mPlanes[1].mOffset = frame.Cb.mOffset;
      b.mPlanes[1].mSkip = frame.Cb.mSkip;

      b.mPlanes[2].mData = static_cast<uint8_t *>(frame.Cr.mData);
      b.mPlanes[2].mStride = frame.Cr.mStride;
      b.mPlanes[2].mHeight = frame.Cr.mHeight;
      b.mPlanes[2].mWidth = frame.Cr.mWidth;
      b.mPlanes[2].mOffset = frame.Cr.mOffset;
      b.mPlanes[2].mSkip = frame.Cr.mSkip;

      v = VideoData::Create(mInfo.mVideo,
                            mDecoder->GetImageContainer(),
                            pos,
                            frame.mTimeUs,
                            1, // We don't know the duration.
                            b,
                            frame.mKeyFrame,
                            -1,
                            picture);
    } else {
      v = VideoData::Create(mInfo.mVideo,
                            mDecoder->GetImageContainer(),
                            pos,
                            frame.mTimeUs,
                            1, // We don't know the duration.
                            frame.mGraphicBuffer,
                            frame.mKeyFrame,
                            -1,
                            picture);
    }

    if (!v) {
      NS_WARNING("Unable to create VideoData");
      return false;
    }

    decoded++;
    NS_ASSERTION(decoded <= parsed, "Expect to decode fewer frames than parsed in MediaPlugin...");

    mVideoQueue.Push(v);

    break;
  }

  return true;
}

void MediaOmxReader::NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset)
{
  android::OmxDecoder *omxDecoder = mOmxDecoder.get();

  if (omxDecoder) {
    omxDecoder->NotifyDataArrived(aBuffer, aLength, aOffset);
  }
}

bool MediaOmxReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  MOZ_ASSERT(mIsActive);

  // This is the approximate byte position in the stream.
  int64_t pos = mDecoder->GetResource()->Tell();

  // Read next frame
  MPAPI::AudioFrame source;
  if (!mOmxDecoder->ReadAudio(&source, mAudioSeekTimeUs)) {
    return false;
  }
  mAudioSeekTimeUs = -1;

  // Ignore empty buffers, which stagefright media reads will sporadically return.
  if (source.mSize == 0) {
    return true;
  }

  uint32_t frames = source.mSize / (source.mAudioChannels *
                                    sizeof(AudioDataValue));

  typedef AudioCompactor::NativeCopy OmxCopy;
  return mAudioCompactor.Push(pos,
                              source.mTimeUs,
                              source.mAudioSampleRate,
                              frames,
                              source.mAudioChannels,
                              OmxCopy(static_cast<uint8_t *>(source.mData),
                                      source.mSize,
                                      source.mAudioChannels));
}

nsresult MediaOmxReader::Seek(int64_t aTarget, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  MOZ_ASSERT(mIsActive);

  ResetDecode();
  VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
  if (container && container->GetImageContainer()) {
    container->GetImageContainer()->ClearAllImagesExceptFront();
  }

  if (mHasAudio && mHasVideo) {
    // The OMXDecoder seeks/demuxes audio and video streams separately. So if
    // we seek both audio and video to aTarget, the audio stream can typically
    // seek closer to the seek target, since typically every audio block is
    // a sync point, whereas for video there are only keyframes once every few
    // seconds. So if we have both audio and video, we must seek the video
    // stream to the preceding keyframe first, get the stream time, and then
    // seek the audio stream to match the video stream's time. Otherwise, the
    // audio and video streams won't be in sync after the seek.
    mVideoSeekTimeUs = aTarget;
    const VideoData* v = DecodeToFirstVideoData();
    mAudioSeekTimeUs = v ? v->mTime : aTarget;
  } else {
    mAudioSeekTimeUs = mVideoSeekTimeUs = aTarget;
  }

  return NS_OK;
}

static uint64_t BytesToTime(int64_t offset, uint64_t length, uint64_t durationUs) {
  double perc = double(offset) / double(length);
  if (perc > 1.0)
    perc = 1.0;
  return uint64_t(double(durationUs) * perc);
}

void MediaOmxReader::SetIdle() {
#ifdef DEBUG
  mIsActive = false;
#endif
  if (!mOmxDecoder.get()) {
    return;
  }
  mOmxDecoder->Pause();
}

void MediaOmxReader::SetActive() {
#ifdef DEBUG
  mIsActive = true;
#endif
  if (!mOmxDecoder.get()) {
    return;
  }
  DebugOnly<nsresult> result = mOmxDecoder->Play();
  NS_ASSERTION(result == NS_OK, "OmxDecoder should be in play state to continue decoding");
}

#ifdef MOZ_AUDIO_OFFLOAD
void MediaOmxReader::CheckAudioOffload()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  char offloadProp[128];
  property_get("audio.offload.disable", offloadProp, "0");
  bool offloadDisable = atoi(offloadProp) != 0;
  if (offloadDisable) {
    return;
  }

  mAudioOffloadTrack = mOmxDecoder->GetAudioOffloadTrack();
  sp<MetaData> meta = (mAudioOffloadTrack.get()) ?
      mAudioOffloadTrack->getFormat() : nullptr;

  // Support audio offload only when there is no video and no streaming.
  bool hasNoVideo = !mOmxDecoder->HasVideo();
  bool isNotStreaming
      = mDecoder->GetResource()->IsDataCachedToEndOfResource(0);

  // Not much benefit in trying to offload other channel types. Most of them
  // aren't supported, and their duration would typically be less than a minute.
  bool isTypeMusic = mAudioChannel == dom::AudioChannel::Content;

  DECODER_LOG(PR_LOG_DEBUG, ("%s meta %p, no video %d, no streaming %d,"
      " channel type %d", __FUNCTION__, meta.get(), hasNoVideo,
      isNotStreaming, mAudioChannel));

  if ((meta.get()) && hasNoVideo && isNotStreaming && isTypeMusic &&
      canOffloadStream(meta, false, false, AUDIO_STREAM_MUSIC)) {
    DECODER_LOG(PR_LOG_DEBUG, ("Can offload this audio stream"));
    mDecoder->SetCanOffloadAudio(true);
  }
}
#endif

} // namespace mozilla
