content/media/omx/MediaOmxReader.cpp

changeset 0:6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/content/media/omx/MediaOmxReader.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,470 @@
     1.4 +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
     1.5 +/* vim:set ts=2 sw=2 sts=2 et cindent: */
     1.6 +/* This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this file,
     1.8 + * You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#include "MediaOmxReader.h"
    1.11 +
    1.12 +#include "MediaDecoderStateMachine.h"
    1.13 +#include "mozilla/TimeStamp.h"
    1.14 +#include "mozilla/dom/TimeRanges.h"
    1.15 +#include "MediaResource.h"
    1.16 +#include "VideoUtils.h"
    1.17 +#include "MediaOmxDecoder.h"
    1.18 +#include "AbstractMediaDecoder.h"
    1.19 +#include "AudioChannelService.h"
    1.20 +#include "OmxDecoder.h"
    1.21 +#include "MPAPI.h"
    1.22 +#include "gfx2DGlue.h"
    1.23 +
    1.24 +#ifdef MOZ_AUDIO_OFFLOAD
    1.25 +#include <stagefright/Utils.h>
    1.26 +#include <cutils/properties.h>
    1.27 +#include <stagefright/MetaData.h>
    1.28 +#endif
    1.29 +
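          +// Maximum number of consecutive frames flagged for skipping that we will
          +// actually drop before decoding one anyway.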
    1.30 +#define MAX_DROPPED_FRAMES 25
    1.31 +// Try not to spend more than this much time in a single call to DecodeVideoFrame.
    1.32 +#define MAX_VIDEO_DECODE_SECONDS 0.1
    1.33 +
    1.34 +using namespace mozilla::gfx;
    1.35 +using namespace android;
    1.36 +
    1.37 +namespace mozilla {
    1.38 +
    1.39 +#ifdef PR_LOGGING
    1.40 +extern PRLogModuleInfo* gMediaDecoderLog;
    1.41 +#define DECODER_LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg)
    1.42 +#else
    1.43 +#define DECODER_LOG(type, msg)
    1.44 +#endif
    1.45 +
    1.46 +MediaOmxReader::MediaOmxReader(AbstractMediaDecoder *aDecoder)
    1.47 +  : MediaDecoderReader(aDecoder)
    1.48 +  , mHasVideo(false)
    1.49 +  , mHasAudio(false)
    1.50 +  , mVideoSeekTimeUs(-1)
    1.51 +  , mAudioSeekTimeUs(-1)
    1.52 +  , mSkipCount(0)
    1.53 +#ifdef DEBUG
    1.54 +  , mIsActive(true)
    1.55 +#endif
    1.56 +{
    1.57 +#ifdef PR_LOGGING
    1.58 +  if (!gMediaDecoderLog) {
    1.59 +    gMediaDecoderLog = PR_NewLogModule("MediaDecoder");
    1.60 +  }
    1.61 +#endif
    1.62 +
    1.63 +  mAudioChannel = dom::AudioChannelService::GetDefaultAudioChannel();
    1.64 +}
    1.65 +
    1.66 +MediaOmxReader::~MediaOmxReader()
    1.67 +{
    1.68 +  ReleaseMediaResources();
    1.69 +  ReleaseDecoder();
    1.70 +  mOmxDecoder.clear();
    1.71 +}
    1.72 +
    1.73 +nsresult MediaOmxReader::Init(MediaDecoderReader* aCloneDonor)
    1.74 +{
    1.75 +  return NS_OK;
    1.76 +}
    1.77 +
    1.78 +bool MediaOmxReader::IsWaitingMediaResources()
    1.79 +{
    1.80 +  if (!mOmxDecoder.get()) {
    1.81 +    return false;
    1.82 +  }
    1.83 +  return mOmxDecoder->IsWaitingMediaResources();
    1.84 +}
    1.85 +
    1.86 +bool MediaOmxReader::IsDormantNeeded()
    1.87 +{
    1.88 +  if (!mOmxDecoder.get()) {
    1.89 +    return false;
    1.90 +  }
    1.91 +  return mOmxDecoder->IsDormantNeeded();
    1.92 +}
    1.93 +
    1.94 +void MediaOmxReader::ReleaseMediaResources()
    1.95 +{
    1.96 +  ResetDecode();
     1.97 +  // Before freeing a video codec, all video buffers must be released,
     1.98 +  // even those still held by the graphics pipeline.
    1.99 +  VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
   1.100 +  if (container) {
   1.101 +    container->ClearCurrentFrame();
   1.102 +  }
   1.103 +  if (mOmxDecoder.get()) {
   1.104 +    mOmxDecoder->ReleaseMediaResources();
   1.105 +  }
   1.106 +}
   1.107 +
   1.108 +void MediaOmxReader::ReleaseDecoder()
   1.109 +{
   1.110 +  if (mOmxDecoder.get()) {
   1.111 +    mOmxDecoder->ReleaseDecoder();
   1.112 +  }
   1.113 +}
   1.114 +
   1.115 +nsresult MediaOmxReader::InitOmxDecoder()
   1.116 +{
   1.117 +  if (!mOmxDecoder.get()) {
    1.118 +    // Register sniffers if they are not already registered in this process.
   1.119 +    DataSource::RegisterDefaultSniffers();
   1.120 +    mDecoder->GetResource()->SetReadMode(MediaCacheStream::MODE_METADATA);
   1.121 +
   1.122 +    sp<DataSource> dataSource = new MediaStreamSource(mDecoder->GetResource(), mDecoder);
   1.123 +    dataSource->initCheck();
   1.124 +
   1.125 +    mExtractor = MediaExtractor::Create(dataSource);
   1.126 +    if (!mExtractor.get()) {
   1.127 +      return NS_ERROR_FAILURE;
   1.128 +    }
   1.129 +    mOmxDecoder = new OmxDecoder(mDecoder->GetResource(), mDecoder);
   1.130 +    if (!mOmxDecoder->Init(mExtractor)) {
   1.131 +      return NS_ERROR_FAILURE;
   1.132 +    }
   1.133 +  }
   1.134 +  return NS_OK;
   1.135 +}
   1.136 +
   1.137 +nsresult MediaOmxReader::ReadMetadata(MediaInfo* aInfo,
   1.138 +                                      MetadataTags** aTags)
   1.139 +{
   1.140 +  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
   1.141 +  MOZ_ASSERT(mIsActive);
   1.142 +
   1.143 +  *aTags = nullptr;
   1.144 +
   1.145 +  // Initialize the internal OMX Decoder.
   1.146 +  nsresult rv = InitOmxDecoder();
   1.147 +  if (NS_FAILED(rv)) {
   1.148 +    return rv;
   1.149 +  }
   1.150 +
   1.151 +  if (!mOmxDecoder->TryLoad()) {
   1.152 +    return NS_ERROR_FAILURE;
   1.153 +  }
   1.154 +
   1.155 +#ifdef MOZ_AUDIO_OFFLOAD
   1.156 +  CheckAudioOffload();
   1.157 +#endif
   1.158 +
   1.159 +  if (IsWaitingMediaResources()) {
   1.160 +    return NS_OK;
   1.161 +  }
   1.162 +
   1.163 +  // Set the total duration (the max of the audio and video track).
    1.164 +  int64_t durationUs = 0;
   1.165 +  mOmxDecoder->GetDuration(&durationUs);
   1.166 +  if (durationUs) {
   1.167 +    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   1.168 +    mDecoder->SetMediaDuration(durationUs);
   1.169 +  }
   1.170 +
    1.171 +  // Check the MediaExtractor flags to see whether the source is seekable.
   1.172 +  mDecoder->SetMediaSeekable(mExtractor->flags() & MediaExtractor::CAN_SEEK);
   1.173 +
   1.174 +  if (mOmxDecoder->HasVideo()) {
   1.175 +    int32_t displayWidth, displayHeight, width, height;
   1.176 +    mOmxDecoder->GetVideoParameters(&displayWidth, &displayHeight,
   1.177 +                                    &width, &height);
   1.178 +    nsIntRect pictureRect(0, 0, width, height);
   1.179 +
   1.180 +    // Validate the container-reported frame and pictureRect sizes. This ensures
   1.181 +    // that our video frame creation code doesn't overflow.
   1.182 +    nsIntSize displaySize(displayWidth, displayHeight);
   1.183 +    nsIntSize frameSize(width, height);
   1.184 +    if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
   1.185 +      return NS_ERROR_FAILURE;
   1.186 +    }
   1.187 +
   1.188 +    // Video track's frame sizes will not overflow. Activate the video track.
   1.189 +    mHasVideo = mInfo.mVideo.mHasVideo = true;
   1.190 +    mInfo.mVideo.mDisplay = displaySize;
   1.191 +    mPicture = pictureRect;
   1.192 +    mInitialFrame = frameSize;
   1.193 +    VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
   1.194 +    if (container) {
   1.195 +      container->SetCurrentFrame(gfxIntSize(displaySize.width, displaySize.height),
   1.196 +                                 nullptr,
   1.197 +                                 mozilla::TimeStamp::Now());
   1.198 +    }
   1.199 +  }
   1.200 +
   1.201 +  if (mOmxDecoder->HasAudio()) {
   1.202 +    int32_t numChannels, sampleRate;
   1.203 +    mOmxDecoder->GetAudioParameters(&numChannels, &sampleRate);
   1.204 +    mHasAudio = mInfo.mAudio.mHasAudio = true;
   1.205 +    mInfo.mAudio.mChannels = numChannels;
   1.206 +    mInfo.mAudio.mRate = sampleRate;
   1.207 +  }
   1.208 +
    1.209 +  *aInfo = mInfo;
   1.210 +
   1.211 +  return NS_OK;
   1.212 +}
   1.213 +
   1.214 +bool MediaOmxReader::DecodeVideoFrame(bool &aKeyframeSkip,
   1.215 +                                      int64_t aTimeThreshold)
   1.216 +{
   1.217 +  MOZ_ASSERT(mIsActive);
   1.218 +
   1.219 +  // Record number of frames decoded and parsed. Automatically update the
   1.220 +  // stats counters using the AutoNotifyDecoded stack-based class.
   1.221 +  uint32_t parsed = 0, decoded = 0;
   1.222 +  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
   1.223 +
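          +  // A pending seek (mVideoSeekTimeUs != -1) overrides the caller's time threshold.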
   1.224 +  bool doSeek = mVideoSeekTimeUs != -1;
   1.225 +  if (doSeek) {
   1.226 +    aTimeThreshold = mVideoSeekTimeUs;
   1.227 +  }
   1.228 +
   1.229 +  TimeStamp start = TimeStamp::Now();
   1.230 +
   1.231 +  // Read next frame. Don't let this loop run for too long.
   1.232 +  while ((TimeStamp::Now() - start) < TimeDuration::FromSeconds(MAX_VIDEO_DECODE_SECONDS)) {
   1.233 +    MPAPI::VideoFrame frame;
   1.234 +    frame.mGraphicBuffer = nullptr;
   1.235 +    frame.mShouldSkip = false;
   1.236 +    if (!mOmxDecoder->ReadVideo(&frame, aTimeThreshold, aKeyframeSkip, doSeek)) {
   1.237 +      return false;
   1.238 +    }
   1.239 +    doSeek = false;
   1.240 +
    1.241 +    // Ignore empty buffers, which the stagefright media read sporadically returns.
   1.242 +    if (frame.mSize == 0 && !frame.mGraphicBuffer) {
   1.243 +      continue;
   1.244 +    }
   1.245 +
   1.246 +    parsed++;
   1.247 +    if (frame.mShouldSkip && mSkipCount < MAX_DROPPED_FRAMES) {
   1.248 +      mSkipCount++;
   1.249 +      continue;
   1.250 +    }
   1.251 +
   1.252 +    mSkipCount = 0;
   1.253 +
   1.254 +    mVideoSeekTimeUs = -1;
   1.255 +    aKeyframeSkip = false;
   1.256 +
   1.257 +    IntRect picture = ToIntRect(mPicture);
   1.258 +    if (frame.Y.mWidth != mInitialFrame.width ||
   1.259 +        frame.Y.mHeight != mInitialFrame.height) {
   1.260 +
    1.261 +      // The frame size differs from what the container reported. This is legal;
    1.262 +      // scale the crop rectangle so that it keeps the same proportions
    1.263 +      // relative to the picture size reported by the container.
   1.264 +      picture.x = (mPicture.x * frame.Y.mWidth) / mInitialFrame.width;
   1.265 +      picture.y = (mPicture.y * frame.Y.mHeight) / mInitialFrame.height;
   1.266 +      picture.width = (frame.Y.mWidth * mPicture.width) / mInitialFrame.width;
   1.267 +      picture.height = (frame.Y.mHeight * mPicture.height) / mInitialFrame.height;
   1.268 +    }
   1.269 +
   1.270 +    // This is the approximate byte position in the stream.
   1.271 +    int64_t pos = mDecoder->GetResource()->Tell();
   1.272 +
   1.273 +    VideoData *v;
   1.274 +    if (!frame.mGraphicBuffer) {
   1.275 +
   1.276 +      VideoData::YCbCrBuffer b;
   1.277 +      b.mPlanes[0].mData = static_cast<uint8_t *>(frame.Y.mData);
   1.278 +      b.mPlanes[0].mStride = frame.Y.mStride;
   1.279 +      b.mPlanes[0].mHeight = frame.Y.mHeight;
   1.280 +      b.mPlanes[0].mWidth = frame.Y.mWidth;
   1.281 +      b.mPlanes[0].mOffset = frame.Y.mOffset;
   1.282 +      b.mPlanes[0].mSkip = frame.Y.mSkip;
   1.283 +
   1.284 +      b.mPlanes[1].mData = static_cast<uint8_t *>(frame.Cb.mData);
   1.285 +      b.mPlanes[1].mStride = frame.Cb.mStride;
   1.286 +      b.mPlanes[1].mHeight = frame.Cb.mHeight;
   1.287 +      b.mPlanes[1].mWidth = frame.Cb.mWidth;
   1.288 +      b.mPlanes[1].mOffset = frame.Cb.mOffset;
   1.289 +      b.mPlanes[1].mSkip = frame.Cb.mSkip;
   1.290 +
   1.291 +      b.mPlanes[2].mData = static_cast<uint8_t *>(frame.Cr.mData);
   1.292 +      b.mPlanes[2].mStride = frame.Cr.mStride;
   1.293 +      b.mPlanes[2].mHeight = frame.Cr.mHeight;
   1.294 +      b.mPlanes[2].mWidth = frame.Cr.mWidth;
   1.295 +      b.mPlanes[2].mOffset = frame.Cr.mOffset;
   1.296 +      b.mPlanes[2].mSkip = frame.Cr.mSkip;
   1.297 +
   1.298 +      v = VideoData::Create(mInfo.mVideo,
   1.299 +                            mDecoder->GetImageContainer(),
   1.300 +                            pos,
   1.301 +                            frame.mTimeUs,
   1.302 +                            1, // We don't know the duration.
   1.303 +                            b,
   1.304 +                            frame.mKeyFrame,
   1.305 +                            -1,
   1.306 +                            picture);
   1.307 +    } else {
   1.308 +      v = VideoData::Create(mInfo.mVideo,
   1.309 +                            mDecoder->GetImageContainer(),
   1.310 +                            pos,
   1.311 +                            frame.mTimeUs,
   1.312 +                            1, // We don't know the duration.
   1.313 +                            frame.mGraphicBuffer,
   1.314 +                            frame.mKeyFrame,
   1.315 +                            -1,
   1.316 +                            picture);
   1.317 +    }
   1.318 +
   1.319 +    if (!v) {
   1.320 +      NS_WARNING("Unable to create VideoData");
   1.321 +      return false;
   1.322 +    }
   1.323 +
   1.324 +    decoded++;
    1.325 +    NS_ASSERTION(decoded <= parsed, "Expect to decode no more frames than parsed in MediaOmxReader...");
   1.326 +
   1.327 +    mVideoQueue.Push(v);
   1.328 +
   1.329 +    break;
   1.330 +  }
   1.331 +
   1.332 +  return true;
   1.333 +}
   1.334 +
   1.335 +void MediaOmxReader::NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset)
   1.336 +{
   1.337 +  android::OmxDecoder *omxDecoder = mOmxDecoder.get();
   1.338 +
   1.339 +  if (omxDecoder) {
   1.340 +    omxDecoder->NotifyDataArrived(aBuffer, aLength, aOffset);
   1.341 +  }
   1.342 +}
   1.343 +
   1.344 +bool MediaOmxReader::DecodeAudioData()
   1.345 +{
   1.346 +  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
   1.347 +  MOZ_ASSERT(mIsActive);
   1.348 +
   1.349 +  // This is the approximate byte position in the stream.
   1.350 +  int64_t pos = mDecoder->GetResource()->Tell();
   1.351 +
   1.352 +  // Read next frame
   1.353 +  MPAPI::AudioFrame source;
   1.354 +  if (!mOmxDecoder->ReadAudio(&source, mAudioSeekTimeUs)) {
   1.355 +    return false;
   1.356 +  }
   1.357 +  mAudioSeekTimeUs = -1;
   1.358 +
    1.359 +  // Ignore empty buffers, which the stagefright media read sporadically returns.
   1.360 +  if (source.mSize == 0) {
   1.361 +    return true;
   1.362 +  }
   1.363 +
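          +  // mSize is in bytes; each frame holds one sample of sizeof(AudioDataValue)
          +  // bytes per channel.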
   1.364 +  uint32_t frames = source.mSize / (source.mAudioChannels *
   1.365 +                                    sizeof(AudioDataValue));
   1.366 +
   1.367 +  typedef AudioCompactor::NativeCopy OmxCopy;
   1.368 +  return mAudioCompactor.Push(pos,
   1.369 +                              source.mTimeUs,
   1.370 +                              source.mAudioSampleRate,
   1.371 +                              frames,
   1.372 +                              source.mAudioChannels,
   1.373 +                              OmxCopy(static_cast<uint8_t *>(source.mData),
   1.374 +                                      source.mSize,
   1.375 +                                      source.mAudioChannels));
   1.376 +}
   1.377 +
   1.378 +nsresult MediaOmxReader::Seek(int64_t aTarget, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime)
   1.379 +{
   1.380 +  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
   1.381 +  MOZ_ASSERT(mIsActive);
   1.382 +
   1.383 +  ResetDecode();
   1.384 +  VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
   1.385 +  if (container && container->GetImageContainer()) {
   1.386 +    container->GetImageContainer()->ClearAllImagesExceptFront();
   1.387 +  }
   1.388 +
   1.389 +  if (mHasAudio && mHasVideo) {
   1.390 +    // The OMXDecoder seeks/demuxes audio and video streams separately. So if
   1.391 +    // we seek both audio and video to aTarget, the audio stream can typically
   1.392 +    // seek closer to the seek target, since typically every audio block is
   1.393 +    // a sync point, whereas for video there are only keyframes once every few
   1.394 +    // seconds. So if we have both audio and video, we must seek the video
    1.395 +    // stream to the preceding keyframe first, get the stream time, and then
   1.396 +    // seek the audio stream to match the video stream's time. Otherwise, the
   1.397 +    // audio and video streams won't be in sync after the seek.
   1.398 +    mVideoSeekTimeUs = aTarget;
   1.399 +    const VideoData* v = DecodeToFirstVideoData();
   1.400 +    mAudioSeekTimeUs = v ? v->mTime : aTarget;
   1.401 +  } else {
   1.402 +    mAudioSeekTimeUs = mVideoSeekTimeUs = aTarget;
   1.403 +  }
   1.404 +
   1.405 +  return NS_OK;
   1.406 +}
   1.407 +
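          +// Estimate the stream time corresponding to a byte offset by linear
          +// interpolation over the total duration, assuming a roughly constant bitrate.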
   1.408 +static uint64_t BytesToTime(int64_t offset, uint64_t length, uint64_t durationUs) {
   1.409 +  double perc = double(offset) / double(length);
   1.410 +  if (perc > 1.0)
   1.411 +    perc = 1.0;
   1.412 +  return uint64_t(double(durationUs) * perc);
   1.413 +}
   1.414 +
   1.415 +void MediaOmxReader::SetIdle() {
   1.416 +#ifdef DEBUG
   1.417 +  mIsActive = false;
   1.418 +#endif
   1.419 +  if (!mOmxDecoder.get()) {
   1.420 +    return;
   1.421 +  }
   1.422 +  mOmxDecoder->Pause();
   1.423 +}
   1.424 +
   1.425 +void MediaOmxReader::SetActive() {
   1.426 +#ifdef DEBUG
   1.427 +  mIsActive = true;
   1.428 +#endif
   1.429 +  if (!mOmxDecoder.get()) {
   1.430 +    return;
   1.431 +  }
   1.432 +  DebugOnly<nsresult> result = mOmxDecoder->Play();
   1.433 +  NS_ASSERTION(result == NS_OK, "OmxDecoder should be in play state to continue decoding");
   1.434 +}
   1.435 +
   1.436 +#ifdef MOZ_AUDIO_OFFLOAD
   1.437 +void MediaOmxReader::CheckAudioOffload()
   1.438 +{
   1.439 +  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
   1.440 +
   1.441 +  char offloadProp[128];
   1.442 +  property_get("audio.offload.disable", offloadProp, "0");
    1.443 +  bool offloadDisable = atoi(offloadProp) != 0;
   1.444 +  if (offloadDisable) {
   1.445 +    return;
   1.446 +  }
   1.447 +
   1.448 +  mAudioOffloadTrack = mOmxDecoder->GetAudioOffloadTrack();
   1.449 +  sp<MetaData> meta = (mAudioOffloadTrack.get()) ?
   1.450 +      mAudioOffloadTrack->getFormat() : nullptr;
   1.451 +
    1.452 +  // Audio offload is supported only when there is no video and the data is not being streamed.
   1.453 +  bool hasNoVideo = !mOmxDecoder->HasVideo();
   1.454 +  bool isNotStreaming
   1.455 +      = mDecoder->GetResource()->IsDataCachedToEndOfResource(0);
   1.456 +
    1.457 +  // There is little benefit in offloading other channel types: most of them
    1.458 +  // aren't supported, and their duration would typically be less than a minute.
   1.459 +  bool isTypeMusic = mAudioChannel == dom::AudioChannel::Content;
   1.460 +
   1.461 +  DECODER_LOG(PR_LOG_DEBUG, ("%s meta %p, no video %d, no streaming %d,"
   1.462 +      " channel type %d", __FUNCTION__, meta.get(), hasNoVideo,
   1.463 +      isNotStreaming, mAudioChannel));
   1.464 +
   1.465 +  if ((meta.get()) && hasNoVideo && isNotStreaming && isTypeMusic &&
   1.466 +      canOffloadStream(meta, false, false, AUDIO_STREAM_MUSIC)) {
   1.467 +    DECODER_LOG(PR_LOG_DEBUG, ("Can offload this audio stream"));
   1.468 +    mDecoder->SetCanOffloadAudio(true);
   1.469 +  }
   1.470 +}
   1.471 +#endif
   1.472 +
   1.473 +} // namespace mozilla
