content/media/plugins/MediaPluginReader.cpp

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/content/media/plugins/MediaPluginReader.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,416 @@
     1.4 +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
     1.5 +/* vim:set ts=2 sw=2 sts=2 et cindent: */
     1.6 +/* This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this file,
     1.8 + * You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +#include "MediaPluginReader.h"
    1.10 +#include "mozilla/TimeStamp.h"
    1.11 +#include "mozilla/dom/TimeRanges.h"
    1.12 +#include "mozilla/gfx/Point.h"
    1.13 +#include "MediaResource.h"
    1.14 +#include "VideoUtils.h"
    1.15 +#include "MediaPluginDecoder.h"
    1.16 +#include "MediaPluginHost.h"
    1.17 +#include "MediaDecoderStateMachine.h"
    1.18 +#include "ImageContainer.h"
    1.19 +#include "AbstractMediaDecoder.h"
    1.20 +#include "gfx2DGlue.h"
    1.21 +
    1.22 +namespace mozilla {
    1.23 +
    1.24 +using namespace mozilla::gfx;
    1.25 +
    1.26 +typedef mozilla::layers::Image Image;
    1.27 +typedef mozilla::layers::PlanarYCbCrImage PlanarYCbCrImage;
    1.28 +
// Constructs a reader for the given decoder and MIME content type.
// The actual plugin decoder is created lazily in ReadMetadata(); the
// seek times start at -1, meaning "no seek pending".
MediaPluginReader::MediaPluginReader(AbstractMediaDecoder *aDecoder,
                                     const nsACString& aContentType) :
  MediaDecoderReader(aDecoder),
  mType(aContentType),
  mPlugin(nullptr),
  mHasAudio(false),
  mHasVideo(false),
  mVideoSeekTimeUs(-1),
  mAudioSeekTimeUs(-1)
{
}
    1.40 +
MediaPluginReader::~MediaPluginReader()
{
  // ResetDecode() releases the buffered last video frame and destroys
  // the plugin decoder (if one was created).
  ResetDecode();
}
    1.45 +
// No per-reader initialization is needed; the clone donor is unused.
nsresult MediaPluginReader::Init(MediaDecoderReader* aCloneDonor)
{
  return NS_OK;
}
    1.50 +
    1.51 +nsresult MediaPluginReader::ReadMetadata(MediaInfo* aInfo,
    1.52 +                                         MetadataTags** aTags)
    1.53 +{
    1.54 +  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
    1.55 +
    1.56 +  if (!mPlugin) {
    1.57 +    mPlugin = GetMediaPluginHost()->CreateDecoder(mDecoder->GetResource(), mType);
    1.58 +    if (!mPlugin) {
    1.59 +      return NS_ERROR_FAILURE;
    1.60 +    }
    1.61 +  }
    1.62 +
    1.63 +  // Set the total duration (the max of the audio and video track).
    1.64 +  int64_t durationUs;
    1.65 +  mPlugin->GetDuration(mPlugin, &durationUs);
    1.66 +  if (durationUs) {
    1.67 +    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    1.68 +    mDecoder->SetMediaDuration(durationUs);
    1.69 +  }
    1.70 +
    1.71 +  if (mPlugin->HasVideo(mPlugin)) {
    1.72 +    int32_t width, height;
    1.73 +    mPlugin->GetVideoParameters(mPlugin, &width, &height);
    1.74 +    nsIntRect pictureRect(0, 0, width, height);
    1.75 +
    1.76 +    // Validate the container-reported frame and pictureRect sizes. This ensures
    1.77 +    // that our video frame creation code doesn't overflow.
    1.78 +    nsIntSize displaySize(width, height);
    1.79 +    nsIntSize frameSize(width, height);
    1.80 +    if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
    1.81 +      return NS_ERROR_FAILURE;
    1.82 +    }
    1.83 +
    1.84 +    // Video track's frame sizes will not overflow. Activate the video track.
    1.85 +    mHasVideo = mInfo.mVideo.mHasVideo = true;
    1.86 +    mInfo.mVideo.mDisplay = displaySize;
    1.87 +    mPicture = pictureRect;
    1.88 +    mInitialFrame = frameSize;
    1.89 +    VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
    1.90 +    if (container) {
    1.91 +      container->SetCurrentFrame(gfxIntSize(displaySize.width, displaySize.height),
    1.92 +                                 nullptr,
    1.93 +                                 mozilla::TimeStamp::Now());
    1.94 +    }
    1.95 +  }
    1.96 +
    1.97 +  if (mPlugin->HasAudio(mPlugin)) {
    1.98 +    int32_t numChannels, sampleRate;
    1.99 +    mPlugin->GetAudioParameters(mPlugin, &numChannels, &sampleRate);
   1.100 +    mHasAudio = mInfo.mAudio.mHasAudio = true;
   1.101 +    mInfo.mAudio.mChannels = numChannels;
   1.102 +    mInfo.mAudio.mRate = sampleRate;
   1.103 +  }
   1.104 +
   1.105 + *aInfo = mInfo;
   1.106 + *aTags = nullptr;
   1.107 +  return NS_OK;
   1.108 +}
   1.109 +
   1.110 +// Resets all state related to decoding, emptying all buffers etc.
   1.111 +nsresult MediaPluginReader::ResetDecode()
   1.112 +{
   1.113 +  if (mLastVideoFrame) {
   1.114 +    mLastVideoFrame = nullptr;
   1.115 +  }
   1.116 +  if (mPlugin) {
   1.117 +    GetMediaPluginHost()->DestroyDecoder(mPlugin);
   1.118 +    mPlugin = nullptr;
   1.119 +  }
   1.120 +
   1.121 +  return NS_OK;
   1.122 +}
   1.123 +
// Decodes the next video frame and pushes the *previous* frame into
// mVideoQueue. MPAPI does not report frame durations, so one frame is
// always held back in mLastVideoFrame until the following frame's start
// time is known. Returns false at end of stream (after flushing the
// buffered frame) or on a frame-creation failure, true otherwise.
// aKeyframeSkip is an in-out flag (keyframe skipping is currently
// disabled — see the #if 0 below); aTimeThreshold drops frames that
// would end before it.
bool MediaPluginReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                         int64_t aTimeThreshold)
{
  // Record number of frames decoded and parsed. Automatically update the
  // stats counters using the AutoNotifyDecoded stack-based class.
  uint32_t parsed = 0, decoded = 0;
  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);

  // Throw away the currently buffered frame if we are seeking.
  if (mLastVideoFrame && mVideoSeekTimeUs != -1) {
    mLastVideoFrame = nullptr;
  }

  // The callback lets the plugin allocate its output directly in an
  // image from our ImageContainer.
  ImageBufferCallback bufferCallback(mDecoder->GetImageContainer());
  nsRefPtr<Image> currentImage;

  // Read next frame
  while (true) {
    MPAPI::VideoFrame frame;
    if (!mPlugin->ReadVideo(mPlugin, &frame, mVideoSeekTimeUs, &bufferCallback)) {
      // We reached the end of the video stream. If we have a buffered
      // video frame, push it the video queue using the total duration
      // of the video as the end time.
      if (mLastVideoFrame) {
        int64_t durationUs;
        mPlugin->GetDuration(mPlugin, &durationUs);
        durationUs = std::max<int64_t>(durationUs - mLastVideoFrame->mTime, 0);
        mVideoQueue.Push(VideoData::ShallowCopyUpdateDuration(mLastVideoFrame,
                                                              durationUs));
        mLastVideoFrame = nullptr;
      }
      return false;
    }
    // A successful read consumes any pending seek.
    mVideoSeekTimeUs = -1;

    if (aKeyframeSkip) {
      // Disable keyframe skipping for now as
      // stagefright doesn't seem to be telling us
      // when a frame is a keyframe.
#if 0
      if (!frame.mKeyFrame) {
        ++parsed;
        continue;
      }
#endif
      aKeyframeSkip = false;
    }

    // Ignore empty frames; report progress and try again next call.
    if (frame.mSize == 0)
      return true;

    currentImage = bufferCallback.GetImage();
    int64_t pos = mDecoder->GetResource()->Tell();
    IntRect picture = ToIntRect(mPicture);

    nsAutoPtr<VideoData> v;
    if (currentImage) {
      // The plugin wrote into a layers image via the callback.
      gfx::IntSize frameSize = currentImage->GetSize();
      if (frameSize.width != mInitialFrame.width ||
          frameSize.height != mInitialFrame.height) {
        // Frame size is different from what the container reports. This is legal,
        // and we will preserve the ratio of the crop rectangle as it
        // was reported relative to the picture size reported by the container.
        picture.x = (mPicture.x * frameSize.width) / mInitialFrame.width;
        picture.y = (mPicture.y * frameSize.height) / mInitialFrame.height;
        picture.width = (frameSize.width * mPicture.width) / mInitialFrame.width;
        picture.height = (frameSize.height * mPicture.height) / mInitialFrame.height;
      }

      v = VideoData::CreateFromImage(mInfo.mVideo,
                                     mDecoder->GetImageContainer(),
                                     pos,
                                     frame.mTimeUs,
                                     1, // We don't know the duration yet.
                                     currentImage,
                                     frame.mKeyFrame,
                                     -1,
                                     picture);
    } else {
      // Assume YUV
      VideoData::YCbCrBuffer b;
      b.mPlanes[0].mData = static_cast<uint8_t *>(frame.Y.mData);
      b.mPlanes[0].mStride = frame.Y.mStride;
      b.mPlanes[0].mHeight = frame.Y.mHeight;
      b.mPlanes[0].mWidth = frame.Y.mWidth;
      b.mPlanes[0].mOffset = frame.Y.mOffset;
      b.mPlanes[0].mSkip = frame.Y.mSkip;

      b.mPlanes[1].mData = static_cast<uint8_t *>(frame.Cb.mData);
      b.mPlanes[1].mStride = frame.Cb.mStride;
      b.mPlanes[1].mHeight = frame.Cb.mHeight;
      b.mPlanes[1].mWidth = frame.Cb.mWidth;
      b.mPlanes[1].mOffset = frame.Cb.mOffset;
      b.mPlanes[1].mSkip = frame.Cb.mSkip;

      b.mPlanes[2].mData = static_cast<uint8_t *>(frame.Cr.mData);
      b.mPlanes[2].mStride = frame.Cr.mStride;
      b.mPlanes[2].mHeight = frame.Cr.mHeight;
      b.mPlanes[2].mWidth = frame.Cr.mWidth;
      b.mPlanes[2].mOffset = frame.Cr.mOffset;
      b.mPlanes[2].mSkip = frame.Cr.mSkip;

      if (frame.Y.mWidth != mInitialFrame.width ||
          frame.Y.mHeight != mInitialFrame.height) {

        // Frame size is different from what the container reports. This is legal,
        // and we will preserve the ratio of the crop rectangle as it
        // was reported relative to the picture size reported by the container.
        picture.x = (mPicture.x * frame.Y.mWidth) / mInitialFrame.width;
        picture.y = (mPicture.y * frame.Y.mHeight) / mInitialFrame.height;
        picture.width = (frame.Y.mWidth * mPicture.width) / mInitialFrame.width;
        picture.height = (frame.Y.mHeight * mPicture.height) / mInitialFrame.height;
      }

      // This is the approximate byte position in the stream.
      v = VideoData::Create(mInfo.mVideo,
                            mDecoder->GetImageContainer(),
                            pos,
                            frame.mTimeUs,
                            1, // We don't know the duration yet.
                            b,
                            frame.mKeyFrame,
                            -1,
                            picture);
    }

    if (!v) {
      return false;
    }
    parsed++;
    decoded++;
    NS_ASSERTION(decoded <= parsed, "Expect to decode fewer frames than parsed in MediaPlugin...");

    // Since MPAPI doesn't give us the end time of frames, we keep one frame
    // buffered in MediaPluginReader and push it into the queue as soon
    // we read the following frame so we can use that frame's start time as
    // the end time of the buffered frame.
    if (!mLastVideoFrame) {
      mLastVideoFrame = v;
      continue;
    }

    // Calculate the duration as the timestamp of the current frame minus the
    // timestamp of the previous frame. We can then return the previously
    // decoded frame, and it will have a valid timestamp.
    int64_t duration = v->mTime - mLastVideoFrame->mTime;
    mLastVideoFrame = VideoData::ShallowCopyUpdateDuration(mLastVideoFrame, duration);

    // We have the start time of the next frame, so we can push the previous
    // frame into the queue, except if the end time is below the threshold,
    // in which case it wouldn't be displayed anyway.
    if (mLastVideoFrame->GetEndTime() < aTimeThreshold) {
      mLastVideoFrame = nullptr;
      continue;
    }

    mVideoQueue.Push(mLastVideoFrame.forget());

    // Buffer the current frame we just decoded.
    mLastVideoFrame = v;

    break;
  }

  return true;
}
   1.290 +
   1.291 +bool MediaPluginReader::DecodeAudioData()
   1.292 +{
   1.293 +  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
   1.294 +
   1.295 +  // This is the approximate byte position in the stream.
   1.296 +  int64_t pos = mDecoder->GetResource()->Tell();
   1.297 +
   1.298 +  // Read next frame
   1.299 +  MPAPI::AudioFrame source;
   1.300 +  if (!mPlugin->ReadAudio(mPlugin, &source, mAudioSeekTimeUs)) {
   1.301 +    return false;
   1.302 +  }
   1.303 +  mAudioSeekTimeUs = -1;
   1.304 +
   1.305 +  // Ignore empty buffers which stagefright media read will sporadically return
   1.306 +  if (source.mSize == 0)
   1.307 +    return true;
   1.308 +
   1.309 +  uint32_t frames = source.mSize / (source.mAudioChannels *
   1.310 +                                    sizeof(AudioDataValue));
   1.311 +
   1.312 +  typedef AudioCompactor::NativeCopy MPCopy;
   1.313 +  return mAudioCompactor.Push(pos,
   1.314 +                              source.mTimeUs,
   1.315 +                              source.mAudioSampleRate,
   1.316 +                              frames,
   1.317 +                              source.mAudioChannels,
   1.318 +                              MPCopy(static_cast<uint8_t *>(source.mData),
   1.319 +                                     source.mSize,
   1.320 +                                     source.mAudioChannels));
   1.321 +}
   1.322 +
   1.323 +nsresult MediaPluginReader::Seek(int64_t aTarget, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime)
   1.324 +{
   1.325 +  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
   1.326 +
   1.327 +  mVideoQueue.Reset();
   1.328 +  mAudioQueue.Reset();
   1.329 +
   1.330 +  mAudioSeekTimeUs = mVideoSeekTimeUs = aTarget;
   1.331 +
   1.332 +  return NS_OK;
   1.333 +}
   1.334 +
// Wraps the image container the plugin should allocate output images from.
// The container may be null; operator() handles that case.
MediaPluginReader::ImageBufferCallback::ImageBufferCallback(mozilla::layers::ImageContainer *aImageContainer) :
  mImageContainer(aImageContainer)
{
}
   1.339 +
   1.340 +void *
   1.341 +MediaPluginReader::ImageBufferCallback::operator()(size_t aWidth, size_t aHeight,
   1.342 +                                                   MPAPI::ColorFormat aColorFormat)
   1.343 +{
   1.344 +  if (!mImageContainer) {
   1.345 +    NS_WARNING("No image container to construct an image");
   1.346 +    return nullptr;
   1.347 +  }
   1.348 +
   1.349 +  nsRefPtr<Image> image;
   1.350 +  switch(aColorFormat) {
   1.351 +    case MPAPI::RGB565:
   1.352 +      image = mozilla::layers::CreateSharedRGBImage(mImageContainer,
   1.353 +                                                    nsIntSize(aWidth, aHeight),
   1.354 +                                                    gfxImageFormat::RGB16_565);
   1.355 +      if (!image) {
   1.356 +        NS_WARNING("Could not create rgb image");
   1.357 +        return nullptr;
   1.358 +      }
   1.359 +
   1.360 +      mImage = image;
   1.361 +      return image->AsSharedImage()->GetBuffer();
   1.362 +    case MPAPI::I420:
   1.363 +      return CreateI420Image(aWidth, aHeight);
   1.364 +    default:
   1.365 +      NS_NOTREACHED("Color format not supported");
   1.366 +      return nullptr;
   1.367 +  }
   1.368 +}
   1.369 +
   1.370 +uint8_t *
   1.371 +MediaPluginReader::ImageBufferCallback::CreateI420Image(size_t aWidth,
   1.372 +                                                        size_t aHeight)
   1.373 +{
   1.374 +  mImage = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
   1.375 +  PlanarYCbCrImage *yuvImage = static_cast<PlanarYCbCrImage *>(mImage.get());
   1.376 +
   1.377 +  if (!yuvImage) {
   1.378 +    NS_WARNING("Could not create I420 image");
   1.379 +    return nullptr;
   1.380 +  }
   1.381 +
   1.382 +  size_t frameSize = aWidth * aHeight;
   1.383 +
   1.384 +  // Allocate enough for one full resolution Y plane
   1.385 +  // and two quarter resolution Cb/Cr planes.
   1.386 +  uint8_t *buffer = yuvImage->AllocateAndGetNewBuffer(frameSize * 3 / 2);
   1.387 +
   1.388 +  mozilla::layers::PlanarYCbCrData frameDesc;
   1.389 +
   1.390 +  frameDesc.mYChannel = buffer;
   1.391 +  frameDesc.mCbChannel = buffer + frameSize;
   1.392 +  frameDesc.mCrChannel = buffer + frameSize * 5 / 4;
   1.393 +
   1.394 +  frameDesc.mYSize = IntSize(aWidth, aHeight);
   1.395 +  frameDesc.mCbCrSize = IntSize(aWidth / 2, aHeight / 2);
   1.396 +
   1.397 +  frameDesc.mYStride = aWidth;
   1.398 +  frameDesc.mCbCrStride = aWidth / 2;
   1.399 +
   1.400 +  frameDesc.mYSkip = 0;
   1.401 +  frameDesc.mCbSkip = 0;
   1.402 +  frameDesc.mCrSkip = 0;
   1.403 +
   1.404 +  frameDesc.mPicX = 0;
   1.405 +  frameDesc.mPicY = 0;
   1.406 +  frameDesc.mPicSize = IntSize(aWidth, aHeight);
   1.407 +
   1.408 +  yuvImage->SetDataNoCopy(frameDesc);
   1.409 +
   1.410 +  return buffer;
   1.411 +}
   1.412 +
// Transfers ownership of the most recently allocated image to the caller;
// mImage is null afterwards, so a second call returns nullptr until the
// callback allocates again.
already_AddRefed<Image>
MediaPluginReader::ImageBufferCallback::GetImage()
{
  return mImage.forget();
}
   1.418 +
   1.419 +} // namespace mozilla

mercurial