content/media/omx/OmxDecoder.h

#include <stagefright/foundation/ABase.h>
#include <stagefright/foundation/AHandlerReflector.h>
#include <stagefright/foundation/ALooper.h>
#include <stagefright/MediaSource.h>
#include <stagefright/DataSource.h>
#include <utils/RefBase.h>
#include <stagefright/MediaExtractor.h>

#include "GonkNativeWindow.h"
#include "GonkNativeWindowClient.h"
#include "mozilla/layers/FenceUtils.h"
#include "MP3FrameParser.h"
#include "MPAPI.h"
#include "MediaResource.h"
#include "AbstractMediaDecoder.h"
#include "OMXCodecProxy.h"

namespace android {

class OmxDecoder;

// MediaStreamSource is a DataSource that reads from an MPAPI media stream.
class MediaStreamSource : public DataSource {
  typedef mozilla::MediaResource MediaResource;
  typedef mozilla::AbstractMediaDecoder AbstractMediaDecoder;

  Mutex mLock;
  nsRefPtr<MediaResource> mResource;
  AbstractMediaDecoder *mDecoder;
public:
  MediaStreamSource(MediaResource* aResource,
                    AbstractMediaDecoder *aDecoder);

  virtual status_t initCheck() const;
  virtual ssize_t readAt(off64_t offset, void *data, size_t size);
  virtual ssize_t readAt(off_t offset, void *data, size_t size) {
    return readAt(static_cast<off64_t>(offset), data, size);
  }
  virtual status_t getSize(off_t *size) {
    off64_t size64;
    status_t status = getSize(&size64);
    *size = size64;
    return status;
  }
  virtual status_t getSize(off64_t *size);
  virtual uint32_t flags() {
    return kWantsPrefetching;
  }

  virtual ~MediaStreamSource();

private:
  MediaStreamSource(const MediaStreamSource &);
  MediaStreamSource &operator=(const MediaStreamSource &);
};

class OmxDecoder : public OMXCodecProxy::EventListener {
  typedef MPAPI::AudioFrame AudioFrame;
  typedef MPAPI::VideoFrame VideoFrame;
  typedef mozilla::MP3FrameParser MP3FrameParser;
  typedef mozilla::MediaResource MediaResource;
  typedef mozilla::AbstractMediaDecoder AbstractMediaDecoder;
  typedef mozilla::layers::FenceHandle FenceHandle;
  typedef mozilla::layers::TextureClient TextureClient;

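  // Codec selection flags; the values correspond to the creation flags
  // understood by stagefright's OMXCodec.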
  enum {
    kPreferSoftwareCodecs = 1,
    kSoftwareCodecsOnly = 8,
    kHardwareCodecsOnly = 16,
  };

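  // Identifiers of the messages posted to mLooper and handled in
  // onMessageReceived().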
  enum {
    kNotifyPostReleaseVideoBuffer = 'noti',
    kNotifyStatusChanged = 'stat'
  };

  AbstractMediaDecoder *mDecoder;
  nsRefPtr<MediaResource> mResource;
  sp<GonkNativeWindow> mNativeWindow;
  sp<GonkNativeWindowClient> mNativeWindowClient;
  sp<MediaSource> mVideoTrack;
  sp<OMXCodecProxy> mVideoSource;
  sp<MediaSource> mAudioOffloadTrack;
  sp<MediaSource> mAudioTrack;
  sp<MediaSource> mAudioSource;
  int32_t mDisplayWidth;
  int32_t mDisplayHeight;
  int32_t mVideoWidth;
  int32_t mVideoHeight;
  int32_t mVideoColorFormat;
  int32_t mVideoStride;
  int32_t mVideoSliceHeight;
  int32_t mVideoRotation;
  int32_t mAudioChannels;
  int32_t mAudioSampleRate;
  int64_t mDurationUs;
  VideoFrame mVideoFrame;
  AudioFrame mAudioFrame;
  MP3FrameParser mMP3FrameParser;
  bool mIsMp3;

  // The lifetime of these buffers is managed by OMXCodec, as long as we
  // release them after use: see ReleaseVideoBuffer() and ReleaseAudioBuffer().
  MediaBuffer *mVideoBuffer;
  MediaBuffer *mAudioBuffer;

  struct BufferItem {
    BufferItem()
     : mMediaBuffer(nullptr)
    {
    }
    BufferItem(MediaBuffer* aMediaBuffer, const FenceHandle& aReleaseFenceHandle)
     : mMediaBuffer(aMediaBuffer)
     , mReleaseFenceHandle(aReleaseFenceHandle) {
    }

    MediaBuffer* mMediaBuffer;
    // A fence that signals when the current buffer is no longer being read.
    FenceHandle mReleaseFenceHandle;
  };

  // Holds the video MediaBuffers that are released during a video seek.
  // The held MediaBuffers are released soon after the seek completes.
  // OMXCodec does not accept MediaBuffers while it is seeking; returning a
  // MediaBuffer to OMXCodec during a seek triggers an assertion.
  Vector<BufferItem> mPendingVideoBuffers;
  // The lock protects mPendingVideoBuffers.
  Mutex mPendingVideoBuffersLock;

  // Indicates whether OMXCodec is seeking.
  bool mIsVideoSeeking;
  // Protects the pending video MediaBuffer release() operations, which are
  // called from multiple threads. The pending operations happen only during
  // video seeking. Holding mSeekLock for a long time could stall video
  // rendering, so the holding time should be kept to a minimum.
  Mutex mSeekLock;

  // ALooper is the message loop used in stagefright. It creates a thread
  // that receives and handles messages. ALooper is the native counterpart
  // of Looper in the Android Java framework:
  // http://developer.android.com/reference/android/os/Looper.html
  sp<ALooper> mLooper;
  // Delivers messages to the wrapped object (OmxDecoder).
  // AHandlerReflector is similar to Handler in the Android Java framework:
  // http://developer.android.com/reference/android/os/Handler.html
  sp<AHandlerReflector<OmxDecoder> > mReflector;

  // True if a read from the audio stream was done while reading the metadata.
  bool mAudioMetadataRead;

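  // Release mVideoBuffer / mAudioBuffer once their contents have been consumed.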
  void ReleaseVideoBuffer();
  void ReleaseAudioBuffer();
  // Call with mSeekLock held.
  void ReleaseAllPendingVideoBuffersLocked();

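  // Helpers that fill aFrame from decoded sample data; the video helpers
  // each handle the color format named in the method.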
  void PlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void CbYCrYFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void SemiPlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void SemiPlanarYVU420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  bool ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  bool ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize,
                    int32_t aAudioChannels, int32_t aAudioSampleRate);

  // True if the decoder is in a paused state.
  bool mAudioPaused;
  bool mVideoPaused;

public:
  OmxDecoder(MediaResource *aResource, AbstractMediaDecoder *aDecoder);
  ~OmxDecoder();

  // OMXCodecProxy::EventListener
  virtual void statusChanged();

  // The MediaExtractor provides the essential information needed to create an
  // OMXCodec instance, such as the video/audio codec, which can be retrieved
  // through MediaExtractor::getTrackMetaData().
  // In the general case, the extractor is created from an sp<DataSource> that
  // connects to a MediaResource such as ChannelMediaResource. Data is read
  // from the MediaResource to create an extractor suitable for the container
  // it holds.
  // Note: RTSP requires a custom extractor because its data has no container.
  bool Init(sp<MediaExtractor>& extractor);

  bool TryLoad();
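  // Dormant-state and media-resource handling: the underlying codec
  // resources can be released while the decoder is dormant and re-allocated
  // before playback resumes.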
  bool IsDormantNeeded();
  bool IsWaitingMediaResources();
  bool AllocateMediaResources();
  void ReleaseMediaResources();
  bool SetVideoFormat();
  bool SetAudioFormat();

  void ReleaseDecoder();

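  // Notify the decoder that more media data has been downloaded; for MP3
  // streams the data is fed to mMP3FrameParser to refine the estimated
  // duration.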
  bool NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset);

  void GetDuration(int64_t *durationUs) {
    *durationUs = mDurationUs;
  }

  void GetVideoParameters(int32_t* aDisplayWidth, int32_t* aDisplayHeight,
                          int32_t* aWidth, int32_t* aHeight) {
    *aDisplayWidth = mDisplayWidth;
    *aDisplayHeight = mDisplayHeight;
    *aWidth = mVideoWidth;
    *aHeight = mVideoHeight;
  }

  void GetAudioParameters(int32_t *numChannels, int32_t *sampleRate) {
    *numChannels = mAudioChannels;
    *sampleRate = mAudioSampleRate;
  }

  bool HasVideo() {
    return mVideoSource != nullptr;
  }

  bool HasAudio() {
    return mAudioSource != nullptr;
  }

  bool ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs,
                 bool aKeyframeSkip = false,
                 bool aDoSeek = false);
  bool ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs);

  MediaResource *GetResource() {
    return mResource;
  }

  // Change the decoder into a playing state.
  nsresult Play();

  // Change the decoder into a paused state.
  void Pause();

  // Post a kNotifyPostReleaseVideoBuffer message to OmxDecoder via ALooper.
  void PostReleaseVideoBuffer(MediaBuffer *aBuffer, const FenceHandle& aReleaseFenceHandle);
  // Receive a message from AHandlerReflector.
  // Called on the ALooper thread.
  void onMessageReceived(const sp<AMessage> &msg);

  int64_t ProcessCachedData(int64_t aOffset, bool aWaitForCompletion);

  sp<MediaSource> GetAudioOffloadTrack() { return mAudioOffloadTrack; }

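  // Recycle callback for video TextureClients; lets the decoder reclaim the
  // underlying buffer once it is no longer being displayed.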
  static void RecycleCallback(TextureClient* aClient, void* aClosure);
};

} // namespace android
