#include <stagefright/foundation/ABase.h>
#include <stagefright/foundation/AHandlerReflector.h>
#include <stagefright/foundation/ALooper.h>
#include <stagefright/MediaSource.h>
#include <stagefright/DataSource.h>
#include <stagefright/MediaExtractor.h>
#include <utils/threads.h>
#include <utils/RefBase.h>

#include "GonkNativeWindow.h"
#include "GonkNativeWindowClient.h"
#include "mozilla/layers/FenceUtils.h"
#include "MP3FrameParser.h"
#include "MPAPI.h"
#include "MediaResource.h"
#include "AbstractMediaDecoder.h"
#include "OMXCodecProxy.h"

namespace android {
class OmxDecoder;
}

namespace android {

// MediaStreamSource is a DataSource that reads from a MPAPI media stream.
class MediaStreamSource : public DataSource {
  typedef mozilla::MediaResource MediaResource;
  typedef mozilla::AbstractMediaDecoder AbstractMediaDecoder;

  Mutex mLock;
  nsRefPtr<MediaResource> mResource;
  AbstractMediaDecoder *mDecoder;
public:
  MediaStreamSource(MediaResource* aResource,
                    AbstractMediaDecoder *aDecoder);

  virtual status_t initCheck() const;
  virtual ssize_t readAt(off64_t offset, void *data, size_t size);
  virtual ssize_t readAt(off_t offset, void *data, size_t size) {
    return readAt(static_cast<off64_t>(offset), data, size);
  }
  virtual status_t getSize(off_t *size) {
    off64_t size64;
    status_t status = getSize(&size64);
    *size = size64;
    return status;
  }
  virtual status_t getSize(off64_t *size);
  virtual uint32_t flags() {
    return kWantsPrefetching;
  }

  virtual ~MediaStreamSource();

private:
  MediaStreamSource(const MediaStreamSource &);
  MediaStreamSource &operator=(const MediaStreamSource &);
};
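
// A minimal sketch of how a DataSource such as MediaStreamSource is handed
// to stagefright (illustrative only; the variable names are hypothetical and
// the real wiring lives in OmxDecoder.cpp):
//
//   sp<DataSource> dataSource = new MediaStreamSource(resource, decoder);
//   if (dataSource->initCheck() != OK) {
//     return false;
//   }
//   sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
//
// The extractor exposes the container's tracks through getTrack() and
// getTrackMetaData(), which OmxDecoder::Init() below consumes.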

class OmxDecoder : public OMXCodecProxy::EventListener {
  typedef MPAPI::AudioFrame AudioFrame;
  typedef MPAPI::VideoFrame VideoFrame;
  typedef mozilla::MP3FrameParser MP3FrameParser;
  typedef mozilla::MediaResource MediaResource;
  typedef mozilla::AbstractMediaDecoder AbstractMediaDecoder;
  typedef mozilla::layers::FenceHandle FenceHandle;
  typedef mozilla::layers::TextureClient TextureClient;

  enum {
    kPreferSoftwareCodecs = 1,
    kSoftwareCodecsOnly = 8,
    kHardwareCodecsOnly = 16,
  };

  enum {
    kNotifyPostReleaseVideoBuffer = 'noti',
    kNotifyStatusChanged = 'stat'
  };

  AbstractMediaDecoder *mDecoder;
  nsRefPtr<MediaResource> mResource;
  sp<GonkNativeWindow> mNativeWindow;
  sp<GonkNativeWindowClient> mNativeWindowClient;
  sp<MediaSource> mVideoTrack;
  sp<OMXCodecProxy> mVideoSource;
  sp<MediaSource> mAudioOffloadTrack;
  sp<MediaSource> mAudioTrack;
  sp<MediaSource> mAudioSource;
  int32_t mDisplayWidth;
  int32_t mDisplayHeight;
  int32_t mVideoWidth;
  int32_t mVideoHeight;
  int32_t mVideoColorFormat;
  int32_t mVideoStride;
  int32_t mVideoSliceHeight;
  int32_t mVideoRotation;
  int32_t mAudioChannels;
  int32_t mAudioSampleRate;
  int64_t mDurationUs;
  VideoFrame mVideoFrame;
  AudioFrame mAudioFrame;
  MP3FrameParser mMP3FrameParser;
  bool mIsMp3;

  // The lifetime of these should be handled by OMXCodec, as long as we
  // release them after use: see ReleaseVideoBuffer(), ReleaseAudioBuffer().
  MediaBuffer *mVideoBuffer;
  MediaBuffer *mAudioBuffer;

  struct BufferItem {
    BufferItem()
      : mMediaBuffer(nullptr)
    {
    }
    BufferItem(MediaBuffer* aMediaBuffer, const FenceHandle& aReleaseFenceHandle)
      : mMediaBuffer(aMediaBuffer)
      , mReleaseFenceHandle(aReleaseFenceHandle) {
    }

    MediaBuffer* mMediaBuffer;
    // A fence that signals when the current buffer is no longer being read.
    FenceHandle mReleaseFenceHandle;
  };

  // Holds the video MediaBuffers that are released during video seeking.
  // The held MediaBuffers are released soon after the seek completes.
  // OMXCodec does not accept MediaBuffers during seeking: if a MediaBuffer is
  // returned to OMXCodec during seeking, OMXCodec asserts.
  Vector<BufferItem> mPendingVideoBuffers;
  // This lock protects mPendingVideoBuffers.
  Mutex mPendingVideoBuffersLock;

  // Indicates whether OMXCodec is seeking.
  bool mIsVideoSeeking;
  // This lock protects the pending operations of video MediaBuffer release()
  // calls made from multiple threads. The pending operations happen only
  // during video seeking. Holding mSeekLock for a long time could affect
  // video rendering, so keep the holding time to a minimum.
  Mutex mSeekLock;

  // ALooper is a message loop used in stagefright.
  // It creates a thread for messages and handles messages on that thread.
  // ALooper is a clone of Looper in Android Java.
  // http://developer.android.com/reference/android/os/Looper.html
  sp<ALooper> mLooper;
  // Delivers a message to a wrapped object (OmxDecoder).
  // AHandlerReflector is similar to Handler in Android Java.
  // http://developer.android.com/reference/android/os/Handler.html
  sp<AHandlerReflector<OmxDecoder> > mReflector;
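
  // A rough sketch of how the looper and reflector cooperate (illustrative
  // only; the actual setup lives in OmxDecoder.cpp):
  //
  //   mLooper = new ALooper;
  //   mLooper->registerHandler(mReflector); // route messages to OmxDecoder
  //   mLooper->start();
  //   ...
  //   sp<AMessage> msg = new AMessage(kNotifyStatusChanged, mReflector->id());
  //   msg->post(); // delivered on the looper thread to onMessageReceived()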

  // 'true' if a read from the audio stream was done while reading the metadata.
  bool mAudioMetadataRead;

  void ReleaseVideoBuffer();
  void ReleaseAudioBuffer();
  // Call with mSeekLock held.
  void ReleaseAllPendingVideoBuffersLocked();

  void PlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData,
                         size_t aSize, bool aKeyFrame);
  void CbYCrYFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData,
                   size_t aSize, bool aKeyFrame);
  void SemiPlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData,
                             size_t aSize, bool aKeyFrame);
  void SemiPlanarYVU420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData,
                             size_t aSize, bool aKeyFrame);
  bool ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData,
                    size_t aSize, bool aKeyFrame);
  bool ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData,
                    size_t aDataOffset, size_t aSize,
                    int32_t aAudioChannels, int32_t aAudioSampleRate);

  // True if the decoder is in a paused state.
  bool mAudioPaused;
  bool mVideoPaused;

public:
  OmxDecoder(MediaResource *aResource, AbstractMediaDecoder *aDecoder);
  ~OmxDecoder();

  // MediaResourceManagerClient::EventListener
  virtual void statusChanged();

  // The MediaExtractor provides the information essential for creating an
  // OMXCodec instance, such as the video/audio codec, which we can retrieve
  // through MediaExtractor::getTrackMetaData().
  // In the general case, the extractor is created from a sp<DataSource> which
  // connects to a MediaResource like ChannelMediaResource.
  // Data is read from the MediaResource to create a suitable extractor which
  // extracts data from a container.
  // Note: RTSP requires a custom extractor because it doesn't have a container.
  bool Init(sp<MediaExtractor>& extractor);

  bool TryLoad();
  bool IsDormantNeeded();
  bool IsWaitingMediaResources();
  bool AllocateMediaResources();
  void ReleaseMediaResources();
  bool SetVideoFormat();
  bool SetAudioFormat();

  void ReleaseDecoder();

  bool NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset);

  void GetDuration(int64_t *durationUs) {
    *durationUs = mDurationUs;
  }

  void GetVideoParameters(int32_t* aDisplayWidth, int32_t* aDisplayHeight,
                          int32_t* aWidth, int32_t* aHeight) {
    *aDisplayWidth = mDisplayWidth;
    *aDisplayHeight = mDisplayHeight;
    *aWidth = mVideoWidth;
    *aHeight = mVideoHeight;
  }

  void GetAudioParameters(int32_t *numChannels, int32_t *sampleRate) {
    *numChannels = mAudioChannels;
    *sampleRate = mAudioSampleRate;
  }

  bool HasVideo() {
    return mVideoSource != nullptr;
  }

  bool HasAudio() {
    return mAudioSource != nullptr;
  }

  bool ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs,
                 bool aKeyframeSkip = false,
                 bool aDoSeek = false);
  bool ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs);

  MediaResource *GetResource() {
    return mResource;
  }

  // Change the decoder into a playing state.
  nsresult Play();

  // Change the decoder into a paused state.
  void Pause();
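
  // A sketch of the release flow around seeking (illustrative only; the
  // actual logic lives in OmxDecoder.cpp). When the compositor is done with
  // a frame, RecycleCallback() posts the buffer back to the looper thread,
  // and the handler either returns it to OMXCodec right away or, while a
  // seek is in flight, keeps it parked in mPendingVideoBuffers:
  //
  //   case kNotifyPostReleaseVideoBuffer:
  //     {
  //       Mutex::Autolock autoLock(mSeekLock);
  //       if (!mIsVideoSeeking) {
  //         // Safe to hand buffers back to OMXCodec now.
  //         ReleaseAllPendingVideoBuffersLocked();
  //       }
  //       // Otherwise hold them until the seek completes.
  //     }
  //     break;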

  // Post a kNotifyPostReleaseVideoBuffer message to OmxDecoder via ALooper.
  void PostReleaseVideoBuffer(MediaBuffer *aBuffer,
                              const FenceHandle& aReleaseFenceHandle);
  // Receive a message from AHandlerReflector.
  // Called on the ALooper thread.
  void onMessageReceived(const sp<AMessage> &msg);

  int64_t ProcessCachedData(int64_t aOffset, bool aWaitForCompletion);

  sp<MediaSource> GetAudioOffloadTrack() { return mAudioOffloadTrack; }

  static void RecycleCallback(TextureClient* aClient, void* aClosure);
};

}
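
// A typical consumption pattern, as a sketch (everything other than the
// OmxDecoder API itself is hypothetical):
//
//   android::sp<android::OmxDecoder> omx =
//     new android::OmxDecoder(resource, decoder);
//   if (!omx->Init(extractor) || !omx->TryLoad()) {
//     return false;
//   }
//   MPAPI::VideoFrame frame;
//   // Pass aDoSeek = true on the first read after a seek request.
//   while (omx->ReadVideo(&frame, seekTimeUs)) {
//     // Hand the frame off to the media reader / compositor.
//   }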