#include <stagefright/foundation/ABase.h>
#include <stagefright/foundation/AHandlerReflector.h>
#include <stagefright/foundation/ALooper.h>
#include <stagefright/MediaSource.h>
#include <stagefright/DataSource.h>
#include <utils/RefBase.h>
#include <stagefright/MediaExtractor.h>

#include "GonkNativeWindow.h"
#include "GonkNativeWindowClient.h"
#include "mozilla/layers/FenceUtils.h"
#include "MP3FrameParser.h"
#include "MPAPI.h"
#include "MediaResource.h"
#include "AbstractMediaDecoder.h"
#include "OMXCodecProxy.h"

namespace android {
class OmxDecoder;
}

namespace android {

// MediaStreamSource is a DataSource that reads from an MPAPI media stream.
class MediaStreamSource : public DataSource {
  typedef mozilla::MediaResource MediaResource;
  typedef mozilla::AbstractMediaDecoder AbstractMediaDecoder;

  Mutex mLock;
  nsRefPtr<MediaResource> mResource;
  AbstractMediaDecoder *mDecoder;
public:
  MediaStreamSource(MediaResource* aResource,
                    AbstractMediaDecoder *aDecoder);

  virtual status_t initCheck() const;
  virtual ssize_t readAt(off64_t offset, void *data, size_t size);
  virtual ssize_t readAt(off_t offset, void *data, size_t size) {
    return readAt(static_cast<off64_t>(offset), data, size);
  }
  virtual status_t getSize(off_t *size) {
    off64_t size64;
    status_t status = getSize(&size64);
    *size = size64;
    return status;
  }
  virtual status_t getSize(off64_t *size);
  virtual uint32_t flags() {
    return kWantsPrefetching;
  }

  virtual ~MediaStreamSource();

private:
  MediaStreamSource(const MediaStreamSource &);
  MediaStreamSource &operator=(const MediaStreamSource &);
};

class OmxDecoder : public OMXCodecProxy::EventListener {
  typedef MPAPI::AudioFrame AudioFrame;
  typedef MPAPI::VideoFrame VideoFrame;
  typedef mozilla::MP3FrameParser MP3FrameParser;
  typedef mozilla::MediaResource MediaResource;
  typedef mozilla::AbstractMediaDecoder AbstractMediaDecoder;
  typedef mozilla::layers::FenceHandle FenceHandle;
  typedef mozilla::layers::TextureClient TextureClient;

  enum {
    kPreferSoftwareCodecs = 1,
    kSoftwareCodecsOnly = 8,
    kHardwareCodecsOnly = 16,
  };

  enum {
    kNotifyPostReleaseVideoBuffer = 'noti',
    kNotifyStatusChanged = 'stat'
  };

  AbstractMediaDecoder *mDecoder;
  nsRefPtr<MediaResource> mResource;
  sp<GonkNativeWindow> mNativeWindow;
  sp<GonkNativeWindowClient> mNativeWindowClient;
  sp<MediaSource> mVideoTrack;
  sp<OMXCodecProxy> mVideoSource;
  sp<MediaSource> mAudioOffloadTrack;
  sp<MediaSource> mAudioTrack;
  sp<MediaSource> mAudioSource;
  int32_t mDisplayWidth;
  int32_t mDisplayHeight;
  int32_t mVideoWidth;
  int32_t mVideoHeight;
  int32_t mVideoColorFormat;
  int32_t mVideoStride;
  int32_t mVideoSliceHeight;
  int32_t mVideoRotation;
  int32_t mAudioChannels;
  int32_t mAudioSampleRate;
  int64_t mDurationUs;
  VideoFrame mVideoFrame;
  AudioFrame mAudioFrame;
  MP3FrameParser mMP3FrameParser;
  bool mIsMp3;

  // The lifetime of these buffers is managed by OMXCodec, as long as we
  // release them after use: see ReleaseVideoBuffer(), ReleaseAudioBuffer().
  MediaBuffer *mVideoBuffer;
  MediaBuffer *mAudioBuffer;

  struct BufferItem {
    BufferItem()
      : mMediaBuffer(nullptr)
    {
    }
    BufferItem(MediaBuffer* aMediaBuffer, const FenceHandle& aReleaseFenceHandle)
      : mMediaBuffer(aMediaBuffer)
      , mReleaseFenceHandle(aReleaseFenceHandle) {
    }

    MediaBuffer* mMediaBuffer;
    // A fence signals when the current buffer is no longer being read.
    FenceHandle mReleaseFenceHandle;
  };

  // Holds the video MediaBuffers that are released during video seeking.
  // The held MediaBuffers are released soon after the seek completes.
  // OMXCodec does not accept a MediaBuffer while it is seeking; if a
  // MediaBuffer is returned to OMXCodec during a seek, OMXCodec asserts.
  Vector<BufferItem> mPendingVideoBuffers;
  // Protects mPendingVideoBuffers.
  Mutex mPendingVideoBuffersLock;
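  // An illustrative sketch (assumed, not the actual OmxDecoder.cpp code) of
  // how a released buffer is queued under the lock and later drained by
  // ReleaseAllPendingVideoBuffersLocked():
  //   {
  //     Mutex::Autolock autoLock(mPendingVideoBuffersLock);
  //     mPendingVideoBuffers.push(BufferItem(aBuffer, aReleaseFenceHandle));
  //   }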

  // Indicates whether OMXCodec is currently seeking.
  bool mIsVideoSeeking;
  // Protects the pending video MediaBuffer release() operations, which can be
  // triggered from multiple threads. These pending operations only occur
  // during video seeking. Holding mSeekLock for a long time can stall video
  // rendering, so hold times should be kept to a minimum.
  Mutex mSeekLock;

  // ALooper is a message loop used in stagefright. It creates a thread for
  // messages and handles the messages on that thread. ALooper is a clone of
  // Looper in Android Java.
  // http://developer.android.com/reference/android/os/Looper.html
  sp<ALooper> mLooper;
  // Delivers messages to the wrapped object (OmxDecoder).
  // AHandlerReflector is similar to Handler in Android Java.
  // http://developer.android.com/reference/android/os/Handler.html
  sp<AHandlerReflector<OmxDecoder> > mReflector;
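  // Typical wiring of the looper and reflector (a hedged sketch; the real
  // setup lives in OmxDecoder.cpp and the looper name here is an assumption):
  //   mLooper = new ALooper;
  //   mLooper->setName("OmxDecoder");
  //   mReflector = new AHandlerReflector<OmxDecoder>(this);
  //   mLooper->registerHandler(mReflector);  // routes AMessages to onMessageReceived()
  //   mLooper->start();                      // starts the message-loop thread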

  // True if a read from the audio stream was done while reading the metadata.
  bool mAudioMetadataRead;

  void ReleaseVideoBuffer();
  void ReleaseAudioBuffer();
  // Call with mSeekLock held.
  void ReleaseAllPendingVideoBuffersLocked();

  void PlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void CbYCrYFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void SemiPlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void SemiPlanarYVU420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  bool ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  bool ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize,
                    int32_t aAudioChannels, int32_t aAudioSampleRate);

  // True if the decoder is in a paused state.
  bool mAudioPaused;
  bool mVideoPaused;

public:
  OmxDecoder(MediaResource *aResource, AbstractMediaDecoder *aDecoder);
  ~OmxDecoder();

  // OMXCodecProxy::EventListener
  virtual void statusChanged();

  // The MediaExtractor provides the essential information for creating an
  // OMXCodec instance, such as the video/audio codec, which we can retrieve
  // through MediaExtractor::getTrackMetaData().
  // In the common case, the extractor is created from an sp<DataSource> that
  // connects to a MediaResource such as ChannelMediaResource. Data is read
  // from the MediaResource to create an extractor suited to the container.
  // Note: RTSP requires a custom extractor because it doesn't have a container.
  bool Init(sp<MediaExtractor>& extractor);
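  // A hedged usage sketch (illustrative only; the variable names are
  // assumptions and the real call site may differ):
  //   sp<DataSource> dataSource = new MediaStreamSource(resource, decoder);
  //   sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
  //   if (extractor != nullptr) {
  //     omxDecoder->Init(extractor);
  //   }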

  bool TryLoad();
  bool IsDormantNeeded();
  bool IsWaitingMediaResources();
  bool AllocateMediaResources();
  void ReleaseMediaResources();
  bool SetVideoFormat();
  bool SetAudioFormat();

  void ReleaseDecoder();

  bool NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset);

  void GetDuration(int64_t *durationUs) {
    *durationUs = mDurationUs;
  }

  void GetVideoParameters(int32_t* aDisplayWidth, int32_t* aDisplayHeight,
                          int32_t* aWidth, int32_t* aHeight) {
    *aDisplayWidth = mDisplayWidth;
    *aDisplayHeight = mDisplayHeight;
    *aWidth = mVideoWidth;
    *aHeight = mVideoHeight;
  }

  void GetAudioParameters(int32_t *numChannels, int32_t *sampleRate) {
    *numChannels = mAudioChannels;
    *sampleRate = mAudioSampleRate;
  }

  bool HasVideo() {
    return mVideoSource != nullptr;
  }

  bool HasAudio() {
    return mAudioSource != nullptr;
  }

  bool ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs,
                 bool aKeyframeSkip = false,
                 bool aDoSeek = false);
  bool ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs);

  MediaResource *GetResource() {
    return mResource;
  }

  // Change the decoder into a playing state.
  nsresult Play();

  // Change the decoder into a paused state.
  void Pause();

  // Post a kNotifyPostReleaseVideoBuffer message to OmxDecoder via ALooper.
  void PostReleaseVideoBuffer(MediaBuffer *aBuffer, const FenceHandle& aReleaseFenceHandle);
  // Receives messages from AHandlerReflector.
  // Called on the ALooper thread.
  void onMessageReceived(const sp<AMessage> &msg);
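  // A hedged sketch of the posting pattern (illustrative only; the actual
  // implementation lives in OmxDecoder.cpp and may differ):
  //   sp<AMessage> notify = new AMessage(kNotifyPostReleaseVideoBuffer, mReflector->id());
  //   notify->post();  // handled later on the ALooper thread in onMessageReceived()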

  int64_t ProcessCachedData(int64_t aOffset, bool aWaitForCompletion);

  sp<MediaSource> GetAudioOffloadTrack() { return mAudioOffloadTrack; }

  static void RecycleCallback(TextureClient* aClient, void* aClosure);
};

} // namespace android