|
1 #include <stagefright/foundation/ABase.h> |
|
2 #include <stagefright/foundation/AHandlerReflector.h> |
|
3 #include <stagefright/foundation/ALooper.h> |
|
4 #include <stagefright/MediaSource.h> |
|
5 #include <stagefright/DataSource.h> |
|
6 #include <stagefright/MediaSource.h> |
|
7 #include <utils/RefBase.h> |
|
8 #include <stagefright/MediaExtractor.h> |
|
9 |
|
10 #include "GonkNativeWindow.h" |
|
11 #include "GonkNativeWindowClient.h" |
|
12 #include "mozilla/layers/FenceUtils.h" |
|
13 #include "MP3FrameParser.h" |
|
14 #include "MPAPI.h" |
|
15 #include "MediaResource.h" |
|
16 #include "AbstractMediaDecoder.h" |
|
17 #include "OMXCodecProxy.h" |
|
18 |
|
// Forward declaration so types below (e.g. AHandlerReflector<OmxDecoder>)
// can name OmxDecoder before its full definition.
namespace android {
class OmxDecoder;
} // namespace android
|
22 |
|
23 namespace android { |
|
24 |
|
25 // MediaStreamSource is a DataSource that reads from a MPAPI media stream. |
|
26 class MediaStreamSource : public DataSource { |
|
27 typedef mozilla::MediaResource MediaResource; |
|
28 typedef mozilla::AbstractMediaDecoder AbstractMediaDecoder; |
|
29 |
|
30 Mutex mLock; |
|
31 nsRefPtr<MediaResource> mResource; |
|
32 AbstractMediaDecoder *mDecoder; |
|
33 public: |
|
34 MediaStreamSource(MediaResource* aResource, |
|
35 AbstractMediaDecoder *aDecoder); |
|
36 |
|
37 virtual status_t initCheck() const; |
|
38 virtual ssize_t readAt(off64_t offset, void *data, size_t size); |
|
39 virtual ssize_t readAt(off_t offset, void *data, size_t size) { |
|
40 return readAt(static_cast<off64_t>(offset), data, size); |
|
41 } |
|
42 virtual status_t getSize(off_t *size) { |
|
43 off64_t size64; |
|
44 status_t status = getSize(&size64); |
|
45 *size = size64; |
|
46 return status; |
|
47 } |
|
48 virtual status_t getSize(off64_t *size); |
|
49 virtual uint32_t flags() { |
|
50 return kWantsPrefetching; |
|
51 } |
|
52 |
|
53 virtual ~MediaStreamSource(); |
|
54 |
|
55 private: |
|
56 MediaStreamSource(const MediaStreamSource &); |
|
57 MediaStreamSource &operator=(const MediaStreamSource &); |
|
58 }; |
|
59 |
|
// OmxDecoder decodes the audio/video tracks of a media resource through
// stagefright OMX codecs on behalf of a Gecko AbstractMediaDecoder.  It
// owns an ALooper message thread that is used (among other things) to
// return video MediaBuffers to the codec after consumers release them.
class OmxDecoder : public OMXCodecProxy::EventListener {
  typedef MPAPI::AudioFrame AudioFrame;
  typedef MPAPI::VideoFrame VideoFrame;
  typedef mozilla::MP3FrameParser MP3FrameParser;
  typedef mozilla::MediaResource MediaResource;
  typedef mozilla::AbstractMediaDecoder AbstractMediaDecoder;
  typedef mozilla::layers::FenceHandle FenceHandle;
  typedef mozilla::layers::TextureClient TextureClient;

  // Codec-selection flags.
  // NOTE(review): these values appear to mirror OMXCodec creation flags
  // (software-preferred / software-only / hardware-only) — confirm against
  // the OMXCodec header of the platform in use.
  enum {
    kPreferSoftwareCodecs = 1,
    kSoftwareCodecsOnly = 8,
    kHardwareCodecsOnly = 16,
  };

  // AMessage 'what' codes dispatched to onMessageReceived().
  enum {
    kNotifyPostReleaseVideoBuffer = 'noti',
    kNotifyStatusChanged = 'stat'
  };

  AbstractMediaDecoder *mDecoder;  // non-owning backpointer
  nsRefPtr<MediaResource> mResource;
  sp<GonkNativeWindow> mNativeWindow;
  sp<GonkNativeWindowClient> mNativeWindowClient;
  // Tracks (from the extractor) and the codec sources decoding them.
  sp<MediaSource> mVideoTrack;
  sp<OMXCodecProxy> mVideoSource;
  sp<MediaSource> mAudioOffloadTrack;
  sp<MediaSource> mAudioTrack;
  sp<MediaSource> mAudioSource;
  // Video geometry; display size may differ from the coded buffer size.
  int32_t mDisplayWidth;
  int32_t mDisplayHeight;
  int32_t mVideoWidth;
  int32_t mVideoHeight;
  int32_t mVideoColorFormat;
  int32_t mVideoStride;
  int32_t mVideoSliceHeight;
  int32_t mVideoRotation;
  // Audio stream parameters.
  int32_t mAudioChannels;
  int32_t mAudioSampleRate;
  int64_t mDurationUs;   // total duration in microseconds
  // Scratch frames reused across Read{Video,Audio} calls.
  VideoFrame mVideoFrame;
  AudioFrame mAudioFrame;
  MP3FrameParser mMP3FrameParser;
  bool mIsMp3;

  // Lifetime of these should be handled by OMXCodec, as long as we release
  // them after use: see ReleaseVideoBuffer(), ReleaseAudioBuffer()
  MediaBuffer *mVideoBuffer;
  MediaBuffer *mAudioBuffer;

  // A MediaBuffer paired with the fence guarding its current reader.
  struct BufferItem {
    BufferItem()
     : mMediaBuffer(nullptr)
    {
    }
    BufferItem(MediaBuffer* aMediaBuffer, const FenceHandle& aReleaseFenceHandle)
     : mMediaBuffer(aMediaBuffer)
     , mReleaseFenceHandle(aReleaseFenceHandle) {
    }

    MediaBuffer* mMediaBuffer;
    // a fence will signal when the current buffer is no longer being read.
    FenceHandle mReleaseFenceHandle;
  };

  // Holds video MediaBuffers that are released during video seeking.
  // The held MediaBuffers are released soon after seek completion.
  // OMXCodec does not accept MediaBuffer during seeking. If a MediaBuffer is
  // returned to OMXCodec during seeking, OMXCodec asserts.
  Vector<BufferItem> mPendingVideoBuffers;
  // The lock protects mPendingVideoBuffers.
  Mutex mPendingVideoBuffersLock;

  // True while OMXCodec is seeking.
  bool mIsVideoSeeking;
  // The lock protects video MediaBuffer release()'s pending operations called
  // from multiple threads. The pending operations happen only during video
  // seeking. Holding mSeekLock for a long time could stall video rendering,
  // so the holding time should be kept to a minimum.
  Mutex mSeekLock;

  // ALooper is a message loop used in stagefright.
  // It creates a thread for messages and handles messages in the thread.
  // ALooper is a clone of Looper in android Java.
  // http://developer.android.com/reference/android/os/Looper.html
  sp<ALooper> mLooper;
  // Delivers a message to a wrapped object (OmxDecoder).
  // AHandlerReflector is similar to Handler in android Java.
  // http://developer.android.com/reference/android/os/Handler.html
  sp<AHandlerReflector<OmxDecoder> > mReflector;

  // 'true' if a read from the audio stream was done while reading the metadata
  bool mAudioMetadataRead;

  void ReleaseVideoBuffer();
  void ReleaseAudioBuffer();
  // Call with mSeekLock held.
  void ReleaseAllPendingVideoBuffersLocked();

  // Converters from a raw decoder output buffer to a VideoFrame, one per
  // color format (presumably selected by mVideoColorFormat — confirm in
  // the implementation).
  void PlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void CbYCrYFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void SemiPlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void SemiPlanarYVU420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  bool ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  bool ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize,
                    int32_t aAudioChannels, int32_t aAudioSampleRate);

  // True if the decoder is in a paused state.
  bool mAudioPaused;
  bool mVideoPaused;

public:
  OmxDecoder(MediaResource *aResource, AbstractMediaDecoder *aDecoder);
  ~OmxDecoder();

  // MediaResourceManagerClient::EventListener
  virtual void statusChanged();

  // The MediaExtractor provides essential information for creating OMXCodec
  // instance. Such as video/audio codec, we can retrieve them through the
  // MediaExtractor::getTrackMetaData().
  // In general cases, the extractor is created by a sp<DataSource> which
  // connect to a MediaResource like ChannelMediaResource.
  // Data is read from the MediaResource to create a suitable extractor which
  // extracts data from a container.
  // Note: RTSP requires a custom extractor because it doesn't have a container.
  bool Init(sp<MediaExtractor>& extractor);

  bool TryLoad();
  bool IsDormantNeeded();
  bool IsWaitingMediaResources();
  bool AllocateMediaResources();
  void ReleaseMediaResources();
  bool SetVideoFormat();
  bool SetAudioFormat();

  void ReleaseDecoder();

  bool NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset);

  // Reports the container duration (microseconds) cached in mDurationUs.
  void GetDuration(int64_t *durationUs) {
    *durationUs = mDurationUs;
  }

  // Reports the cached display and coded video dimensions.
  void GetVideoParameters(int32_t* aDisplayWidth, int32_t* aDisplayHeight,
                          int32_t* aWidth, int32_t* aHeight) {
    *aDisplayWidth = mDisplayWidth;
    *aDisplayHeight = mDisplayHeight;
    *aWidth = mVideoWidth;
    *aHeight = mVideoHeight;
  }

  // Reports the cached audio channel count and sample rate.
  void GetAudioParameters(int32_t *numChannels, int32_t *sampleRate) {
    *numChannels = mAudioChannels;
    *sampleRate = mAudioSampleRate;
  }

  bool HasVideo() {
    return mVideoSource != nullptr;
  }

  bool HasAudio() {
    return mAudioSource != nullptr;
  }

  bool ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs,
                 bool aKeyframeSkip = false,
                 bool aDoSeek = false);
  bool ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs);

  // Non-owning access to the underlying media resource.
  MediaResource *GetResource() {
    return mResource;
  }

  //Change decoder into a playing state
  nsresult Play();

  //Change decoder into a paused state
  void Pause();

  // Post kNotifyPostReleaseVideoBuffer message to OmxDecoder via ALooper.
  void PostReleaseVideoBuffer(MediaBuffer *aBuffer, const FenceHandle& aReleaseFenceHandle);
  // Receive a message from AHandlerReflector.
  // Called on ALooper thread.
  void onMessageReceived(const sp<AMessage> &msg);

  int64_t ProcessCachedData(int64_t aOffset, bool aWaitForCompletion);

  sp<MediaSource> GetAudioOffloadTrack() { return mAudioOffloadTrack; }

  static void RecycleCallback(TextureClient* aClient, void* aClosure);
};
|
252 |
|
253 } |
|
254 |