/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "RtspOmxReader.h"

#include "AbstractMediaDecoder.h"
#include "MediaDecoderStateMachine.h"
#include "MPAPI.h"
#include "mozilla/dom/TimeRanges.h"
#include "mozilla/Mutex.h"
#include "mozilla/TimeStamp.h"
#include "OmxDecoder.h"
#include "RtspMediaResource.h"
#include "RtspOmxDecoder.h"
#include "VideoUtils.h"

#include <stagefright/MediaExtractor.h>
#include <stagefright/MediaBufferGroup.h>
#include <stagefright/MetaData.h>

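// FRAME_DEFAULT_SIZE is the initial size, in bytes, of the single MediaBuffer
// that RtspMediaSource feeds to the OMX decoder; RtspMediaSource::read()
// enlarges it whenever an incoming frame is bigger than the current size.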
#define FRAME_DEFAULT_SIZE 1024

using namespace android;

namespace mozilla {

/* class RtspMediaSource : implements MediaSource for OMX.
 * The decoder thread triggers the MediaDecoderStateMachine to read an a/v
 * frame. Then RtspOmxReader calls the OMX decoder to decode the a/v frame.
 * Finally the code path runs into read() here, which reads un-decoded frame
 * data from mResource and constructs a MediaBuffer for output to the OMX
 * decoder.
 * */
class RtspMediaSource : public android::MediaSource {
public:
  RtspMediaSource(RtspMediaResource *aRtspMediaResource,
                  ssize_t aTrackIdx,
                  uint32_t aFrameMaxSize,
                  const sp<MetaData>& aMeta)
  : mRtspResource(aRtspMediaResource)
  , mFormat(aMeta)
  , mTrackIdx(aTrackIdx)
  , mMonitor("RtspMediaSource.mMonitor")
  , mIsStarted(false)
  , mGroup(nullptr)
  , mBuffer(nullptr)
  , mFrameMaxSize(aFrameMaxSize) {
    MOZ_COUNT_CTOR(RtspMediaSource);
  }
  virtual ~RtspMediaSource() {
    MOZ_COUNT_DTOR(RtspMediaSource);
  }
  virtual status_t start(MetaData *params = nullptr) MOZ_FINAL MOZ_OVERRIDE;
  virtual status_t stop() MOZ_FINAL MOZ_OVERRIDE;
  virtual sp<MetaData> getFormat() MOZ_FINAL MOZ_OVERRIDE {
    ReentrantMonitorAutoEnter mon(mMonitor);
    return mFormat;
  }
  virtual status_t read(MediaBuffer **buffer,
                        const ReadOptions *options = nullptr) MOZ_FINAL MOZ_OVERRIDE;
private:
  nsRefPtr<RtspMediaResource> mRtspResource;
  sp<MetaData> mFormat;
  uint32_t mTrackIdx;
  ReentrantMonitor mMonitor;
  bool mIsStarted;

  // mGroup owns mBuffer; mFrameMaxSize is the size of mBuffer.
  // mBuffer is the input buffer for the OMX decoder.
  nsAutoPtr<MediaBufferGroup> mGroup;
  MediaBuffer* mBuffer;
  uint32_t mFrameMaxSize;
};

status_t RtspMediaSource::start(MetaData *params)
{
  ReentrantMonitorAutoEnter mon(mMonitor);
  if (!mIsStarted) {
    // RtspMediaSource relinquishes ownership of the MediaBuffer |buf| to mGroup.
    mGroup = new MediaBufferGroup();
    MediaBuffer* buf = new MediaBuffer(mFrameMaxSize);
    mGroup->add_buffer(buf);
    mIsStarted = true;
  }
  return OK;
}

status_t RtspMediaSource::stop()
{
  ReentrantMonitorAutoEnter mon(mMonitor);
  if (mIsStarted) {
    if (mBuffer) {
      mBuffer->release();
      mBuffer = nullptr;
    }
    mGroup = nullptr;
    mIsStarted = false;
  }
  return OK;
}

status_t RtspMediaSource::read(MediaBuffer **out, const ReadOptions *options)
{
  ReentrantMonitorAutoEnter mon(mMonitor);
  NS_ENSURE_TRUE(mIsStarted, MEDIA_ERROR_BASE);
  NS_ENSURE_TRUE(out, MEDIA_ERROR_BASE);
  *out = nullptr;

  // A video/audio track's initial frame size is FRAME_DEFAULT_SIZE. We need
  // to reallocate mBuffer if it doesn't have enough space for the next
  // ReadFrameFromTrack call (actualFrameSize > mFrameMaxSize).
  status_t err;
  uint32_t readCount;
  uint32_t actualFrameSize;
  uint64_t time;
  nsresult rv;

  while (1) {
    err = mGroup->acquire_buffer(&mBuffer);
    NS_ENSURE_TRUE(err == OK, err);

    rv = mRtspResource->ReadFrameFromTrack((uint8_t *)mBuffer->data(),
                                           mFrameMaxSize, mTrackIdx, readCount,
                                           time, actualFrameSize);
    if (NS_FAILED(rv)) {
      // Release mGroup and mBuffer.
      stop();
      // Since RtspMediaSource is an implementation of an Android media source,
      // it is held by OMXCodec and hasn't been released yet, so we have to
      // re-construct mGroup and mBuffer.
      start();
      NS_WARNING("ReadFrameFromTrack failed; releasing buffers and returning.");
      return ERROR_CONNECTION_LOST;
    }
    if (actualFrameSize > mFrameMaxSize) {
      // Release mGroup and mBuffer.
      stop();
      // Re-construct mGroup and mBuffer with the larger frame size.
      mFrameMaxSize = actualFrameSize;
      err = start();
      NS_ENSURE_TRUE(err == OK, err);
    } else {
      // ReadFrameFromTrack succeeded; break out of the while loop.
      break;
    }
  }
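  // Only the first readCount bytes of mBuffer hold valid frame data, so narrow
  // the buffer's visible range before handing it to the OMX decoder.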
  mBuffer->set_range(0, readCount);
  if (NS_SUCCEEDED(rv)) {
    mBuffer->meta_data()->clear();
    // Fill the meta data.
    mBuffer->meta_data()->setInt64(kKeyTime, time);
    *out = mBuffer;
    mBuffer = nullptr;
    return OK;
  }

  return ERROR_END_OF_STREAM;
}

// RtspExtractor is a custom extractor for an RTSP stream, whereas the other
// XXXExtractors are made for container media content.
// The extractor is used by |OmxDecoder::Init|; it provides the essential
// information for creating an OMXCodec instance.
// For example, |getTrackMetaData| returns metadata that includes the
// codec type.
class RtspExtractor: public MediaExtractor
{
public:
  virtual size_t countTracks() MOZ_FINAL MOZ_OVERRIDE;
  virtual sp<android::MediaSource> getTrack(size_t index)
    MOZ_FINAL MOZ_OVERRIDE;
  virtual sp<MetaData> getTrackMetaData(
    size_t index, uint32_t flag = 0) MOZ_FINAL MOZ_OVERRIDE;
  virtual uint32_t flags() const MOZ_FINAL MOZ_OVERRIDE;

  RtspExtractor(RtspMediaResource *aResource)
  : mRtspResource(aResource) {
    MOZ_COUNT_CTOR(RtspExtractor);
    MOZ_ASSERT(aResource);
    mController = mRtspResource->GetMediaStreamController();
    MOZ_ASSERT(mController);
  }
  virtual ~RtspExtractor() MOZ_OVERRIDE {
    MOZ_COUNT_DTOR(RtspExtractor);
  }
private:
  // mRtspResource is a pointer to RtspMediaResource. When |getTrack| is
  // called, we use mRtspResource to construct an RtspMediaSource.
  RtspMediaResource* mRtspResource;
  // Through the mController in mRtspResource, we can get the essential
  // information for the extractor.
  nsRefPtr<nsIStreamingProtocolController> mController;
};

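// The number of a/v tracks in the RTSP session is reported by the stream
// controller held by mRtspResource.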
size_t RtspExtractor::countTracks()
{
  uint8_t tracks = 0;
  if (mController) {
    mController->GetTotalTracks(&tracks);
  }
  return size_t(tracks);
}

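// getTrack wraps the RTSP track at |index| in an RtspMediaSource. The source
// starts with a FRAME_DEFAULT_SIZE input buffer and grows it on demand in
// RtspMediaSource::read().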
sp<android::MediaSource> RtspExtractor::getTrack(size_t index)
{
  NS_ENSURE_TRUE(index < countTracks(), nullptr);

  sp<MetaData> meta = getTrackMetaData(index);
  sp<android::MediaSource> source = new RtspMediaSource(mRtspResource,
                                                        index,
                                                        FRAME_DEFAULT_SIZE,
                                                        meta);
  return source;
}

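// Build a stagefright MetaData record for the track at |index| from the
// nsIStreamingProtocolMetaData exposed by the stream controller: MIME type,
// video dimensions, audio parameters, duration, and codec-specific data
// (ESDS/AVCC).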
sp<MetaData> RtspExtractor::getTrackMetaData(size_t index, uint32_t flag)
{
  NS_ENSURE_TRUE(index < countTracks(), nullptr);

  sp<MetaData> meta = new MetaData();
  nsCOMPtr<nsIStreamingProtocolMetaData> rtspMetadata;
  mController->GetTrackMetaData(index, getter_AddRefs(rtspMetadata));

  if (rtspMetadata) {
    // Convert rtspMetadata into meta.
    // The getter functions of nsIStreamingProtocolMetaData initialize the
    // metadata values to 0 before setting them.
    nsCString mime;
    rtspMetadata->GetMimeType(mime);
    meta->setCString(kKeyMIMEType, mime.get());
    uint32_t temp32;
    rtspMetadata->GetWidth(&temp32);
    meta->setInt32(kKeyWidth, temp32);
    rtspMetadata->GetHeight(&temp32);
    meta->setInt32(kKeyHeight, temp32);
    rtspMetadata->GetSampleRate(&temp32);
    meta->setInt32(kKeySampleRate, temp32);
    rtspMetadata->GetChannelCount(&temp32);
    meta->setInt32(kKeyChannelCount, temp32);
    uint64_t temp64;
    rtspMetadata->GetDuration(&temp64);
    meta->setInt64(kKeyDuration, temp64);

    nsCString tempCString;
    rtspMetadata->GetEsdsData(tempCString);
    if (tempCString.Length()) {
      meta->setData(kKeyESDS, 0, tempCString.get(), tempCString.Length());
    }
    rtspMetadata->GetAvccData(tempCString);
    if (tempCString.Length()) {
      meta->setData(kKeyAVCC, 0, tempCString.get(), tempCString.Length());
    }
  }
  return meta;
}

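// A real-time (live) RTSP stream cannot be seeked, so only non-live streams
// advertise MediaExtractor::CAN_SEEK.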
uint32_t RtspExtractor::flags() const
{
  if (mRtspResource->IsRealTime()) {
    return 0;
  } else {
    return MediaExtractor::CAN_SEEK;
  }
}

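// Create the RTSP-specific extractor and hand it to |OmxDecoder::Init| so that
// the OMXCodec instances are configured from the RTSP track metadata instead
// of a container parser.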
nsresult RtspOmxReader::InitOmxDecoder()
{
  if (!mOmxDecoder.get()) {
    NS_ASSERTION(mDecoder, "RtspOmxReader mDecoder is null.");
    NS_ASSERTION(mDecoder->GetResource(),
                 "RtspOmxReader mDecoder->GetResource() is null.");
    mExtractor = new RtspExtractor(mRtspResource);
    mOmxDecoder = new OmxDecoder(mDecoder->GetResource(), mDecoder);
    if (!mOmxDecoder->Init(mExtractor)) {
      return NS_ERROR_FAILURE;
    }
  }
  return NS_OK;
}

nsresult RtspOmxReader::Seek(int64_t aTime, int64_t aStartTime,
                             int64_t aEndTime, int64_t aCurrentTime)
{
  // RTSP seeking is time-based, so we call the SeekTime function of
  // RtspMediaResource. SeekTime sends a seek command to the RTSP server over
  // the network and also clears the buffered data in RtspMediaResource.
  if (mRtspResource) {
    mRtspResource->SeekTime(aTime);
  }

  // Call |MediaOmxReader::Seek| to notify the OMX decoder that we are
  // performing a seek operation. It clears |mVideoQueue| and |mAudioQueue|,
  // which store the decoded data, and also calls |DecodeToTarget| to pass the
  // seek time to the OMX a/v decoders.
  return MediaOmxReader::Seek(aTime, aStartTime, aEndTime, aCurrentTime);
}

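// Mark the reader active first: SetActive() issues Play on the RTSP stream
// controller, so data is flowing before the parent class reads the metadata.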
nsresult
RtspOmxReader::ReadMetadata(MediaInfo* aInfo,
                            MetadataTags** aTags)
{
  SetActive();

  nsresult rv = MediaOmxReader::ReadMetadata(aInfo, aTags);
  NS_ENSURE_SUCCESS(rv, rv);

  return NS_OK;
}
void RtspOmxReader::SetIdle() {
  // Call the parent class to set OMXCodec idle.
  MediaOmxReader::SetIdle();

  // We also need to pause RTSP streaming while OMXCodec decoding is idle.
  if (mRtspResource) {
    nsIStreamingProtocolController* controller =
      mRtspResource->GetMediaStreamController();
    if (controller) {
      controller->Pause();
    }
  }
}
void RtspOmxReader::SetActive() {
  // We need to start RTSP streaming before OMXCodec decoding.
  if (mRtspResource) {
    nsIStreamingProtocolController* controller =
      mRtspResource->GetMediaStreamController();
    if (controller) {
      controller->Play();
    }
  }

  // Call the parent class to set OMXCodec active.
  MediaOmxReader::SetActive();
}

} // namespace mozilla