|
1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ |
|
2 /* vim:set ts=2 sw=2 sts=2 et cindent: */ |
|
3 /* This Source Code Form is subject to the terms of the Mozilla Public |
|
4 * License, v. 2.0. If a copy of the MPL was not distributed with this file, |
|
5 * You can obtain one at http://mozilla.org/MPL/2.0/. */ |
|
6 |
|
7 #include <stagefright/ColorConverter.h> |
|
8 #include <stagefright/DataSource.h> |
|
9 #include <stagefright/MediaExtractor.h> |
|
10 #include <stagefright/MetaData.h> |
|
11 #include <stagefright/OMXCodec.h> |
|
12 #include <media/stagefright/MediaErrors.h> |
|
13 #ifdef MOZ_WIDGET_GONK |
|
14 #include <OMX.h> |
|
15 #else |
|
16 #include <stagefright/OMXClient.h> |
|
17 #endif |
|
18 #include <algorithm> |
|
19 |
|
20 #include "mozilla/Assertions.h" |
|
21 #include "mozilla/Types.h" |
|
22 #include "MPAPI.h" |
|
23 |
|
24 #include "android/log.h" |
|
25 |
|
26 #define MAX_DECODER_NAME_LEN 256 |
|
27 #define AVC_MIME_TYPE "video/avc" |
|
28 |
|
29 #if !defined(MOZ_ANDROID_FROYO) |
|
30 #define DEFAULT_STAGEFRIGHT_FLAGS OMXCodec::kClientNeedsFramebuffer |
|
31 #else |
|
32 #define DEFAULT_STAGEFRIGHT_FLAGS 0 |
|
33 #endif |
|
34 |
|
35 #undef LOG |
|
36 #define LOG(args...) __android_log_print(ANDROID_LOG_INFO, "OmxPlugin" , ## args) |
|
37 |
|
38 #if defined(MOZ_ANDROID_FROYO) || defined(MOZ_ANDROID_GB) |
|
39 // Android versions 2.x.x have common API differences |
|
40 #define MOZ_ANDROID_V2_X_X |
|
41 #endif |
|
42 |
|
43 #if !defined(MOZ_ANDROID_V2_X_X) && !defined(MOZ_ANDROID_HC) |
|
44 #define MOZ_ANDROID_V4_OR_ABOVE |
|
45 #endif |
|
46 |
|
47 #if defined(MOZ_ANDROID_V4_OR_ABOVE) |
|
48 #include <I420ColorConverter.h> |
|
49 #endif |
|
50 |
|
51 using namespace MPAPI; |
|
52 |
|
53 #if !defined(MOZ_STAGEFRIGHT_OFF_T) |
|
54 #define MOZ_STAGEFRIGHT_OFF_T off64_t |
|
55 #endif |
|
56 |
|
57 using namespace android; |
|
58 |
|
59 namespace OmxPlugin { |
|
60 |
|
// Vendor-specific OMX colour-format codes that are absent from the stock OMX
// headers; values match the Qualcomm and TI codec implementations that emit
// them (handled in the ToVideoFrame_* converters below).
const int OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00;
const int OMX_TI_COLOR_FormatYUV420PackedSemiPlanar = 0x7F000100;
|
64 |
|
// Wraps Stagefright's OMXCodec-based audio/video decoding behind the MPAPI
// plugin interface: owns the demuxed tracks, the decoder MediaSources, the
// most recently decoded buffers, and the cached stream parameters.
class OmxDecoder {
  PluginHost *mPluginHost;          // host services (prefs, system info); not owned
  Decoder *mDecoder;                // MPAPI decoder handle (supplies mResource); not owned
  sp<MediaSource> mVideoTrack;      // demuxed (encoded) video track
  sp<MediaSource> mVideoSource;     // decoded video output (OMXCodec or track itself)
  sp<MediaSource> mAudioTrack;      // demuxed (encoded) audio track
  sp<MediaSource> mAudioSource;     // decoded audio output
  int32_t mVideoWidth;              // display width (crop-rect derived)
  int32_t mVideoHeight;             // display height (crop-rect derived)
  int32_t mVideoColorFormat;        // OMX_COLOR_FORMATTYPE reported by the decoder
  int32_t mVideoStride;             // bytes between rows in the decoded buffer
  int32_t mVideoSliceHeight;        // rows allocated per plane in the decoded buffer
  int32_t mVideoCropLeft;           // crop rect, inclusive coordinates
  int32_t mVideoCropTop;
  int32_t mVideoCropRight;
  int32_t mVideoCropBottom;
  int32_t mVideoRotation;           // 0/90/180/270 degrees
  int32_t mAudioChannels;           // -1 until known
  int32_t mAudioSampleRate;         // -1 until known
  int64_t mDurationUs;              // max of audio/video track durations
  MediaBuffer *mVideoBuffer;        // last decoded video buffer; released via ReleaseVideoBuffer()
  VideoFrame mVideoFrame;
  MediaBuffer *mAudioBuffer;        // last decoded audio buffer; released via ReleaseAudioBuffer()
  AudioFrame mAudioFrame;
  ColorConverter *mColorConverter;  // lazily created RGB565 converter; owned

  // 'true' if a read from the audio stream was done while reading the metadata
  bool mAudioMetadataRead;

  void ReleaseVideoBuffer();
  void ReleaseAudioBuffer();

  // One converter per known decoder output colour format; each fills aFrame's
  // plane descriptors pointing into aData without copying pixel data.
  void ToVideoFrame_YUV420Planar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void ToVideoFrame_CbYCrY(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void ToVideoFrame_YUV420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void ToVideoFrame_YVU420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void ToVideoFrame_YUV420PackedSemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void ToVideoFrame_YVU420PackedSemiPlanar32m4ka(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  // These two copy/convert into a buffer obtained from aBufferCallback and
  // return false when conversion is unavailable or fails.
  bool ToVideoFrame_RGB565(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
  bool ToVideoFrame_ColorConverter(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
  bool ToVideoFrame_I420ColorConverter(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
  bool ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
  bool ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize,
                    int32_t aAudioChannels, int32_t aAudioSampleRate);
public:
  OmxDecoder(PluginHost *aPluginHost, Decoder *aDecoder);
  ~OmxDecoder();

  // Opens the resource and sets up tracks/decoders; must succeed before any
  // other call below is meaningful.
  bool Init();
  bool SetVideoFormat();
  bool SetAudioFormat();

  void GetDuration(int64_t *durationUs) {
    *durationUs = mDurationUs;
  }

  void GetVideoParameters(int32_t *width, int32_t *height) {
    *width = mVideoWidth;
    *height = mVideoHeight;
  }

  void GetAudioParameters(int32_t *numChannels, int32_t *sampleRate) {
    *numChannels = mAudioChannels;
    *sampleRate = mAudioSampleRate;
  }

  bool HasVideo() {
    return mVideoSource != nullptr;
  }

  bool HasAudio() {
    return mAudioSource != nullptr;
  }

  bool ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs, BufferCallback *aBufferCallback);
  bool ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs);
};
|
142 |
|
143 #if !defined(MOZ_WIDGET_GONK) |
|
144 static class OmxClientInstance { |
|
145 public: |
|
146 OmxClientInstance() |
|
147 : mClient(new OMXClient()) |
|
148 , mStatus(mClient->connect()) |
|
149 { |
|
150 } |
|
151 |
|
152 status_t IsValid() |
|
153 { |
|
154 return mStatus == OK; |
|
155 } |
|
156 |
|
157 OMXClient *get() |
|
158 { |
|
159 return mClient; |
|
160 } |
|
161 |
|
162 ~OmxClientInstance() |
|
163 { |
|
164 if (mStatus == OK) { |
|
165 mClient->disconnect(); |
|
166 } |
|
167 delete mClient; |
|
168 } |
|
169 |
|
170 private: |
|
171 OMXClient *mClient; |
|
172 status_t mStatus; |
|
173 } sClientInstance; |
|
174 #endif |
|
175 |
|
// Construct in a safe "no media loaded" state (unknown values use 0 or -1);
// real setup happens in Init(). Initializer order mirrors declaration order.
OmxDecoder::OmxDecoder(PluginHost *aPluginHost, Decoder *aDecoder) :
  mPluginHost(aPluginHost),
  mDecoder(aDecoder),
  mVideoWidth(0),
  mVideoHeight(0),
  mVideoColorFormat(0),
  mVideoStride(0),
  mVideoSliceHeight(0),
  mVideoCropLeft(0),
  mVideoCropTop(0),
  mVideoCropRight(0),
  mVideoCropBottom(0),
  mVideoRotation(0),
  mAudioChannels(-1),
  mAudioSampleRate(-1),
  mDurationUs(-1),
  mVideoBuffer(nullptr),
  mAudioBuffer(nullptr),
  mColorConverter(nullptr),
  mAudioMetadataRead(false)
{
}
|
198 |
|
OmxDecoder::~OmxDecoder()
{
  // Release any buffers still held from the last Read*() before stopping the
  // sources that own them.
  ReleaseVideoBuffer();
  ReleaseAudioBuffer();

  if (mVideoSource.get()) {
    mVideoSource->stop();
  }

  if (mAudioSource.get()) {
    mAudioSource->stop();
  }

  // On Honeycomb the ColorConverter class is unavailable and mColorConverter
  // is never allocated, so the delete is compiled out.
#ifndef MOZ_ANDROID_HC
  if (mColorConverter) {
    delete mColorConverter;
  }
#endif
}
|
218 |
|
219 class AutoStopMediaSource { |
|
220 sp<MediaSource> mMediaSource; |
|
221 public: |
|
222 AutoStopMediaSource(sp<MediaSource> aMediaSource) : mMediaSource(aMediaSource) { |
|
223 } |
|
224 |
|
225 ~AutoStopMediaSource() { |
|
226 mMediaSource->stop(); |
|
227 } |
|
228 }; |
|
229 |
|
#ifdef MOZ_WIDGET_GONK
// Lazily-created, process-wide IOMX instance. On Gonk we talk to OMX
// in-process instead of through an OMXClient binder connection.
static sp<IOMX> sOMX = nullptr;
static sp<IOMX> GetOMX() {
  if(sOMX.get() == nullptr) {
    // NOTE(review): OMX is cast straight to IOMX*; presumably OMX derives
    // from IOMX and the reinterpret_cast works around header visibility --
    // confirm against the platform headers before touching this.
    sOMX = reinterpret_cast<IOMX*>(new OMX);
  }
  return sOMX;
}
#endif
|
239 |
|
// Compute the baseline OMXCodec creation flags for this device. Starts from
// DEFAULT_STAGEFRIGHT_FLAGS and removes kClientNeedsFramebuffer on Qualcomm
// hardware, where that flag is misinterpreted (see below).
static uint32_t
GetDefaultStagefrightFlags(PluginHost *aPluginHost)
{
  uint32_t flags = DEFAULT_STAGEFRIGHT_FLAGS;

  // Froyo has no kClientNeedsFramebuffer flag, so there is nothing to adjust.
#if !defined(MOZ_ANDROID_FROYO)

  char hardware[256] = "";
  aPluginHost->GetSystemInfoString("hardware", hardware, sizeof(hardware));

  if (!strcmp("qcom", hardware)) {
    // Qualcomm's OMXCodec implementation interprets this flag to mean that we
    // only want a thumbnail and therefore only need one frame. After the first
    // frame it returns EOS.
    // All other OMXCodec implementations seen so far interpret this flag
    // sanely; some do not return full framebuffers unless this flag is passed.
    flags &= ~OMXCodec::kClientNeedsFramebuffer;
  }

  LOG("Hardware %s; using default flags %#x\n", hardware, flags);

#endif

  return flags;
}
|
265 |
|
// Determine the OMXCodec creation flags for the video decoder, honouring the
// user's hardware/software override pref on non-Gonk builds.
static uint32_t GetVideoCreationFlags(PluginHost* aPluginHost)
{
#ifdef MOZ_WIDGET_GONK
  // Flag value of zero means return a hardware or software decoder
  // depending on what the device supports.
  return 0;
#else
  // Check whether the user has set a pref to override our default OMXCodec
  // CreationFlags flags. This is useful for A/B testing hardware and software
  // decoders for performance and bugs. The interesting flag values are:
  //  0 = Let Stagefright choose hardware or software decoding (default)
  //  8 = Force software decoding
  // 16 = Force hardware decoding
  int32_t flags = 0;
  aPluginHost->GetIntPref("media.stagefright.omxcodec.flags", &flags);
  if (flags != 0) {
    // The kHardwareCodecsOnly/kSoftwareCodecsOnly names only exist on
    // Android 3.x+, so the logging is compiled out on 2.x.
#if !defined(MOZ_ANDROID_V2_X_X)
    LOG("media.stagefright.omxcodec.flags=%d", flags);
    if ((flags & OMXCodec::kHardwareCodecsOnly) != 0) {
      LOG("FORCE HARDWARE DECODING");
    } else if ((flags & OMXCodec::kSoftwareCodecsOnly) != 0) {
      LOG("FORCE SOFTWARE DECODING");
    }
#endif
  }

  // Merge in the per-device defaults (framebuffer flag handling etc.).
  flags |= GetDefaultStagefrightFlags(aPluginHost);

  return static_cast<uint32_t>(flags);
#endif
}
|
297 |
|
// How well we can consume a decoder output colour format. Ordered so that any
// nonzero value tests true in a boolean context and higher means better.
enum ColorFormatSupport {
  ColorFormatNotSupported = 0,
  ColorFormatSupportOK,         // usable via a software colour conversion
  ColorFormatSupportPreferred,  // handled natively, no extra conversion pass
};
|
303 |
|
// Classify a decoder output colour format: preferred when one of our native
// ToVideoFrame_* converters handles it, OK when an Android-provided software
// converter can, otherwise not supported.
static ColorFormatSupport
IsColorFormatSupported(OMX_COLOR_FORMATTYPE aColorFormat)
{
  // Cast to int so the vendor-specific constants (which are not members of
  // the OMX enum) can appear as case labels without warnings.
  switch (static_cast<int>(aColorFormat)) {
  case OMX_COLOR_FormatCbYCrY:
  case OMX_COLOR_FormatYUV420Planar:
  case OMX_COLOR_FormatYUV420SemiPlanar:
  case OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka:
  case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
  case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
    LOG("Colour format %#x supported natively.", aColorFormat);
    // Prefer natively supported colour formats over formats that need another
    // slow software conversion.
    return ColorFormatSupportPreferred;
  default:
    break;
  }

  // These formats are okay if we can't find a better one; Android provides a
  // software conversion to a sane colour format.
#if !defined(MOZ_ANDROID_HC)
  // Probe with a throwaway converter; isValid() reports whether this
  // source/destination pair is convertible.
  if (ColorConverter(aColorFormat, OMX_COLOR_Format16bitRGB565).isValid()) {
    LOG("Colour format %#x supported by Android ColorConverter.", aColorFormat);
    return ColorFormatSupportOK;
  }
#endif

#if defined(MOZ_ANDROID_V4_OR_ABOVE)
  // ICS+ ships a vendor I420 converter loaded at runtime; it only handles the
  // single format it reports via getDecoderOutputFormat().
  I420ColorConverter yuvConverter;

  if (yuvConverter.isLoaded() &&
      yuvConverter.getDecoderOutputFormat() == aColorFormat) {
    LOG("Colour format %#x supported by Android I420ColorConverter.", aColorFormat);
    return ColorFormatSupportOK;
  }
#endif

  return ColorFormatNotSupported;
}
|
343 |
|
#if defined(MOZ_ANDROID_KK)
/**
 * Look for a decoder that supports a colour format that we support.
 *
 * Queries every AVC decoder on the device and picks the first decoder /
 * colour-format pair we can consume, preferring natively supported formats.
 * On success the (null-terminated) component name is copied into
 * aDecoderName and the chosen format stored in *aColorFormat.
 */
static bool
FindPreferredDecoderAndColorFormat(const sp<IOMX>& aOmx,
                                   char *aDecoderName,
                                   size_t aDecoderLen,
                                   OMX_COLOR_FORMATTYPE *aColorFormat)
{
  Vector<CodecCapabilities> codecs;

  // Get all AVC decoder/colour format pairs that this device supports.
  QueryCodecs(aOmx, AVC_MIME_TYPE, true /* queryDecoders */, &codecs);

  // We assume that faster (hardware accelerated) decoders come first in the
  // list, so we choose the first decoder with a colour format we can use.
  for (uint32_t i = 0; i < codecs.size(); i++) {
    const CodecCapabilities &caps = codecs[i];
    const Vector<OMX_U32> &colors = caps.mColorFormats;

    bool found = false;
    for (uint32_t j = 0; j < colors.size(); j++) {
      OMX_COLOR_FORMATTYPE color = (OMX_COLOR_FORMATTYPE)colors[j];

      LOG("Decoder %s can output colour format %#x.\n",
          caps.mComponentName.string(), color);

      ColorFormatSupport supported = IsColorFormatSupported(color);

      if (supported) {
        // strncpy does not null-terminate the destination when the source is
        // aDecoderLen characters or longer; terminate explicitly so callers
        // can always treat aDecoderName as a C string.
        strncpy(aDecoderName, caps.mComponentName.string(), aDecoderLen);
        if (aDecoderLen > 0) {
          aDecoderName[aDecoderLen - 1] = '\0';
        }
        *aColorFormat = color;
        found = true;
      }

      if (supported == ColorFormatSupportPreferred) {
        // The colour format is natively supported -- that's as good as we're
        // going to get.
        break;
      }
    }

    if (found) {
      return true;
    }
  }

  return false;
}
#endif
|
395 |
|
// Create a decoder MediaSource for aVideoTrack. First tries the default
// (possibly hardware) decoder; if its output colour format is one we cannot
// consume, throws that decoder away and retries forcing a software codec.
// Returns nullptr when no usable decoder could be created.
static sp<MediaSource> CreateVideoSource(PluginHost* aPluginHost,
                                         const sp<IOMX>& aOmx,
                                         const sp<MediaSource>& aVideoTrack)
{
  uint32_t flags = GetVideoCreationFlags(aPluginHost);

  char decoderName[MAX_DECODER_NAME_LEN] = "";
  sp<MetaData> videoFormat = aVideoTrack->getFormat();

#if defined(MOZ_ANDROID_KK)
  // On KitKat we can enumerate decoders up front and steer OMXCodec toward a
  // decoder/colour-format pair we know we can consume.
  OMX_COLOR_FORMATTYPE colorFormat = (OMX_COLOR_FORMATTYPE)0;
  if (FindPreferredDecoderAndColorFormat(aOmx,
                                         decoderName, sizeof(decoderName),
                                         &colorFormat)) {
    // We found a colour format that we can handle. Tell OMXCodec to use it in
    // case it isn't the default.
    videoFormat->setInt32(kKeyColorFormat, colorFormat);

    LOG("Found compatible decoder %s with colour format %#x.\n",
        decoderName, colorFormat);
  }
#endif

  // Only attempt the default path when the user has not forced specific
  // flags via GetVideoCreationFlags(); otherwise fall through to the final
  // Create() below with the forced flags.
  if (flags == DEFAULT_STAGEFRIGHT_FLAGS) {
    // Let Stagefright choose hardware or software decoder.
    sp<MediaSource> videoSource = OMXCodec::Create(aOmx, videoFormat,
                                                   false, aVideoTrack,
                                                   decoderName[0] ? decoderName : nullptr,
                                                   flags);
    if (videoSource == nullptr)
      return nullptr;

    // Now that OMXCodec has parsed the video's AVCDecoderConfigurationRecord,
    // check whether we know how to decode this video.
    int32_t videoColorFormat;
    if (videoSource->getFormat()->findInt32(kKeyColorFormat, &videoColorFormat)) {

      if (IsColorFormatSupported((OMX_COLOR_FORMATTYPE)videoColorFormat)) {
        return videoSource;
      }

      // We need to implement a ToVideoFrame_*() color conversion
      // function for this video color format.
      LOG("Unknown video color format: %#x", videoColorFormat);
    } else {
      LOG("Video color format not found");
    }

    // Throw away the videoSource and try again with new flags.
    LOG("Falling back to software decoder");
    videoSource.clear();
    // Android 2.x lacks kSoftwareCodecsOnly; kPreferSoftwareCodecs is the
    // closest available hint there.
#if defined(MOZ_ANDROID_V2_X_X)
    flags = DEFAULT_STAGEFRIGHT_FLAGS | OMXCodec::kPreferSoftwareCodecs;
#else
    flags = DEFAULT_STAGEFRIGHT_FLAGS | OMXCodec::kSoftwareCodecsOnly;
#endif
  }

  // Reaching here means we are retrying with non-default (software) flags.
  MOZ_ASSERT(flags != DEFAULT_STAGEFRIGHT_FLAGS);
  return OMXCodec::Create(aOmx, aVideoTrack->getFormat(), false, aVideoTrack,
                          nullptr, flags);
}
|
458 |
|
// Open the resource, pick the first audio and first video track, create
// decoders for them, start both sources and cache stream parameters
// (duration, video geometry, audio channels/rate). Returns false on any
// failure; partially-created sources are stopped by the destructor.
bool OmxDecoder::Init()
{
#if defined(MOZ_WIDGET_ANDROID)
  // OMXClient::connect() always returns OK and aborts fatally if
  // it can't connect. We may need to implement the connect functionality
  // ourselves if this proves to be an issue.
  // NOTE(review): sClientInstance is only defined when MOZ_WIDGET_GONK is
  // not -- this assumes MOZ_WIDGET_ANDROID and MOZ_WIDGET_GONK are mutually
  // exclusive; confirm against the build configuration.
  if (!sClientInstance.IsValid()) {
    LOG("OMXClient failed to connect");
    return false;
  }
#endif

  //register sniffers, if they are not registered in this process.
  DataSource::RegisterDefaultSniffers();

  sp<DataSource> dataSource =
    DataSource::CreateFromURI(static_cast<char*>(mDecoder->mResource));
  // initCheck() returns a nonzero status_t on failure.
  if (!dataSource.get() || dataSource->initCheck()) {
    return false;
  }

  sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
  if (extractor == nullptr) {
    return false;
  }

  // Find the first video track and the first audio track by MIME prefix.
  ssize_t audioTrackIndex = -1;
  ssize_t videoTrackIndex = -1;
  const char *audioMime = nullptr;
  const char *videoMime = nullptr;

  for (size_t i = 0; i < extractor->countTracks(); ++i) {
    sp<MetaData> meta = extractor->getTrackMetaData(i);

    const char *mime;
    if (!meta->findCString(kKeyMIMEType, &mime)) {
      continue;
    }

    if (videoTrackIndex == -1 && !strncasecmp(mime, "video/", 6)) {
      videoTrackIndex = i;
      videoMime = mime;
    } else if (audioTrackIndex == -1 && !strncasecmp(mime, "audio/", 6)) {
      audioTrackIndex = i;
      audioMime = mime;
    }
  }

  // Nothing playable at all.
  if (videoTrackIndex == -1 && audioTrackIndex == -1) {
    return false;
  }

  int64_t totalDurationUs = 0;

#ifdef MOZ_WIDGET_GONK
  sp<IOMX> omx = GetOMX();
#else
  sp<IOMX> omx = sClientInstance.get()->interface();
#endif

  // --- Video track setup ---
  sp<MediaSource> videoTrack;
  sp<MediaSource> videoSource;
  if (videoTrackIndex != -1 && (videoTrack = extractor->getTrack(videoTrackIndex)) != nullptr) {
#if defined(MOZ_ANDROID_FROYO)
    // Allow up to 720P video.
    sp<MetaData> meta = extractor->getTrackMetaData(videoTrackIndex);
    meta->setInt32(kKeyMaxInputSize, (1280 * 720 * 3) / 2);
#endif
    videoSource = CreateVideoSource(mPluginHost, omx, videoTrack);
    if (videoSource == nullptr) {
      LOG("OMXCodec failed to initialize video decoder for \"%s\"", videoMime);
      return false;
    }
    status_t status = videoSource->start();
    if (status != OK) {
      LOG("videoSource->start() failed with status %#x", status);
      return false;
    }
    int64_t durationUs;
    if (videoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
      if (durationUs < 0)
        LOG("video duration %lld should be nonnegative", durationUs);
      if (durationUs > totalDurationUs)
        totalDurationUs = durationUs;
    }
  }

  // --- Audio track setup ---
  sp<MediaSource> audioTrack;
  sp<MediaSource> audioSource;
  if (audioTrackIndex != -1 && (audioTrack = extractor->getTrack(audioTrackIndex)) != nullptr)
  {
    // Raw PCM needs no decoder; use the track directly.
    if (!strcasecmp(audioMime, "audio/raw")) {
      audioSource = audioTrack;
    } else {
      audioSource = OMXCodec::Create(omx,
                                     audioTrack->getFormat(),
                                     false, // decoder
                                     audioTrack);
    }

    if (audioSource == nullptr) {
      LOG("OMXCodec failed to initialize audio decoder for \"%s\"", audioMime);
      return false;
    }

    status_t status = audioSource->start();
    if (status != OK) {
      LOG("audioSource->start() failed with status %#x", status);
      return false;
    }

    int64_t durationUs;
    if (audioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
      if (durationUs < 0)
        LOG("audio duration %lld should be nonnegative", durationUs);
      if (durationUs > totalDurationUs)
        totalDurationUs = durationUs;
    }
  }

  // set decoder state
  mVideoTrack = videoTrack;
  mVideoSource = videoSource;
  mAudioTrack = audioTrack;
  mAudioSource = audioSource;
  mDurationUs = totalDurationUs;

  if (mVideoSource.get() && !SetVideoFormat())
    return false;

  // To reliably get the channel and sample rate data we need to read from the
  // audio source until we get a INFO_FORMAT_CHANGE status
  if (mAudioSource.get()) {
    if (mAudioSource->read(&mAudioBuffer) != INFO_FORMAT_CHANGED) {
      // No format-change event: the first real buffer was read instead.
      // mAudioMetadataRead records that so the first ReadAudio() can consume
      // the already-read buffer rather than reading again.
      sp<MetaData> meta = mAudioSource->getFormat();
      if (!meta->findInt32(kKeyChannelCount, &mAudioChannels) ||
          !meta->findInt32(kKeySampleRate, &mAudioSampleRate)) {
        return false;
      }
      mAudioMetadataRead = true;

      if (mAudioChannels < 0) {
        LOG("audio channel count %d must be nonnegative", mAudioChannels);
        return false;
      }

      if (mAudioSampleRate < 0) {
        LOG("audio sample rate %d must be nonnegative", mAudioSampleRate);
        return false;
      }
    }
    else if (!SetAudioFormat()) {
      return false;
    }
  }
  return true;
}
|
616 |
|
// Read the decoder's output format and cache stride, slice height, colour
// format, crop rect, display size and rotation. Returns false when required
// keys are missing or the values are inconsistent.
// Note: the brace structure below is deliberately interleaved with #if
// blocks so the fallback assignments run either unconditionally (2.x) or
// only when the optional key is absent (3.x+). Edit with care.
bool OmxDecoder::SetVideoFormat() {
  sp<MetaData> format = mVideoSource->getFormat();

  // Stagefright's kKeyWidth and kKeyHeight are what MPAPI calls stride and
  // slice height. Stagefright only seems to use its kKeyStride and
  // kKeySliceHeight to initialize camera video formats.

#if defined(DEBUG) && !defined(MOZ_ANDROID_FROYO)
  int32_t unexpected;
  if (format->findInt32(kKeyStride, &unexpected))
    LOG("Expected kKeyWidth, but found kKeyStride %d", unexpected);
  if (format->findInt32(kKeySliceHeight, &unexpected))
    LOG("Expected kKeyHeight, but found kKeySliceHeight %d", unexpected);
#endif // DEBUG

  const char *componentName;

  if (!format->findInt32(kKeyWidth, &mVideoStride) ||
      !format->findInt32(kKeyHeight, &mVideoSliceHeight) ||
      !format->findCString(kKeyDecoderComponent, &componentName) ||
      !format->findInt32(kKeyColorFormat, &mVideoColorFormat) ) {
    return false;
  }

  if (mVideoStride <= 0) {
    LOG("stride %d must be positive", mVideoStride);
    return false;
  }

  if (mVideoSliceHeight <= 0) {
    LOG("slice height %d must be positive", mVideoSliceHeight);
    return false;
  }

  // Gingerbread does not support the kKeyCropRect key
#if !defined(MOZ_ANDROID_V2_X_X)
  if (!format->findRect(kKeyCropRect, &mVideoCropLeft, &mVideoCropTop,
                        &mVideoCropRight, &mVideoCropBottom)) {
#endif
    // Fallback: no crop rect, so the full buffer is displayable
    // (coordinates are inclusive).
    mVideoCropLeft = 0;
    mVideoCropTop = 0;
    mVideoCropRight = mVideoStride - 1;
    mVideoCropBottom = mVideoSliceHeight - 1;
    LOG("crop rect not available, assuming no cropping");
#if !defined(MOZ_ANDROID_V2_X_X)
  }
#endif

  if (mVideoCropLeft < 0 || mVideoCropLeft >= mVideoCropRight || mVideoCropRight >= mVideoStride ||
      mVideoCropTop < 0 || mVideoCropTop >= mVideoCropBottom || mVideoCropBottom >= mVideoSliceHeight) {
    LOG("invalid crop rect %d,%d-%d,%d", mVideoCropLeft, mVideoCropTop, mVideoCropRight, mVideoCropBottom);
    return false;
  }

  // Display size is the inclusive crop rect's extent.
  mVideoWidth = mVideoCropRight - mVideoCropLeft + 1;
  mVideoHeight = mVideoCropBottom - mVideoCropTop + 1;
  MOZ_ASSERT(mVideoWidth > 0 && mVideoWidth <= mVideoStride);
  MOZ_ASSERT(mVideoHeight > 0 && mVideoHeight <= mVideoSliceHeight);

  // Froyo has no kKeyRotation, so rotation is always 0 there.
#if !defined(MOZ_ANDROID_FROYO)
  if (!format->findInt32(kKeyRotation, &mVideoRotation)) {
#endif
    mVideoRotation = 0;
#if !defined(MOZ_ANDROID_FROYO)
    LOG("rotation not available, assuming 0");
  }
#endif

  if (mVideoRotation != 0 && mVideoRotation != 90 &&
      mVideoRotation != 180 && mVideoRotation != 270) {
    LOG("invalid rotation %d, assuming 0", mVideoRotation);
  }

  LOG("width: %d height: %d component: %s format: %#x stride: %d sliceHeight: %d rotation: %d crop: %d,%d-%d,%d",
      mVideoWidth, mVideoHeight, componentName, mVideoColorFormat,
      mVideoStride, mVideoSliceHeight, mVideoRotation,
      mVideoCropLeft, mVideoCropTop, mVideoCropRight, mVideoCropBottom);

  return true;
}
|
697 |
|
698 bool OmxDecoder::SetAudioFormat() { |
|
699 // If the format changed, update our cached info. |
|
700 if (!mAudioSource->getFormat()->findInt32(kKeyChannelCount, &mAudioChannels) || |
|
701 !mAudioSource->getFormat()->findInt32(kKeySampleRate, &mAudioSampleRate)) { |
|
702 return false; |
|
703 } |
|
704 |
|
705 LOG("channelCount: %d sampleRate: %d", mAudioChannels, mAudioSampleRate); |
|
706 |
|
707 if (mAudioChannels < 0) { |
|
708 LOG("audio channel count %d must be nonnegative", mAudioChannels); |
|
709 return false; |
|
710 } |
|
711 |
|
712 if (mAudioSampleRate < 0) { |
|
713 LOG("audio sample rate %d must be nonnegative", mAudioSampleRate); |
|
714 return false; |
|
715 } |
|
716 |
|
717 return true; |
|
718 } |
|
719 |
|
720 void OmxDecoder::ReleaseVideoBuffer() { |
|
721 if (mVideoBuffer) { |
|
722 mVideoBuffer->release(); |
|
723 mVideoBuffer = nullptr; |
|
724 } |
|
725 } |
|
726 |
|
727 void OmxDecoder::ReleaseAudioBuffer() { |
|
728 if (mAudioBuffer) { |
|
729 mAudioBuffer->release(); |
|
730 mAudioBuffer = nullptr; |
|
731 } |
|
732 } |
|
733 |
|
// Planar YUV 4:2:0: a full-resolution Y plane followed by half-resolution U
// then V planes. Fills aFrame's plane descriptors pointing into aData (no
// pixel copy).
void OmxDecoder::ToVideoFrame_YUV420Planar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
  void *y = aData;
  void *u = static_cast<uint8_t *>(y) + mVideoStride * mVideoSliceHeight;
  // NOTE(review): precedence makes this ((stride/2) * sliceHeight) / 2, which
  // differs from (stride/2) * (sliceHeight/2) when sliceHeight is odd --
  // confirm the intended rounding.
  void *v = static_cast<uint8_t *>(u) + mVideoStride/2 * mVideoSliceHeight/2;
  aFrame->Set(aTimeUs, aKeyFrame,
              aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
              y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
              u, mVideoStride/2, mVideoWidth/2, mVideoHeight/2, 0, 0,
              v, mVideoStride/2, mVideoWidth/2, mVideoHeight/2, 0, 0);
}
|
744 |
|
// Packed 4:2:2 Cb Y Cr Y: all three components live interleaved in the same
// buffer; the per-plane offset/skip arguments select which bytes belong to
// Y, Cb and Cr respectively.
void OmxDecoder::ToVideoFrame_CbYCrY(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
  aFrame->Set(aTimeUs, aKeyFrame,
              aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
              aData, mVideoStride, mVideoWidth, mVideoHeight, 1, 1,
              aData, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 3,
              aData, mVideoStride, mVideoWidth/2, mVideoHeight/2, 2, 3);
}
|
752 |
|
// Semi-planar YUV 4:2:0 (NV12): a Y plane followed by one interleaved UV
// plane; the U and V descriptors share the UV pointer with offsets 0 and 1.
void OmxDecoder::ToVideoFrame_YUV420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
  int32_t videoStride = mVideoStride;
  int32_t videoSliceHeight = mVideoSliceHeight;

  // OMX.SEC.avcdec rounds mVideoStride and mVideoSliceHeight up to the nearest
  // multiple of 16 but the data itself is too small to fit. What we do is check
  // to see if the video size matches the raw width and height. If so we can
  // use those figures instead.

  if (static_cast<int>(aSize) == mVideoWidth * mVideoHeight * 3 / 2) {
    videoStride = mVideoWidth;
    videoSliceHeight = mVideoHeight;
  }

  void *y = aData;
  void *uv = static_cast<uint8_t *>(y) + (videoStride * videoSliceHeight);
  aFrame->Set(aTimeUs, aKeyFrame,
              aData, aSize, videoStride, videoSliceHeight, mVideoRotation,
              y, videoStride, mVideoWidth, mVideoHeight, 0, 0,
              uv, videoStride, mVideoWidth/2, mVideoHeight/2, 0, 1,
              uv, videoStride, mVideoWidth/2, mVideoHeight/2, 1, 1);
}
|
775 |
|
// NV21 is NV12 with the chroma bytes swapped: reuse the semi-planar setup
// and then exchange the Cb/Cr offsets within the interleaved UV plane.
void OmxDecoder::ToVideoFrame_YVU420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
  ToVideoFrame_YUV420SemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
  aFrame->Cb.mOffset = 1;
  aFrame->Cr.mOffset = 0;
}
|
781 |
|
// TI packed semi-planar 4:2:0: like NV12 but the UV plane's placement also
// accounts for the top crop (see the pointer arithmetic below).
void OmxDecoder::ToVideoFrame_YUV420PackedSemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
  void *y = aData;
  // NOTE(review): the UV offset subtracts mVideoCropTop/2 rows -- presumably
  // matching the TI decoder's buffer layout; confirm against the codec docs.
  void *uv = static_cast<uint8_t *>(y) + mVideoStride * (mVideoSliceHeight - mVideoCropTop/2);
  aFrame->Set(aTimeUs, aKeyFrame,
              aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
              y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
              uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 1,
              uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 1, 1);
}
|
791 |
|
// Qualcomm "32m4ka" variant: stride and slice height are each rounded up to
// a multiple of 32 when locating the interleaved VU plane; chroma order is
// V-first (offsets swapped relative to NV12).
void OmxDecoder::ToVideoFrame_YVU420PackedSemiPlanar32m4ka(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
  // Round up to the next multiple of 32 via bit masking.
  size_t roundedSliceHeight = (mVideoSliceHeight + 31) & ~31;
  size_t roundedStride = (mVideoStride + 31) & ~31;
  void *y = aData;
  void *uv = static_cast<uint8_t *>(y) + (roundedStride * roundedSliceHeight);
  aFrame->Set(aTimeUs, aKeyFrame,
              aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
              y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
              uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 1, 1,
              uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 1);
}
|
803 |
|
// Copy an already-RGB565 decoder buffer (2 bytes/pixel) into a destination
// buffer obtained from the host. Returns false when the host cannot supply
// a buffer.
// NOTE(review): aKeyFrame is ignored here (and in ToVideoFrame_ColorConverter)
// while the YUV paths pass it to aFrame->Set() -- confirm whether the
// keyframe flag should be recorded on aFrame for the RGB paths too.
bool OmxDecoder::ToVideoFrame_RGB565(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback) {
  void *buffer = (*aBufferCallback)(mVideoWidth, mVideoHeight, MPAPI::RGB565);

  if (!buffer) {
    return false;
  }

  aFrame->mTimeUs = aTimeUs;

  // Assumes aData holds at least width*height*2 bytes of packed RGB565.
  memcpy(buffer, aData, mVideoWidth * mVideoHeight * 2);

  aFrame->mSize = mVideoWidth * mVideoHeight * 2;

  return true;
}
|
819 |
|
// Convert the decoder's output to RGB565 using Android's ColorConverter.
// Returns false on Honeycomb (no ColorConverter), when the source format is
// not convertible, or when the host cannot supply a destination buffer.
bool OmxDecoder::ToVideoFrame_ColorConverter(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback) {
#ifdef MOZ_ANDROID_HC
  return false;
#else
  // Lazily create and cache the converter; assumes mVideoColorFormat does
  // not change for the lifetime of this decoder.
  if (!mColorConverter) {
    mColorConverter = new ColorConverter((OMX_COLOR_FORMATTYPE)mVideoColorFormat,
                                         OMX_COLOR_Format16bitRGB565);
  }

  if (!mColorConverter->isValid()) {
    return false;
  }

  aFrame->mTimeUs = aTimeUs;

  void *buffer = (*aBufferCallback)(mVideoWidth, mVideoHeight, MPAPI::RGB565);

  if (!buffer) {
    return false;
  }

  // 2 bytes per RGB565 pixel.
  aFrame->mSize = mVideoWidth * mVideoHeight * 2;

  // The convert() signature changed between Android 2.x and 3.x+: the newer
  // API takes explicit source/destination crop rectangles.
#if defined(MOZ_ANDROID_V2_X_X)
  mColorConverter->convert(mVideoWidth, mVideoHeight,
                           aData, 0 /* srcSkip */,
                           buffer, mVideoWidth * 2);
#else
  mColorConverter->convert(aData, mVideoStride, mVideoSliceHeight,
                           mVideoCropLeft, mVideoCropTop,
                           mVideoCropLeft + mVideoWidth - 1,
                           mVideoCropTop + mVideoHeight - 1,
                           buffer, mVideoWidth, mVideoHeight,
                           0, 0, mVideoWidth - 1, mVideoHeight - 1);
#endif

  return true;
#endif
}
|
859 |
|
860 bool OmxDecoder::ToVideoFrame_I420ColorConverter(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback) |
|
861 { |
|
862 #if defined(MOZ_ANDROID_V4_OR_ABOVE) |
|
863 I420ColorConverter yuvConverter; |
|
864 |
|
865 if (!yuvConverter.isLoaded()) { |
|
866 return false; |
|
867 } |
|
868 |
|
869 if (yuvConverter.getDecoderOutputFormat() != mVideoColorFormat) { |
|
870 return false; |
|
871 } |
|
872 |
|
873 void *buffer = (*aBufferCallback)(mVideoWidth, mVideoHeight, MPAPI::I420); |
|
874 |
|
875 ARect crop = { mVideoCropLeft, mVideoCropTop, mVideoCropRight, mVideoCropBottom }; |
|
876 int result = yuvConverter.convertDecoderOutputToI420(aData, |
|
877 mVideoWidth, |
|
878 mVideoHeight, |
|
879 crop, |
|
880 buffer); |
|
881 |
|
882 // result is 0 on success, -1 otherwise. |
|
883 if (result == OK) { |
|
884 aFrame->mTimeUs = aTimeUs; |
|
885 aFrame->mSize = mVideoWidth * mVideoHeight * 3 / 2; |
|
886 } |
|
887 |
|
888 return result == OK; |
|
889 #else |
|
890 return false; |
|
891 #endif |
|
892 } |
|
893 |
|
894 bool OmxDecoder::ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback) { |
|
895 switch (mVideoColorFormat) { |
|
896 // Froyo support is best handled with the android color conversion code. I |
|
897 // get corrupted videos when using our own routines below. |
|
898 #if !defined(MOZ_ANDROID_FROYO) |
|
899 case OMX_COLOR_FormatYUV420Planar: // e.g. Asus Transformer, Stagefright's software decoder |
|
900 ToVideoFrame_YUV420Planar(aFrame, aTimeUs, aData, aSize, aKeyFrame); |
|
901 break; |
|
902 case OMX_COLOR_FormatCbYCrY: // e.g. Droid 1 |
|
903 ToVideoFrame_CbYCrY(aFrame, aTimeUs, aData, aSize, aKeyFrame); |
|
904 break; |
|
905 case OMX_COLOR_FormatYUV420SemiPlanar: // e.g. Galaxy S III |
|
906 ToVideoFrame_YUV420SemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame); |
|
907 break; |
|
908 case OMX_QCOM_COLOR_FormatYVU420SemiPlanar: // e.g. Nexus One |
|
909 ToVideoFrame_YVU420SemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame); |
|
910 break; |
|
911 case OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka: // e.g. Otoro |
|
912 ToVideoFrame_YVU420PackedSemiPlanar32m4ka(aFrame, aTimeUs, aData, aSize, aKeyFrame); |
|
913 break; |
|
914 case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar: // e.g. Galaxy Nexus |
|
915 ToVideoFrame_YUV420PackedSemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame); |
|
916 break; |
|
917 case OMX_COLOR_Format16bitRGB565: |
|
918 return ToVideoFrame_RGB565(aFrame, aTimeUs, aData, aSize, aKeyFrame, aBufferCallback); |
|
919 break; |
|
920 #endif |
|
921 default: |
|
922 if (!ToVideoFrame_ColorConverter(aFrame, aTimeUs, aData, aSize, aKeyFrame, aBufferCallback) && |
|
923 !ToVideoFrame_I420ColorConverter(aFrame, aTimeUs, aData, aSize, aKeyFrame, aBufferCallback)) { |
|
924 LOG("Unknown video color format: %#x", mVideoColorFormat); |
|
925 return false; |
|
926 } |
|
927 } |
|
928 return true; |
|
929 } |
|
930 |
|
931 bool OmxDecoder::ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize, int32_t aAudioChannels, int32_t aAudioSampleRate) |
|
932 { |
|
933 aFrame->Set(aTimeUs, reinterpret_cast<char *>(aData) + aDataOffset, aSize, aAudioChannels, aAudioSampleRate); |
|
934 return true; |
|
935 } |
|
936 |
|
// Oversized stand-in for stagefright's MediaSource::ReadOptions, used for
// every read below.  The padding is load-bearing: do not remove or shrink.
class ReadOptions : public MediaSource::ReadOptions
{
  // HTC have their own version of ReadOptions with extra fields. If we don't
  // have this here, HTCOMXCodec will corrupt our stack.
  // The array below deliberately over-allocates the object so a vendor
  // implementation writing past the stock layout stays inside our storage.
  uint32_t sadface[16];
};
|
943 |
|
// Reads the next decoded video frame into aFrame, optionally seeking
// first.  aSeekTimeUs is a microsecond target, or -1 for "no seek".
// Returns true only when the read succeeded; end-of-stream and errors
// return false (the specific condition is logged).
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs,
                           BufferCallback *aBufferCallback)
{
  MOZ_ASSERT(aSeekTimeUs >= -1);

  if (!mVideoSource.get())
    return false;

  // Return the previous buffer to the codec before requesting a new one.
  ReleaseVideoBuffer();

  status_t err;

  if (aSeekTimeUs != -1) {
    // Uses the padded ReadOptions above to survive HTC's oversized
    // vendor struct.
    ReadOptions options;
    options.setSeekTo(aSeekTimeUs);
    err = mVideoSource->read(&mVideoBuffer, &options);
  } else {
    err = mVideoSource->read(&mVideoBuffer);
  }

  // Default to an empty frame; only a successful conversion fills it.
  aFrame->mSize = 0;

  if (err == OK && mVideoBuffer->range_length() > 0) {
    int64_t timeUs;
    int32_t keyFrame;

    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs) ) {
      LOG("no frame time");
      return false;
    }

    if (timeUs < 0) {
      LOG("frame time %lld must be nonnegative", timeUs);
      return false;
    }

    // Buffers without explicit sync-frame metadata are treated as non-key.
    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
      keyFrame = 0;
    }

    // The valid payload is the buffer's range, not the whole allocation.
    char *data = reinterpret_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
    size_t length = mVideoBuffer->range_length();

    if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame, aBufferCallback)) {
      return false;
    }
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info.
    LOG("mVideoSource INFO_FORMAT_CHANGED");
    if (!SetVideoFormat())
      return false;
    else
      // Retry the read with the refreshed format.  The original seek
      // target is passed through unchanged (contrast ReadAudio, which
      // clears it before its retry).
      return ReadVideo(aFrame, aSeekTimeUs, aBufferCallback);
  }
  else if (err == ERROR_END_OF_STREAM) {
    LOG("mVideoSource END_OF_STREAM");
  }
  else if (err != OK) {
    LOG("mVideoSource ERROR %#x", err);
  }

  return err == OK;
}
|
1008 |
|
// Reads the next decoded audio frame into aFrame, optionally seeking
// first (aSeekTimeUs in microseconds, -1 for no seek).  Returns true only
// when the read succeeded; end-of-stream and errors return false.
bool OmxDecoder::ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs)
{
  MOZ_ASSERT(aSeekTimeUs >= -1);

  status_t err;
  if (mAudioMetadataRead && aSeekTimeUs == -1) {
    // Use the data read into the buffer during metadata time
    err = OK;
  }
  else {
    ReleaseAudioBuffer();
    if (aSeekTimeUs != -1) {
      ReadOptions options;
      options.setSeekTo(aSeekTimeUs);
      err = mAudioSource->read(&mAudioBuffer, &options);
    } else {
      err = mAudioSource->read(&mAudioBuffer);
    }
  }
  // The metadata-time buffer may only be consumed once.
  mAudioMetadataRead = false;

  // The seek (if any) has been performed above, so clear the target before
  // the INFO_FORMAT_CHANGED retry below — it must not seek a second time.
  // NOTE(review): this differs from ReadVideo, which re-passes the
  // original seek time on its retry; confirm which behavior is intended.
  aSeekTimeUs = -1;

  if (err == OK && mAudioBuffer->range_length() != 0) {
    int64_t timeUs;
    if (!mAudioBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
      LOG("no frame time");
      return false;
    }

    if (timeUs < 0) {
      LOG("frame time %lld must be nonnegative", timeUs);
      return false;
    }

    // Hand the buffer's valid range to the frame; ToAudioFrame applies the
    // offset itself.
    return ToAudioFrame(aFrame, timeUs,
                        mAudioBuffer->data(),
                        mAudioBuffer->range_offset(),
                        mAudioBuffer->range_length(),
                        mAudioChannels, mAudioSampleRate);
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info.
    LOG("mAudioSource INFO_FORMAT_CHANGED");
    if (!SetAudioFormat())
      return false;
    else
      return ReadAudio(aFrame, aSeekTimeUs);
  }
  else if (err == ERROR_END_OF_STREAM) {
    LOG("mAudioSource END_OF_STREAM");
  }
  else if (err != OK) {
    LOG("mAudioSource ERROR %#x", err);
  }

  return err == OK;
}
|
1067 |
|
// --- MPAPI vtable trampolines -------------------------------------------
// Each static function below recovers the OmxDecoder instance stored in
// Decoder::mPrivate (set by CreateDecoder()) and forwards to the matching
// member function.

static OmxDecoder *cast(Decoder *decoder) {
  return reinterpret_cast<OmxDecoder *>(decoder->mPrivate);
}

static void GetDuration(Decoder *aDecoder, int64_t *durationUs) {
  cast(aDecoder)->GetDuration(durationUs);
}

static void GetVideoParameters(Decoder *aDecoder, int32_t *width, int32_t *height) {
  cast(aDecoder)->GetVideoParameters(width, height);
}

static void GetAudioParameters(Decoder *aDecoder, int32_t *numChannels, int32_t *sampleRate) {
  cast(aDecoder)->GetAudioParameters(numChannels, sampleRate);
}

static bool HasVideo(Decoder *aDecoder) {
  return cast(aDecoder)->HasVideo();
}

static bool HasAudio(Decoder *aDecoder) {
  return cast(aDecoder)->HasAudio();
}

static bool ReadVideo(Decoder *aDecoder, VideoFrame *aFrame, int64_t aSeekTimeUs, BufferCallback *aBufferCallback)
{
  return cast(aDecoder)->ReadVideo(aFrame, aSeekTimeUs, aBufferCallback);
}

static bool ReadAudio(Decoder *aDecoder, AudioFrame *aFrame, int64_t aSeekTimeUs)
{
  return cast(aDecoder)->ReadAudio(aFrame, aSeekTimeUs);
}
|
1101 |
|
1102 static void DestroyDecoder(Decoder *aDecoder) |
|
1103 { |
|
1104 if (aDecoder->mPrivate) |
|
1105 delete reinterpret_cast<OmxDecoder *>(aDecoder->mPrivate); |
|
1106 } |
|
1107 |
|
// Compares a (not necessarily NUL-terminated) MIME string of aMimeLen
// bytes against the NUL-terminated aNeedle.  Matches only on exact
// equality: the old code compared just the first aMimeLen bytes, so a
// short prefix such as "vid" (length 3) incorrectly matched "video/mp4".
// Requiring aNeedle to terminate exactly at aMimeLen fixes that while
// keeping every previous exact match.
static bool Match(const char *aMimeChars, size_t aMimeLen, const char *aNeedle)
{
  return !strncmp(aMimeChars, aNeedle, aMimeLen) && aNeedle[aMimeLen] == '\0';
}
|
1112 |
|
// nullptr-terminated list of codec strings this plugin claims to support;
// handed back to the host through CanDecode()'s aCodecs out-parameter.
static const char* const gCodecs[] = {
  "avc1.42E01E", // H.264 Constrained Baseline Profile Level 3.0
  "avc1.42001E", // H.264 Baseline Profile Level 3.0
  "avc1.42001F", // H.264 Baseline Profile Level 3.1
  "avc1.4D401E", // H.264 Main Profile Level 3.0
  "avc1.4D401F", // H.264 Main Profile Level 3.1
  "mp4a.40.2",   // AAC-LC
  nullptr
};
|
1122 |
|
1123 static bool CanDecode(const char *aMimeChars, size_t aMimeLen, const char* const**aCodecs) |
|
1124 { |
|
1125 if (!Match(aMimeChars, aMimeLen, "video/mp4") && |
|
1126 !Match(aMimeChars, aMimeLen, "audio/mp4") && |
|
1127 !Match(aMimeChars, aMimeLen, "audio/mpeg") && |
|
1128 !Match(aMimeChars, aMimeLen, "application/octet-stream")) { // file urls |
|
1129 return false; |
|
1130 } |
|
1131 *aCodecs = gCodecs; |
|
1132 |
|
1133 return true; |
|
1134 } |
|
1135 |
|
1136 static bool CreateDecoder(PluginHost *aPluginHost, Decoder *aDecoder, const char *aMimeChars, size_t aMimeLen) |
|
1137 { |
|
1138 OmxDecoder *omx = new OmxDecoder(aPluginHost, aDecoder); |
|
1139 if (!omx || !omx->Init()) { |
|
1140 if (omx) |
|
1141 delete omx; |
|
1142 return false; |
|
1143 } |
|
1144 |
|
1145 aDecoder->mPrivate = omx; |
|
1146 aDecoder->GetDuration = GetDuration; |
|
1147 aDecoder->GetVideoParameters = GetVideoParameters; |
|
1148 aDecoder->GetAudioParameters = GetAudioParameters; |
|
1149 aDecoder->HasVideo = HasVideo; |
|
1150 aDecoder->HasAudio = HasAudio; |
|
1151 aDecoder->ReadVideo = ReadVideo; |
|
1152 aDecoder->ReadAudio = ReadAudio; |
|
1153 aDecoder->DestroyDecoder = DestroyDecoder; |
|
1154 |
|
1155 return true; |
|
1156 } |
|
1157 |
|
1158 } // namespace OmxPlugin |
|
1159 |
|
// Export the manifest so MPAPI can find our entry points.
// Field order must match the MPAPI Manifest struct: CanDecode first,
// CreateDecoder second.
Manifest MOZ_EXPORT MPAPI_MANIFEST = {
  OmxPlugin::CanDecode,
  OmxPlugin::CreateDecoder
};