media/omx-plugin/OmxPlugin.cpp

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Thu, 22 Jan 2015 13:21:57 +0100
branch       TOR_BUG_9701
changeset    15:b8a032363ba2
permissions  -rw-r--r--

Incorporate requested changes from Mozilla in review:
https://bugzilla.mozilla.org/show_bug.cgi?id=1123480#c6

michael@0 1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
michael@0 2 /* vim:set ts=2 sw=2 sts=2 et cindent: */
michael@0 3 /* This Source Code Form is subject to the terms of the Mozilla Public
michael@0 4 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
michael@0 5 * You can obtain one at http://mozilla.org/MPL/2.0/. */
michael@0 6
michael@0 7 #include <stagefright/ColorConverter.h>
michael@0 8 #include <stagefright/DataSource.h>
michael@0 9 #include <stagefright/MediaExtractor.h>
michael@0 10 #include <stagefright/MetaData.h>
michael@0 11 #include <stagefright/OMXCodec.h>
michael@0 12 #include <media/stagefright/MediaErrors.h>
michael@0 13 #ifdef MOZ_WIDGET_GONK
michael@0 14 #include <OMX.h>
michael@0 15 #else
michael@0 16 #include <stagefright/OMXClient.h>
michael@0 17 #endif
michael@0 18 #include <algorithm>
michael@0 19
michael@0 20 #include "mozilla/Assertions.h"
michael@0 21 #include "mozilla/Types.h"
michael@0 22 #include "MPAPI.h"
michael@0 23
michael@0 24 #include "android/log.h"
michael@0 25
michael@0 26 #define MAX_DECODER_NAME_LEN 256
michael@0 27 #define AVC_MIME_TYPE "video/avc"
michael@0 28
michael@0 29 #if !defined(MOZ_ANDROID_FROYO)
michael@0 30 #define DEFAULT_STAGEFRIGHT_FLAGS OMXCodec::kClientNeedsFramebuffer
michael@0 31 #else
michael@0 32 #define DEFAULT_STAGEFRIGHT_FLAGS 0
michael@0 33 #endif
michael@0 34
michael@0 35 #undef LOG
michael@0 36 #define LOG(args...) __android_log_print(ANDROID_LOG_INFO, "OmxPlugin", ## args)
michael@0 37
michael@0 38 #if defined(MOZ_ANDROID_FROYO) || defined(MOZ_ANDROID_GB)
michael@0 39 // Android 2.x.x versions share a common set of API differences.
michael@0 40 #define MOZ_ANDROID_V2_X_X
michael@0 41 #endif
michael@0 42
michael@0 43 #if !defined(MOZ_ANDROID_V2_X_X) && !defined(MOZ_ANDROID_HC)
michael@0 44 #define MOZ_ANDROID_V4_OR_ABOVE
michael@0 45 #endif
michael@0 46
michael@0 47 #if defined(MOZ_ANDROID_V4_OR_ABOVE)
michael@0 48 #include <I420ColorConverter.h>
michael@0 49 #endif
michael@0 50
michael@0 51 using namespace MPAPI;
michael@0 52
michael@0 53 #if !defined(MOZ_STAGEFRIGHT_OFF_T)
michael@0 54 #define MOZ_STAGEFRIGHT_OFF_T off64_t
michael@0 55 #endif
michael@0 56
michael@0 57 using namespace android;
michael@0 58
michael@0 59 namespace OmxPlugin {
michael@0 60
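// Vendor-specific colour formats reported by Qualcomm and TI decoders. These
// values sit in the range OpenMAX reserves for vendor extensions, so they are
// not part of the standard OMX_COLOR_FORMATTYPE enumeration.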
michael@0 61 const int OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
michael@0 62 const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00;
michael@0 63 const int OMX_TI_COLOR_FormatYUV420PackedSemiPlanar = 0x7F000100;
michael@0 64
michael@0 65 class OmxDecoder {
michael@0 66 PluginHost *mPluginHost;
michael@0 67 Decoder *mDecoder;
michael@0 68 sp<MediaSource> mVideoTrack;
michael@0 69 sp<MediaSource> mVideoSource;
michael@0 70 sp<MediaSource> mAudioTrack;
michael@0 71 sp<MediaSource> mAudioSource;
michael@0 72 int32_t mVideoWidth;
michael@0 73 int32_t mVideoHeight;
michael@0 74 int32_t mVideoColorFormat;
michael@0 75 int32_t mVideoStride;
michael@0 76 int32_t mVideoSliceHeight;
michael@0 77 int32_t mVideoCropLeft;
michael@0 78 int32_t mVideoCropTop;
michael@0 79 int32_t mVideoCropRight;
michael@0 80 int32_t mVideoCropBottom;
michael@0 81 int32_t mVideoRotation;
michael@0 82 int32_t mAudioChannels;
michael@0 83 int32_t mAudioSampleRate;
michael@0 84 int64_t mDurationUs;
michael@0 85 MediaBuffer *mVideoBuffer;
michael@0 86 VideoFrame mVideoFrame;
michael@0 87 MediaBuffer *mAudioBuffer;
michael@0 88 AudioFrame mAudioFrame;
michael@0 89 ColorConverter *mColorConverter;
michael@0 90
michael@0 91 // 'true' if a read from the audio stream was done while reading the metadata
michael@0 92 bool mAudioMetadataRead;
michael@0 93
michael@0 94 void ReleaseVideoBuffer();
michael@0 95 void ReleaseAudioBuffer();
michael@0 96
michael@0 97 void ToVideoFrame_YUV420Planar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
michael@0 98 void ToVideoFrame_CbYCrY(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
michael@0 99 void ToVideoFrame_YUV420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
michael@0 100 void ToVideoFrame_YVU420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
michael@0 101 void ToVideoFrame_YUV420PackedSemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
michael@0 102 void ToVideoFrame_YVU420PackedSemiPlanar32m4ka(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
michael@0 103 bool ToVideoFrame_RGB565(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
michael@0 104 bool ToVideoFrame_ColorConverter(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
michael@0 105 bool ToVideoFrame_I420ColorConverter(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
michael@0 106 bool ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
michael@0 107 bool ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize,
michael@0 108 int32_t aAudioChannels, int32_t aAudioSampleRate);
michael@0 109 public:
michael@0 110 OmxDecoder(PluginHost *aPluginHost, Decoder *aDecoder);
michael@0 111 ~OmxDecoder();
michael@0 112
michael@0 113 bool Init();
michael@0 114 bool SetVideoFormat();
michael@0 115 bool SetAudioFormat();
michael@0 116
michael@0 117 void GetDuration(int64_t *durationUs) {
michael@0 118 *durationUs = mDurationUs;
michael@0 119 }
michael@0 120
michael@0 121 void GetVideoParameters(int32_t *width, int32_t *height) {
michael@0 122 *width = mVideoWidth;
michael@0 123 *height = mVideoHeight;
michael@0 124 }
michael@0 125
michael@0 126 void GetAudioParameters(int32_t *numChannels, int32_t *sampleRate) {
michael@0 127 *numChannels = mAudioChannels;
michael@0 128 *sampleRate = mAudioSampleRate;
michael@0 129 }
michael@0 130
michael@0 131 bool HasVideo() {
michael@0 132 return mVideoSource != nullptr;
michael@0 133 }
michael@0 134
michael@0 135 bool HasAudio() {
michael@0 136 return mAudioSource != nullptr;
michael@0 137 }
michael@0 138
michael@0 139 bool ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs, BufferCallback *aBufferCallback);
michael@0 140 bool ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs);
michael@0 141 };
michael@0 142
michael@0 143 #if !defined(MOZ_WIDGET_GONK)
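// Process-wide OMXClient singleton: connecting once in a static constructor
// lets every OmxDecoder share the same client, and the destructor disconnects
// it when the plugin is unloaded.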
michael@0 144 static class OmxClientInstance {
michael@0 145 public:
michael@0 146 OmxClientInstance()
michael@0 147 : mClient(new OMXClient())
michael@0 148 , mStatus(mClient->connect())
michael@0 149 {
michael@0 150 }
michael@0 151
michael@0 152 status_t IsValid()
michael@0 153 {
michael@0 154 return mStatus == OK;
michael@0 155 }
michael@0 156
michael@0 157 OMXClient *get()
michael@0 158 {
michael@0 159 return mClient;
michael@0 160 }
michael@0 161
michael@0 162 ~OmxClientInstance()
michael@0 163 {
michael@0 164 if (mStatus == OK) {
michael@0 165 mClient->disconnect();
michael@0 166 }
michael@0 167 delete mClient;
michael@0 168 }
michael@0 169
michael@0 170 private:
michael@0 171 OMXClient *mClient;
michael@0 172 status_t mStatus;
michael@0 173 } sClientInstance;
michael@0 174 #endif
michael@0 175
michael@0 176 OmxDecoder::OmxDecoder(PluginHost *aPluginHost, Decoder *aDecoder) :
michael@0 177 mPluginHost(aPluginHost),
michael@0 178 mDecoder(aDecoder),
michael@0 179 mVideoWidth(0),
michael@0 180 mVideoHeight(0),
michael@0 181 mVideoColorFormat(0),
michael@0 182 mVideoStride(0),
michael@0 183 mVideoSliceHeight(0),
michael@0 184 mVideoCropLeft(0),
michael@0 185 mVideoCropTop(0),
michael@0 186 mVideoCropRight(0),
michael@0 187 mVideoCropBottom(0),
michael@0 188 mVideoRotation(0),
michael@0 189 mAudioChannels(-1),
michael@0 190 mAudioSampleRate(-1),
michael@0 191 mDurationUs(-1),
michael@0 192 mVideoBuffer(nullptr),
michael@0 193 mAudioBuffer(nullptr),
michael@0 194 mColorConverter(nullptr),
michael@0 195 mAudioMetadataRead(false)
michael@0 196 {
michael@0 197 }
michael@0 198
michael@0 199 OmxDecoder::~OmxDecoder()
michael@0 200 {
michael@0 201 ReleaseVideoBuffer();
michael@0 202 ReleaseAudioBuffer();
michael@0 203
michael@0 204 if (mVideoSource.get()) {
michael@0 205 mVideoSource->stop();
michael@0 206 }
michael@0 207
michael@0 208 if (mAudioSource.get()) {
michael@0 209 mAudioSource->stop();
michael@0 210 }
michael@0 211
michael@0 212 #ifndef MOZ_ANDROID_HC
michael@0 213 if (mColorConverter) {
michael@0 214 delete mColorConverter;
michael@0 215 }
michael@0 216 #endif
michael@0 217 }
michael@0 218
michael@0 219 class AutoStopMediaSource {
michael@0 220 sp<MediaSource> mMediaSource;
michael@0 221 public:
michael@0 222 AutoStopMediaSource(sp<MediaSource> aMediaSource) : mMediaSource(aMediaSource) {
michael@0 223 }
michael@0 224
michael@0 225 ~AutoStopMediaSource() {
michael@0 226 mMediaSource->stop();
michael@0 227 }
michael@0 228 };
michael@0 229
michael@0 230 #ifdef MOZ_WIDGET_GONK
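// On Gonk we instantiate the OMX implementation directly in this process
// rather than connecting through OMXClient.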
michael@0 231 static sp<IOMX> sOMX = nullptr;
michael@0 232 static sp<IOMX> GetOMX() {
michael@0 233 if(sOMX.get() == nullptr) {
michael@0 234 sOMX = reinterpret_cast<IOMX*>(new OMX);
michael@0 235 }
michael@0 236 return sOMX;
michael@0 237 }
michael@0 238 #endif
michael@0 239
michael@0 240 static uint32_t
michael@0 241 GetDefaultStagefrightFlags(PluginHost *aPluginHost)
michael@0 242 {
michael@0 243 uint32_t flags = DEFAULT_STAGEFRIGHT_FLAGS;
michael@0 244
michael@0 245 #if !defined(MOZ_ANDROID_FROYO)
michael@0 246
michael@0 247 char hardware[256] = "";
michael@0 248 aPluginHost->GetSystemInfoString("hardware", hardware, sizeof(hardware));
michael@0 249
michael@0 250 if (!strcmp("qcom", hardware)) {
michael@0 251 // Qualcomm's OMXCodec implementation interprets this flag to mean that we
michael@0 252 // only want a thumbnail and therefore only need one frame. After the first
michael@0 253 // frame it returns EOS.
michael@0 254 // All other OMXCodec implementations seen so far interpret this flag
michael@0 255 // sanely; some do not return full framebuffers unless this flag is passed.
michael@0 256 flags &= ~OMXCodec::kClientNeedsFramebuffer;
michael@0 257 }
michael@0 258
michael@0 259 LOG("Hardware %s; using default flags %#x\n", hardware, flags);
michael@0 260
michael@0 261 #endif
michael@0 262
michael@0 263 return flags;
michael@0 264 }
michael@0 265
michael@0 266 static uint32_t GetVideoCreationFlags(PluginHost* aPluginHost)
michael@0 267 {
michael@0 268 #ifdef MOZ_WIDGET_GONK
michael@0 269 // Flag value of zero means return a hardware or software decoder
michael@0 270 // depending on what the device supports.
michael@0 271 return 0;
michael@0 272 #else
michael@0 273 // Check whether the user has set a pref to override our default OMXCodec
michael@0 274 // CreationFlags flags. This is useful for A/B testing hardware and software
michael@0 275 // decoders for performance and bugs. The interesting flag values are:
michael@0 276 // 0 = Let Stagefright choose hardware or software decoding (default)
michael@0 277 // 8 = Force software decoding
michael@0 278 // 16 = Force hardware decoding
michael@0 279 int32_t flags = 0;
michael@0 280 aPluginHost->GetIntPref("media.stagefright.omxcodec.flags", &flags);
michael@0 281 if (flags != 0) {
michael@0 282 #if !defined(MOZ_ANDROID_V2_X_X)
michael@0 283 LOG("media.stagefright.omxcodec.flags=%d", flags);
michael@0 284 if ((flags & OMXCodec::kHardwareCodecsOnly) != 0) {
michael@0 285 LOG("FORCE HARDWARE DECODING");
michael@0 286 } else if ((flags & OMXCodec::kSoftwareCodecsOnly) != 0) {
michael@0 287 LOG("FORCE SOFTWARE DECODING");
michael@0 288 }
michael@0 289 #endif
michael@0 290 }
michael@0 291
michael@0 292 flags |= GetDefaultStagefrightFlags(aPluginHost);
michael@0 293
michael@0 294 return static_cast<uint32_t>(flags);
michael@0 295 #endif
michael@0 296 }
michael@0 297
michael@0 298 enum ColorFormatSupport {
michael@0 299 ColorFormatNotSupported = 0,
michael@0 300 ColorFormatSupportOK,
michael@0 301 ColorFormatSupportPreferred,
michael@0 302 };
michael@0 303
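// Classify how well we can handle a decoder output colour format: preferred
// (converted natively by the routines below), merely OK (an Android-supplied
// converter can handle it), or not supported at all.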
michael@0 304 static ColorFormatSupport
michael@0 305 IsColorFormatSupported(OMX_COLOR_FORMATTYPE aColorFormat)
michael@0 306 {
michael@0 307 switch (static_cast<int>(aColorFormat)) {
michael@0 308 case OMX_COLOR_FormatCbYCrY:
michael@0 309 case OMX_COLOR_FormatYUV420Planar:
michael@0 310 case OMX_COLOR_FormatYUV420SemiPlanar:
michael@0 311 case OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka:
michael@0 312 case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
michael@0 313 case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
michael@0 314 LOG("Colour format %#x supported natively.", aColorFormat);
michael@0 315 // Prefer natively supported colour formats over formats that need another
michael@0 316 // slow software conversion.
michael@0 317 return ColorFormatSupportPreferred;
michael@0 318 default:
michael@0 319 break;
michael@0 320 }
michael@0 321
michael@0 322 // These formats are okay if we can't find a better one; Android provides a
michael@0 323 // software conversion to a sane colour format.
michael@0 324 #if !defined(MOZ_ANDROID_HC)
michael@0 325 if (ColorConverter(aColorFormat, OMX_COLOR_Format16bitRGB565).isValid()) {
michael@0 326 LOG("Colour format %#x supported by Android ColorConverter.", aColorFormat);
michael@0 327 return ColorFormatSupportOK;
michael@0 328 }
michael@0 329 #endif
michael@0 330
michael@0 331 #if defined(MOZ_ANDROID_V4_OR_ABOVE)
michael@0 332 I420ColorConverter yuvConverter;
michael@0 333
michael@0 334 if (yuvConverter.isLoaded() &&
michael@0 335 yuvConverter.getDecoderOutputFormat() == aColorFormat) {
michael@0 336 LOG("Colour format %#x supported by Android I420ColorConverter.", aColorFormat);
michael@0 337 return ColorFormatSupportOK;
michael@0 338 }
michael@0 339 #endif
michael@0 340
michael@0 341 return ColorFormatNotSupported;
michael@0 342 }
michael@0 343
michael@0 344 #if defined(MOZ_ANDROID_KK)
michael@0 345 /**
michael@0 346 * Look for a decoder that supports a colour format that we support.
michael@0 347 */
michael@0 348 static bool
michael@0 349 FindPreferredDecoderAndColorFormat(const sp<IOMX>& aOmx,
michael@0 350 char *aDecoderName,
michael@0 351 size_t aDecoderLen,
michael@0 352 OMX_COLOR_FORMATTYPE *aColorFormat)
michael@0 353 {
michael@0 354 Vector<CodecCapabilities> codecs;
michael@0 355
michael@0 356 // Get all AVC decoder/colour format pairs that this device supports.
michael@0 357 QueryCodecs(aOmx, AVC_MIME_TYPE, true /* queryDecoders */, &codecs);
michael@0 358
michael@0 359 // We assume that faster (hardware accelerated) decoders come first in the
michael@0 360 // list, so we choose the first decoder with a colour format we can use.
michael@0 361 for (uint32_t i = 0; i < codecs.size(); i++) {
michael@0 362 const CodecCapabilities &caps = codecs[i];
michael@0 363 const Vector<OMX_U32> &colors = caps.mColorFormats;
michael@0 364
michael@0 365 bool found = false;
michael@0 366 for (uint32_t j = 0; j < colors.size(); j++) {
michael@0 367 OMX_COLOR_FORMATTYPE color = (OMX_COLOR_FORMATTYPE)colors[j];
michael@0 368
michael@0 369 LOG("Decoder %s can output colour format %#x.\n",
michael@0 370 caps.mComponentName.string(), color);
michael@0 371
michael@0 372 ColorFormatSupport supported = IsColorFormatSupported(color);
michael@0 373
michael@0 374 if (supported) {
michael@0 375 strncpy(aDecoderName, caps.mComponentName.string(), aDecoderLen);
michael@0 376 *aColorFormat = color;
michael@0 377 found = true;
michael@0 378 }
michael@0 379
michael@0 380 if (supported == ColorFormatSupportPreferred) {
michael@0 381 // The colour format is natively supported -- that's as good as we're
michael@0 382 // going to get.
michael@0 383 break;
michael@0 384 }
michael@0 385 }
michael@0 386
michael@0 387 if (found) {
michael@0 388 return true;
michael@0 389 }
michael@0 390 }
michael@0 391
michael@0 392 return false;
michael@0 393 }
michael@0 394 #endif
michael@0 395
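// Create the OMXCodec video source, first trying the default (typically
// hardware) decoder and falling back to a software decoder if its output
// colour format is one we cannot convert.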
michael@0 396 static sp<MediaSource> CreateVideoSource(PluginHost* aPluginHost,
michael@0 397 const sp<IOMX>& aOmx,
michael@0 398 const sp<MediaSource>& aVideoTrack)
michael@0 399 {
michael@0 400 uint32_t flags = GetVideoCreationFlags(aPluginHost);
michael@0 401
michael@0 402 char decoderName[MAX_DECODER_NAME_LEN] = "";
michael@0 403 sp<MetaData> videoFormat = aVideoTrack->getFormat();
michael@0 404
michael@0 405 #if defined(MOZ_ANDROID_KK)
michael@0 406 OMX_COLOR_FORMATTYPE colorFormat = (OMX_COLOR_FORMATTYPE)0;
michael@0 407 if (FindPreferredDecoderAndColorFormat(aOmx,
michael@0 408 decoderName, sizeof(decoderName),
michael@0 409 &colorFormat)) {
michael@0 410 // We found a colour format that we can handle. Tell OMXCodec to use it in
michael@0 411 // case it isn't the default.
michael@0 412 videoFormat->setInt32(kKeyColorFormat, colorFormat);
michael@0 413
michael@0 414 LOG("Found compatible decoder %s with colour format %#x.\n",
michael@0 415 decoderName, colorFormat);
michael@0 416 }
michael@0 417 #endif
michael@0 418
michael@0 419 if (flags == DEFAULT_STAGEFRIGHT_FLAGS) {
michael@0 420 // Let Stagefright choose hardware or software decoder.
michael@0 421 sp<MediaSource> videoSource = OMXCodec::Create(aOmx, videoFormat,
michael@0 422 false, aVideoTrack,
michael@0 423 decoderName[0] ? decoderName : nullptr,
michael@0 424 flags);
michael@0 425 if (videoSource == nullptr)
michael@0 426 return nullptr;
michael@0 427
michael@0 428 // Now that OMXCodec has parsed the video's AVCDecoderConfigurationRecord,
michael@0 429 // check whether we know how to decode this video.
michael@0 430 int32_t videoColorFormat;
michael@0 431 if (videoSource->getFormat()->findInt32(kKeyColorFormat, &videoColorFormat)) {
michael@0 432
michael@0 433 if (IsColorFormatSupported((OMX_COLOR_FORMATTYPE)videoColorFormat)) {
michael@0 434 return videoSource;
michael@0 435 }
michael@0 436
michael@0 437 // We need to implement a ToVideoFrame_*() color conversion
michael@0 438 // function for this video color format.
michael@0 439 LOG("Unknown video color format: %#x", videoColorFormat);
michael@0 440 } else {
michael@0 441 LOG("Video color format not found");
michael@0 442 }
michael@0 443
michael@0 444 // Throw away the videoSource and try again with new flags.
michael@0 445 LOG("Falling back to software decoder");
michael@0 446 videoSource.clear();
michael@0 447 #if defined(MOZ_ANDROID_V2_X_X)
michael@0 448 flags = DEFAULT_STAGEFRIGHT_FLAGS | OMXCodec::kPreferSoftwareCodecs;
michael@0 449 #else
michael@0 450 flags = DEFAULT_STAGEFRIGHT_FLAGS | OMXCodec::kSoftwareCodecsOnly;
michael@0 451 #endif
michael@0 452 }
michael@0 453
michael@0 454 MOZ_ASSERT(flags != DEFAULT_STAGEFRIGHT_FLAGS);
michael@0 455 return OMXCodec::Create(aOmx, aVideoTrack->getFormat(), false, aVideoTrack,
michael@0 456 nullptr, flags);
michael@0 457 }
michael@0 458
michael@0 459 bool OmxDecoder::Init()
michael@0 460 {
michael@0 461 #if defined(MOZ_WIDGET_ANDROID)
michael@0 462 // OMXClient::connect() always returns OK and aborts fatally if
michael@0 463 // it can't connect. We may need to implement the connect functionality
michael@0 464 // ourselves if this proves to be an issue.
michael@0 465 if (!sClientInstance.IsValid()) {
michael@0 466 LOG("OMXClient failed to connect");
michael@0 467 return false;
michael@0 468 }
michael@0 469 #endif
michael@0 470
michael@0 471 // Register the default sniffers if they are not already registered in this process.
michael@0 472 DataSource::RegisterDefaultSniffers();
michael@0 473
michael@0 474 sp<DataSource> dataSource =
michael@0 475 DataSource::CreateFromURI(static_cast<char*>(mDecoder->mResource));
michael@0 476 if (!dataSource.get() || dataSource->initCheck()) {
michael@0 477 return false;
michael@0 478 }
michael@0 479
michael@0 480 sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
michael@0 481 if (extractor == nullptr) {
michael@0 482 return false;
michael@0 483 }
michael@0 484
michael@0 485 ssize_t audioTrackIndex = -1;
michael@0 486 ssize_t videoTrackIndex = -1;
michael@0 487 const char *audioMime = nullptr;
michael@0 488 const char *videoMime = nullptr;
michael@0 489
michael@0 490 for (size_t i = 0; i < extractor->countTracks(); ++i) {
michael@0 491 sp<MetaData> meta = extractor->getTrackMetaData(i);
michael@0 492
michael@0 493 const char *mime;
michael@0 494 if (!meta->findCString(kKeyMIMEType, &mime)) {
michael@0 495 continue;
michael@0 496 }
michael@0 497
michael@0 498 if (videoTrackIndex == -1 && !strncasecmp(mime, "video/", 6)) {
michael@0 499 videoTrackIndex = i;
michael@0 500 videoMime = mime;
michael@0 501 } else if (audioTrackIndex == -1 && !strncasecmp(mime, "audio/", 6)) {
michael@0 502 audioTrackIndex = i;
michael@0 503 audioMime = mime;
michael@0 504 }
michael@0 505 }
michael@0 506
michael@0 507 if (videoTrackIndex == -1 && audioTrackIndex == -1) {
michael@0 508 return false;
michael@0 509 }
michael@0 510
michael@0 511 int64_t totalDurationUs = 0;
michael@0 512
michael@0 513 #ifdef MOZ_WIDGET_GONK
michael@0 514 sp<IOMX> omx = GetOMX();
michael@0 515 #else
michael@0 516 sp<IOMX> omx = sClientInstance.get()->interface();
michael@0 517 #endif
michael@0 518
michael@0 519 sp<MediaSource> videoTrack;
michael@0 520 sp<MediaSource> videoSource;
michael@0 521 if (videoTrackIndex != -1 && (videoTrack = extractor->getTrack(videoTrackIndex)) != nullptr) {
michael@0 522 #if defined(MOZ_ANDROID_FROYO)
michael@0 523 // Allow up to 720P video.
michael@0 524 sp<MetaData> meta = extractor->getTrackMetaData(videoTrackIndex);
michael@0 525 meta->setInt32(kKeyMaxInputSize, (1280 * 720 * 3) / 2);
michael@0 526 #endif
michael@0 527 videoSource = CreateVideoSource(mPluginHost, omx, videoTrack);
michael@0 528 if (videoSource == nullptr) {
michael@0 529 LOG("OMXCodec failed to initialize video decoder for \"%s\"", videoMime);
michael@0 530 return false;
michael@0 531 }
michael@0 532 status_t status = videoSource->start();
michael@0 533 if (status != OK) {
michael@0 534 LOG("videoSource->start() failed with status %#x", status);
michael@0 535 return false;
michael@0 536 }
michael@0 537 int64_t durationUs;
michael@0 538 if (videoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
michael@0 539 if (durationUs < 0)
michael@0 540 LOG("video duration %lld should be nonnegative", durationUs);
michael@0 541 if (durationUs > totalDurationUs)
michael@0 542 totalDurationUs = durationUs;
michael@0 543 }
michael@0 544 }
michael@0 545
michael@0 546 sp<MediaSource> audioTrack;
michael@0 547 sp<MediaSource> audioSource;
michael@0 548 if (audioTrackIndex != -1 && (audioTrack = extractor->getTrack(audioTrackIndex)) != nullptr)
michael@0 549 {
michael@0 550 if (!strcasecmp(audioMime, "audio/raw")) {
michael@0 551 audioSource = audioTrack;
michael@0 552 } else {
michael@0 553 audioSource = OMXCodec::Create(omx,
michael@0 554 audioTrack->getFormat(),
michael@0 555 false, // decoder
michael@0 556 audioTrack);
michael@0 557 }
michael@0 558
michael@0 559 if (audioSource == nullptr) {
michael@0 560 LOG("OMXCodec failed to initialize audio decoder for \"%s\"", audioMime);
michael@0 561 return false;
michael@0 562 }
michael@0 563
michael@0 564 status_t status = audioSource->start();
michael@0 565 if (status != OK) {
michael@0 566 LOG("audioSource->start() failed with status %#x", status);
michael@0 567 return false;
michael@0 568 }
michael@0 569
michael@0 570 int64_t durationUs;
michael@0 571 if (audioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
michael@0 572 if (durationUs < 0)
michael@0 573 LOG("audio duration %lld should be nonnegative", durationUs);
michael@0 574 if (durationUs > totalDurationUs)
michael@0 575 totalDurationUs = durationUs;
michael@0 576 }
michael@0 577 }
michael@0 578
michael@0 579 // set decoder state
michael@0 580 mVideoTrack = videoTrack;
michael@0 581 mVideoSource = videoSource;
michael@0 582 mAudioTrack = audioTrack;
michael@0 583 mAudioSource = audioSource;
michael@0 584 mDurationUs = totalDurationUs;
michael@0 585
michael@0 586 if (mVideoSource.get() && !SetVideoFormat())
michael@0 587 return false;
michael@0 588
michael@0 589 // To reliably get the channel count and sample rate we need to read from the
michael@0 590 // audio source until we get an INFO_FORMAT_CHANGED status.
michael@0 591 if (mAudioSource.get()) {
michael@0 592 if (mAudioSource->read(&mAudioBuffer) != INFO_FORMAT_CHANGED) {
michael@0 593 sp<MetaData> meta = mAudioSource->getFormat();
michael@0 594 if (!meta->findInt32(kKeyChannelCount, &mAudioChannels) ||
michael@0 595 !meta->findInt32(kKeySampleRate, &mAudioSampleRate)) {
michael@0 596 return false;
michael@0 597 }
michael@0 598 mAudioMetadataRead = true;
michael@0 599
michael@0 600 if (mAudioChannels < 0) {
michael@0 601 LOG("audio channel count %d must be nonnegative", mAudioChannels);
michael@0 602 return false;
michael@0 603 }
michael@0 604
michael@0 605 if (mAudioSampleRate < 0) {
michael@0 606 LOG("audio sample rate %d must be nonnegative", mAudioSampleRate);
michael@0 607 return false;
michael@0 608 }
michael@0 609 }
michael@0 610 else if (!SetAudioFormat()) {
michael@0 611 return false;
michael@0 612 }
michael@0 613 }
michael@0 614 return true;
michael@0 615 }
michael@0 616
michael@0 617 bool OmxDecoder::SetVideoFormat() {
michael@0 618 sp<MetaData> format = mVideoSource->getFormat();
michael@0 619
michael@0 620 // Stagefright's kKeyWidth and kKeyHeight are what MPAPI calls stride and
michael@0 621 // slice height. Stagefright only seems to use its kKeyStride and
michael@0 622 // kKeySliceHeight to initialize camera video formats.
michael@0 623
michael@0 624 #if defined(DEBUG) && !defined(MOZ_ANDROID_FROYO)
michael@0 625 int32_t unexpected;
michael@0 626 if (format->findInt32(kKeyStride, &unexpected))
michael@0 627 LOG("Expected kKeyWidth, but found kKeyStride %d", unexpected);
michael@0 628 if (format->findInt32(kKeySliceHeight, &unexpected))
michael@0 629 LOG("Expected kKeyHeight, but found kKeySliceHeight %d", unexpected);
michael@0 630 #endif // DEBUG
michael@0 631
michael@0 632 const char *componentName;
michael@0 633
michael@0 634 if (!format->findInt32(kKeyWidth, &mVideoStride) ||
michael@0 635 !format->findInt32(kKeyHeight, &mVideoSliceHeight) ||
michael@0 636 !format->findCString(kKeyDecoderComponent, &componentName) ||
michael@0 637 !format->findInt32(kKeyColorFormat, &mVideoColorFormat) ) {
michael@0 638 return false;
michael@0 639 }
michael@0 640
michael@0 641 if (mVideoStride <= 0) {
michael@0 642 LOG("stride %d must be positive", mVideoStride);
michael@0 643 return false;
michael@0 644 }
michael@0 645
michael@0 646 if (mVideoSliceHeight <= 0) {
michael@0 647 LOG("slice height %d must be positive", mVideoSliceHeight);
michael@0 648 return false;
michael@0 649 }
michael@0 650
michael@0 651 // Gingerbread does not support the kKeyCropRect key
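// On 2.x builds the findRect() call and its enclosing braces are preprocessed
// away, so the fallback values below always run.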
michael@0 652 #if !defined(MOZ_ANDROID_V2_X_X)
michael@0 653 if (!format->findRect(kKeyCropRect, &mVideoCropLeft, &mVideoCropTop,
michael@0 654 &mVideoCropRight, &mVideoCropBottom)) {
michael@0 655 #endif
michael@0 656 mVideoCropLeft = 0;
michael@0 657 mVideoCropTop = 0;
michael@0 658 mVideoCropRight = mVideoStride - 1;
michael@0 659 mVideoCropBottom = mVideoSliceHeight - 1;
michael@0 660 LOG("crop rect not available, assuming no cropping");
michael@0 661 #if !defined(MOZ_ANDROID_V2_X_X)
michael@0 662 }
michael@0 663 #endif
michael@0 664
michael@0 665 if (mVideoCropLeft < 0 || mVideoCropLeft >= mVideoCropRight || mVideoCropRight >= mVideoStride ||
michael@0 666 mVideoCropTop < 0 || mVideoCropTop >= mVideoCropBottom || mVideoCropBottom >= mVideoSliceHeight) {
michael@0 667 LOG("invalid crop rect %d,%d-%d,%d", mVideoCropLeft, mVideoCropTop, mVideoCropRight, mVideoCropBottom);
michael@0 668 return false;
michael@0 669 }
michael@0 670
michael@0 671 mVideoWidth = mVideoCropRight - mVideoCropLeft + 1;
michael@0 672 mVideoHeight = mVideoCropBottom - mVideoCropTop + 1;
michael@0 673 MOZ_ASSERT(mVideoWidth > 0 && mVideoWidth <= mVideoStride);
michael@0 674 MOZ_ASSERT(mVideoHeight > 0 && mVideoHeight <= mVideoSliceHeight);
michael@0 675
michael@0 676 #if !defined(MOZ_ANDROID_FROYO)
michael@0 677 if (!format->findInt32(kKeyRotation, &mVideoRotation)) {
michael@0 678 #endif
michael@0 679 mVideoRotation = 0;
michael@0 680 #if !defined(MOZ_ANDROID_FROYO)
michael@0 681 LOG("rotation not available, assuming 0");
michael@0 682 }
michael@0 683 #endif
michael@0 684
michael@0 685 if (mVideoRotation != 0 && mVideoRotation != 90 &&
michael@0 686 mVideoRotation != 180 && mVideoRotation != 270) {
michael@0 687 LOG("invalid rotation %d, assuming 0", mVideoRotation);
michael@0 688 }
michael@0 689
michael@0 690 LOG("width: %d height: %d component: %s format: %#x stride: %d sliceHeight: %d rotation: %d crop: %d,%d-%d,%d",
michael@0 691 mVideoWidth, mVideoHeight, componentName, mVideoColorFormat,
michael@0 692 mVideoStride, mVideoSliceHeight, mVideoRotation,
michael@0 693 mVideoCropLeft, mVideoCropTop, mVideoCropRight, mVideoCropBottom);
michael@0 694
michael@0 695 return true;
michael@0 696 }
michael@0 697
michael@0 698 bool OmxDecoder::SetAudioFormat() {
michael@0 699 // If the format changed, update our cached info.
michael@0 700 if (!mAudioSource->getFormat()->findInt32(kKeyChannelCount, &mAudioChannels) ||
michael@0 701 !mAudioSource->getFormat()->findInt32(kKeySampleRate, &mAudioSampleRate)) {
michael@0 702 return false;
michael@0 703 }
michael@0 704
michael@0 705 LOG("channelCount: %d sampleRate: %d", mAudioChannels, mAudioSampleRate);
michael@0 706
michael@0 707 if (mAudioChannels < 0) {
michael@0 708 LOG("audio channel count %d must be nonnegative", mAudioChannels);
michael@0 709 return false;
michael@0 710 }
michael@0 711
michael@0 712 if (mAudioSampleRate < 0) {
michael@0 713 LOG("audio sample rate %d must be nonnegative", mAudioSampleRate);
michael@0 714 return false;
michael@0 715 }
michael@0 716
michael@0 717 return true;
michael@0 718 }
michael@0 719
michael@0 720 void OmxDecoder::ReleaseVideoBuffer() {
michael@0 721 if (mVideoBuffer) {
michael@0 722 mVideoBuffer->release();
michael@0 723 mVideoBuffer = nullptr;
michael@0 724 }
michael@0 725 }
michael@0 726
michael@0 727 void OmxDecoder::ReleaseAudioBuffer() {
michael@0 728 if (mAudioBuffer) {
michael@0 729 mAudioBuffer->release();
michael@0 730 mAudioBuffer = nullptr;
michael@0 731 }
michael@0 732 }
michael@0 733
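// Planar I420 layout: a full-resolution Y plane followed by quarter-resolution
// U and V planes with half the stride and half the slice height.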
michael@0 734 void OmxDecoder::ToVideoFrame_YUV420Planar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
michael@0 735 void *y = aData;
michael@0 736 void *u = static_cast<uint8_t *>(y) + mVideoStride * mVideoSliceHeight;
michael@0 737 void *v = static_cast<uint8_t *>(u) + mVideoStride/2 * mVideoSliceHeight/2;
michael@0 738 aFrame->Set(aTimeUs, aKeyFrame,
michael@0 739 aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
michael@0 740 y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
michael@0 741 u, mVideoStride/2, mVideoWidth/2, mVideoHeight/2, 0, 0,
michael@0 742 v, mVideoStride/2, mVideoWidth/2, mVideoHeight/2, 0, 0);
michael@0 743 }
michael@0 744
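// CbYCrY is a packed 4:2:2 (UYVY-style) layout, so all three planes share the
// same interleaved buffer; the offset and skip arguments to Set() pick out the
// Y, Cb and Cr samples.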
michael@0 745 void OmxDecoder::ToVideoFrame_CbYCrY(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
michael@0 746 aFrame->Set(aTimeUs, aKeyFrame,
michael@0 747 aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
michael@0 748 aData, mVideoStride, mVideoWidth, mVideoHeight, 1, 1,
michael@0 749 aData, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 3,
michael@0 750 aData, mVideoStride, mVideoWidth/2, mVideoHeight/2, 2, 3);
michael@0 751 }
michael@0 752
michael@0 753 void OmxDecoder::ToVideoFrame_YUV420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
michael@0 754 int32_t videoStride = mVideoStride;
michael@0 755 int32_t videoSliceHeight = mVideoSliceHeight;
michael@0 756
michael@0 757 // OMX.SEC.avcdec rounds mVideoStride and mVideoSliceHeight up to the nearest
michael@0 758 // multiple of 16, but the decoded data itself is too small to fill that padding.
michael@0 759 // Check whether the buffer size matches the raw width and height; if so, use
michael@0 760 // those figures instead.
michael@0 761
michael@0 762 if (static_cast<int>(aSize) == mVideoWidth * mVideoHeight * 3 / 2) {
michael@0 763 videoStride = mVideoWidth;
michael@0 764 videoSliceHeight = mVideoHeight;
michael@0 765 }
michael@0 766
michael@0 767 void *y = aData;
michael@0 768 void *uv = static_cast<uint8_t *>(y) + (videoStride * videoSliceHeight);
michael@0 769 aFrame->Set(aTimeUs, aKeyFrame,
michael@0 770 aData, aSize, videoStride, videoSliceHeight, mVideoRotation,
michael@0 771 y, videoStride, mVideoWidth, mVideoHeight, 0, 0,
michael@0 772 uv, videoStride, mVideoWidth/2, mVideoHeight/2, 0, 1,
michael@0 773 uv, videoStride, mVideoWidth/2, mVideoHeight/2, 1, 1);
michael@0 774 }
michael@0 775
michael@0 776 void OmxDecoder::ToVideoFrame_YVU420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
michael@0 777 ToVideoFrame_YUV420SemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
michael@0 778 aFrame->Cb.mOffset = 1;
michael@0 779 aFrame->Cr.mOffset = 0;
michael@0 780 }
michael@0 781
michael@0 782 void OmxDecoder::ToVideoFrame_YUV420PackedSemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
michael@0 783 void *y = aData;
michael@0 784 void *uv = static_cast<uint8_t *>(y) + mVideoStride * (mVideoSliceHeight - mVideoCropTop/2);
michael@0 785 aFrame->Set(aTimeUs, aKeyFrame,
michael@0 786 aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
michael@0 787 y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
michael@0 788 uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 1,
michael@0 789 uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 1, 1);
michael@0 790 }
michael@0 791
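// This Qualcomm format pads the luma plane to a stride and slice height that
// are multiples of 32, so round both up before locating the interleaved VU
// plane.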
michael@0 792 void OmxDecoder::ToVideoFrame_YVU420PackedSemiPlanar32m4ka(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
michael@0 793 size_t roundedSliceHeight = (mVideoSliceHeight + 31) & ~31;
michael@0 794 size_t roundedStride = (mVideoStride + 31) & ~31;
michael@0 795 void *y = aData;
michael@0 796 void *uv = static_cast<uint8_t *>(y) + (roundedStride * roundedSliceHeight);
michael@0 797 aFrame->Set(aTimeUs, aKeyFrame,
michael@0 798 aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
michael@0 799 y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
michael@0 800 uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 1, 1,
michael@0 801 uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 1);
michael@0 802 }
michael@0 803
michael@0 804 bool OmxDecoder::ToVideoFrame_RGB565(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback) {
michael@0 805 void *buffer = (*aBufferCallback)(mVideoWidth, mVideoHeight, MPAPI::RGB565);
michael@0 806
michael@0 807 if (!buffer) {
michael@0 808 return false;
michael@0 809 }
michael@0 810
michael@0 811 aFrame->mTimeUs = aTimeUs;
michael@0 812
michael@0 813 memcpy(buffer, aData, mVideoWidth * mVideoHeight * 2);
michael@0 814
michael@0 815 aFrame->mSize = mVideoWidth * mVideoHeight * 2;
michael@0 816
michael@0 817 return true;
michael@0 818 }
michael@0 819
michael@0 820 bool OmxDecoder::ToVideoFrame_ColorConverter(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback) {
michael@0 821 #ifdef MOZ_ANDROID_HC
michael@0 822 return false;
michael@0 823 #else
michael@0 824 if (!mColorConverter) {
michael@0 825 mColorConverter = new ColorConverter((OMX_COLOR_FORMATTYPE)mVideoColorFormat,
michael@0 826 OMX_COLOR_Format16bitRGB565);
michael@0 827 }
michael@0 828
michael@0 829 if (!mColorConverter->isValid()) {
michael@0 830 return false;
michael@0 831 }
michael@0 832
michael@0 833 aFrame->mTimeUs = aTimeUs;
michael@0 834
michael@0 835 void *buffer = (*aBufferCallback)(mVideoWidth, mVideoHeight, MPAPI::RGB565);
michael@0 836
michael@0 837 if (!buffer) {
michael@0 838 return false;
michael@0 839 }
michael@0 840
michael@0 841 aFrame->mSize = mVideoWidth * mVideoHeight * 2;
michael@0 842
michael@0 843 #if defined(MOZ_ANDROID_V2_X_X)
michael@0 844 mColorConverter->convert(mVideoWidth, mVideoHeight,
michael@0 845 aData, 0 /* srcSkip */,
michael@0 846 buffer, mVideoWidth * 2);
michael@0 847 #else
michael@0 848 mColorConverter->convert(aData, mVideoStride, mVideoSliceHeight,
michael@0 849 mVideoCropLeft, mVideoCropTop,
michael@0 850 mVideoCropLeft + mVideoWidth - 1,
michael@0 851 mVideoCropTop + mVideoHeight - 1,
michael@0 852 buffer, mVideoWidth, mVideoHeight,
michael@0 853 0, 0, mVideoWidth - 1, mVideoHeight - 1);
michael@0 854 #endif
michael@0 855
michael@0 856 return true;
michael@0 857 #endif
michael@0 858 }
michael@0 859
michael@0 860 bool OmxDecoder::ToVideoFrame_I420ColorConverter(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback)
michael@0 861 {
michael@0 862 #if defined(MOZ_ANDROID_V4_OR_ABOVE)
michael@0 863 I420ColorConverter yuvConverter;
michael@0 864
michael@0 865 if (!yuvConverter.isLoaded()) {
michael@0 866 return false;
michael@0 867 }
michael@0 868
michael@0 869 if (yuvConverter.getDecoderOutputFormat() != mVideoColorFormat) {
michael@0 870 return false;
michael@0 871 }
michael@0 872
michael@0 873 void *buffer = (*aBufferCallback)(mVideoWidth, mVideoHeight, MPAPI::I420);
michael@0 874
michael@0 875 ARect crop = { mVideoCropLeft, mVideoCropTop, mVideoCropRight, mVideoCropBottom };
michael@0 876 int result = yuvConverter.convertDecoderOutputToI420(aData,
michael@0 877 mVideoWidth,
michael@0 878 mVideoHeight,
michael@0 879 crop,
michael@0 880 buffer);
michael@0 881
michael@0 882 // convertDecoderOutputToI420() returns 0 (the value of OK) on success, -1 otherwise.
michael@0 883 if (result == OK) {
michael@0 884 aFrame->mTimeUs = aTimeUs;
michael@0 885 aFrame->mSize = mVideoWidth * mVideoHeight * 3 / 2;
michael@0 886 }
michael@0 887
michael@0 888 return result == OK;
michael@0 889 #else
michael@0 890 return false;
michael@0 891 #endif
michael@0 892 }
michael@0 893
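// Dispatch on the colour format reported by the decoder; formats without a
// dedicated routine above fall through to the Android ColorConverter and
// I420ColorConverter helpers.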
michael@0 894 bool OmxDecoder::ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback) {
michael@0 895 switch (mVideoColorFormat) {
michael@0 896 // Froyo is best handled by the Android colour conversion code; our own
michael@0 897 // routines below produce corrupted video on that release.
michael@0 898 #if !defined(MOZ_ANDROID_FROYO)
michael@0 899 case OMX_COLOR_FormatYUV420Planar: // e.g. Asus Transformer, Stagefright's software decoder
michael@0 900 ToVideoFrame_YUV420Planar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
michael@0 901 break;
michael@0 902 case OMX_COLOR_FormatCbYCrY: // e.g. Droid 1
michael@0 903 ToVideoFrame_CbYCrY(aFrame, aTimeUs, aData, aSize, aKeyFrame);
michael@0 904 break;
michael@0 905 case OMX_COLOR_FormatYUV420SemiPlanar: // e.g. Galaxy S III
michael@0 906 ToVideoFrame_YUV420SemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
michael@0 907 break;
michael@0 908 case OMX_QCOM_COLOR_FormatYVU420SemiPlanar: // e.g. Nexus One
michael@0 909 ToVideoFrame_YVU420SemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
michael@0 910 break;
michael@0 911 case OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka: // e.g. Otoro
michael@0 912 ToVideoFrame_YVU420PackedSemiPlanar32m4ka(aFrame, aTimeUs, aData, aSize, aKeyFrame);
michael@0 913 break;
michael@0 914 case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar: // e.g. Galaxy Nexus
michael@0 915 ToVideoFrame_YUV420PackedSemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
michael@0 916 break;
michael@0 917 case OMX_COLOR_Format16bitRGB565:
michael@0 918 return ToVideoFrame_RGB565(aFrame, aTimeUs, aData, aSize, aKeyFrame, aBufferCallback);
michael@0 919 break;
michael@0 920 #endif
michael@0 921 default:
michael@0 922 if (!ToVideoFrame_ColorConverter(aFrame, aTimeUs, aData, aSize, aKeyFrame, aBufferCallback) &&
michael@0 923 !ToVideoFrame_I420ColorConverter(aFrame, aTimeUs, aData, aSize, aKeyFrame, aBufferCallback)) {
michael@0 924 LOG("Unknown video color format: %#x", mVideoColorFormat);
michael@0 925 return false;
michael@0 926 }
michael@0 927 }
michael@0 928 return true;
michael@0 929 }
michael@0 930
michael@0 931 bool OmxDecoder::ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize, int32_t aAudioChannels, int32_t aAudioSampleRate)
michael@0 932 {
michael@0 933 aFrame->Set(aTimeUs, reinterpret_cast<char *>(aData) + aDataOffset, aSize, aAudioChannels, aAudioSampleRate);
michael@0 934 return true;
michael@0 935 }
michael@0 936
michael@0 937 class ReadOptions : public MediaSource::ReadOptions
michael@0 938 {
michael@0 939 // HTC have their own version of ReadOptions with extra fields. If we don't
michael@0 940 // have this here, HTCOMXCodec will corrupt our stack.
michael@0 941 uint32_t sadface[16];
michael@0 942 };
michael@0 943
michael@0 944 bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs,
michael@0 945 BufferCallback *aBufferCallback)
michael@0 946 {
michael@0 947 MOZ_ASSERT(aSeekTimeUs >= -1);
michael@0 948
michael@0 949 if (!mVideoSource.get())
michael@0 950 return false;
michael@0 951
michael@0 952 ReleaseVideoBuffer();
michael@0 953
michael@0 954 status_t err;
michael@0 955
michael@0 956 if (aSeekTimeUs != -1) {
michael@0 957 ReadOptions options;
michael@0 958 options.setSeekTo(aSeekTimeUs);
michael@0 959 err = mVideoSource->read(&mVideoBuffer, &options);
michael@0 960 } else {
michael@0 961 err = mVideoSource->read(&mVideoBuffer);
michael@0 962 }
michael@0 963
michael@0 964 aFrame->mSize = 0;
michael@0 965
michael@0 966 if (err == OK && mVideoBuffer->range_length() > 0) {
michael@0 967 int64_t timeUs;
michael@0 968 int32_t keyFrame;
michael@0 969
michael@0 970 if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs) ) {
michael@0 971 LOG("no frame time");
michael@0 972 return false;
michael@0 973 }
michael@0 974
michael@0 975 if (timeUs < 0) {
michael@0 976 LOG("frame time %lld must be nonnegative", timeUs);
michael@0 977 return false;
michael@0 978 }
michael@0 979
michael@0 980 if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
michael@0 981 keyFrame = 0;
michael@0 982 }
michael@0 983
michael@0 984 char *data = reinterpret_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
michael@0 985 size_t length = mVideoBuffer->range_length();
michael@0 986
michael@0 987 if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame, aBufferCallback)) {
michael@0 988 return false;
michael@0 989 }
michael@0 990 }
michael@0 991 else if (err == INFO_FORMAT_CHANGED) {
michael@0 992 // If the format changed, update our cached info.
michael@0 993 LOG("mVideoSource INFO_FORMAT_CHANGED");
michael@0 994 if (!SetVideoFormat())
michael@0 995 return false;
michael@0 996 else
michael@0 997 return ReadVideo(aFrame, aSeekTimeUs, aBufferCallback);
michael@0 998 }
michael@0 999 else if (err == ERROR_END_OF_STREAM) {
michael@0 1000 LOG("mVideoSource END_OF_STREAM");
michael@0 1001 }
michael@0 1002 else if (err != OK) {
michael@0 1003 LOG("mVideoSource ERROR %#x", err);
michael@0 1004 }
michael@0 1005
michael@0 1006 return err == OK;
michael@0 1007 }
michael@0 1008
michael@0 1009 bool OmxDecoder::ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs)
michael@0 1010 {
michael@0 1011 MOZ_ASSERT(aSeekTimeUs >= -1);
michael@0 1012
michael@0 1013 status_t err;
michael@0 1014 if (mAudioMetadataRead && aSeekTimeUs == -1) {
michael@0 1015 // Reuse the audio buffer that was already read while parsing the metadata.
michael@0 1016 err = OK;
michael@0 1017 }
michael@0 1018 else {
michael@0 1019 ReleaseAudioBuffer();
michael@0 1020 if (aSeekTimeUs != -1) {
michael@0 1021 ReadOptions options;
michael@0 1022 options.setSeekTo(aSeekTimeUs);
michael@0 1023 err = mAudioSource->read(&mAudioBuffer, &options);
michael@0 1024 } else {
michael@0 1025 err = mAudioSource->read(&mAudioBuffer);
michael@0 1026 }
michael@0 1027 }
michael@0 1028 mAudioMetadataRead = false;
michael@0 1029
michael@0 1030 aSeekTimeUs = -1;
michael@0 1031
michael@0 1032 if (err == OK && mAudioBuffer->range_length() != 0) {
michael@0 1033 int64_t timeUs;
michael@0 1034 if (!mAudioBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
michael@0 1035 LOG("no frame time");
michael@0 1036 return false;
michael@0 1037 }
michael@0 1038
michael@0 1039 if (timeUs < 0) {
michael@0 1040 LOG("frame time %lld must be nonnegative", timeUs);
michael@0 1041 return false;
michael@0 1042 }
michael@0 1043
michael@0 1044 return ToAudioFrame(aFrame, timeUs,
michael@0 1045 mAudioBuffer->data(),
michael@0 1046 mAudioBuffer->range_offset(),
michael@0 1047 mAudioBuffer->range_length(),
michael@0 1048 mAudioChannels, mAudioSampleRate);
michael@0 1049 }
michael@0 1050 else if (err == INFO_FORMAT_CHANGED) {
michael@0 1051 // If the format changed, update our cached info.
michael@0 1052 LOG("mAudioSource INFO_FORMAT_CHANGED");
michael@0 1053 if (!SetAudioFormat())
michael@0 1054 return false;
michael@0 1055 else
michael@0 1056 return ReadAudio(aFrame, aSeekTimeUs);
michael@0 1057 }
michael@0 1058 else if (err == ERROR_END_OF_STREAM) {
michael@0 1059 LOG("mAudioSource END_OF_STREAM");
michael@0 1060 }
michael@0 1061 else if (err != OK) {
michael@0 1062 LOG("mAudioSource ERROR %#x", err);
michael@0 1063 }
michael@0 1064
michael@0 1065 return err == OK;
michael@0 1066 }
michael@0 1067
michael@0 1068 static OmxDecoder *cast(Decoder *decoder) {
michael@0 1069 return reinterpret_cast<OmxDecoder *>(decoder->mPrivate);
michael@0 1070 }
michael@0 1071
michael@0 1072 static void GetDuration(Decoder *aDecoder, int64_t *durationUs) {
michael@0 1073 cast(aDecoder)->GetDuration(durationUs);
michael@0 1074 }
michael@0 1075
michael@0 1076 static void GetVideoParameters(Decoder *aDecoder, int32_t *width, int32_t *height) {
michael@0 1077 cast(aDecoder)->GetVideoParameters(width, height);
michael@0 1078 }
michael@0 1079
michael@0 1080 static void GetAudioParameters(Decoder *aDecoder, int32_t *numChannels, int32_t *sampleRate) {
michael@0 1081 cast(aDecoder)->GetAudioParameters(numChannels, sampleRate);
michael@0 1082 }
michael@0 1083
michael@0 1084 static bool HasVideo(Decoder *aDecoder) {
michael@0 1085 return cast(aDecoder)->HasVideo();
michael@0 1086 }
michael@0 1087
michael@0 1088 static bool HasAudio(Decoder *aDecoder) {
michael@0 1089 return cast(aDecoder)->HasAudio();
michael@0 1090 }
michael@0 1091
michael@0 1092 static bool ReadVideo(Decoder *aDecoder, VideoFrame *aFrame, int64_t aSeekTimeUs, BufferCallback *aBufferCallback)
michael@0 1093 {
michael@0 1094 return cast(aDecoder)->ReadVideo(aFrame, aSeekTimeUs, aBufferCallback);
michael@0 1095 }
michael@0 1096
michael@0 1097 static bool ReadAudio(Decoder *aDecoder, AudioFrame *aFrame, int64_t aSeekTimeUs)
michael@0 1098 {
michael@0 1099 return cast(aDecoder)->ReadAudio(aFrame, aSeekTimeUs);
michael@0 1100 }
michael@0 1101
michael@0 1102 static void DestroyDecoder(Decoder *aDecoder)
michael@0 1103 {
michael@0 1104 if (aDecoder->mPrivate)
michael@0 1105 delete reinterpret_cast<OmxDecoder *>(aDecoder->mPrivate);
michael@0 1106 }
michael@0 1107
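// aMimeChars is not guaranteed to be NUL-terminated, so compare only the first
// aMimeLen characters.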
michael@0 1108 static bool Match(const char *aMimeChars, size_t aMimeLen, const char *aNeedle)
michael@0 1109 {
michael@0 1110 return !strncmp(aMimeChars, aNeedle, aMimeLen);
michael@0 1111 }
michael@0 1112
michael@0 1113 static const char* const gCodecs[] = {
michael@0 1114 "avc1.42E01E", // H.264 Constrained Baseline Profile Level 3.0
michael@0 1115 "avc1.42001E", // H.264 Baseline Profile Level 3.0
michael@0 1116 "avc1.42001F", // H.264 Baseline Profile Level 3.1
michael@0 1117 "avc1.4D401E", // H.264 Main Profile Level 3.0
michael@0 1118 "avc1.4D401F", // H.264 Main Profile Level 3.1
michael@0 1119 "mp4a.40.2", // AAC-LC
michael@0 1120 nullptr
michael@0 1121 };
michael@0 1122
michael@0 1123 static bool CanDecode(const char *aMimeChars, size_t aMimeLen, const char* const**aCodecs)
michael@0 1124 {
michael@0 1125 if (!Match(aMimeChars, aMimeLen, "video/mp4") &&
michael@0 1126 !Match(aMimeChars, aMimeLen, "audio/mp4") &&
michael@0 1127 !Match(aMimeChars, aMimeLen, "audio/mpeg") &&
michael@0 1128 !Match(aMimeChars, aMimeLen, "application/octet-stream")) { // file urls
michael@0 1129 return false;
michael@0 1130 }
michael@0 1131 *aCodecs = gCodecs;
michael@0 1132
michael@0 1133 return true;
michael@0 1134 }
michael@0 1135
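// Construct an OmxDecoder for this stream and wire its methods into the MPAPI
// Decoder function table.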
michael@0 1136 static bool CreateDecoder(PluginHost *aPluginHost, Decoder *aDecoder, const char *aMimeChars, size_t aMimeLen)
michael@0 1137 {
michael@0 1138 OmxDecoder *omx = new OmxDecoder(aPluginHost, aDecoder);
michael@0 1139 if (!omx || !omx->Init()) {
michael@0 1140 if (omx)
michael@0 1141 delete omx;
michael@0 1142 return false;
michael@0 1143 }
michael@0 1144
michael@0 1145 aDecoder->mPrivate = omx;
michael@0 1146 aDecoder->GetDuration = GetDuration;
michael@0 1147 aDecoder->GetVideoParameters = GetVideoParameters;
michael@0 1148 aDecoder->GetAudioParameters = GetAudioParameters;
michael@0 1149 aDecoder->HasVideo = HasVideo;
michael@0 1150 aDecoder->HasAudio = HasAudio;
michael@0 1151 aDecoder->ReadVideo = ReadVideo;
michael@0 1152 aDecoder->ReadAudio = ReadAudio;
michael@0 1153 aDecoder->DestroyDecoder = DestroyDecoder;
michael@0 1154
michael@0 1155 return true;
michael@0 1156 }
michael@0 1157
michael@0 1158 } // namespace OmxPlugin
michael@0 1159
michael@0 1160 // Export the manifest so MPAPI can find our entry points.
michael@0 1161 Manifest MOZ_EXPORT MPAPI_MANIFEST = {
michael@0 1162 OmxPlugin::CanDecode,
michael@0 1163 OmxPlugin::CreateDecoder
michael@0 1164 };
