media/omx-plugin/OmxPlugin.cpp

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/media/omx-plugin/OmxPlugin.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,1164 @@
     1.4 +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
     1.5 +/* vim:set ts=2 sw=2 sts=2 et cindent: */
     1.6 +/* This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this file,
     1.8 + * You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#include <stagefright/ColorConverter.h>
    1.11 +#include <stagefright/DataSource.h>
    1.12 +#include <stagefright/MediaExtractor.h>
    1.13 +#include <stagefright/MetaData.h>
    1.14 +#include <stagefright/OMXCodec.h>
    1.15 +#include <media/stagefright/MediaErrors.h>
    1.16 +#ifdef MOZ_WIDGET_GONK
    1.17 +#include <OMX.h>
    1.18 +#else
    1.19 +#include <stagefright/OMXClient.h>
    1.20 +#endif
    1.21 +#include <algorithm>
    1.22 +
    1.23 +#include "mozilla/Assertions.h"
    1.24 +#include "mozilla/Types.h"
    1.25 +#include "MPAPI.h"
    1.26 +
    1.27 +#include "android/log.h"
    1.28 +
    1.29 +#define MAX_DECODER_NAME_LEN 256
    1.30 +#define AVC_MIME_TYPE "video/avc"
    1.31 +
    1.32 +#if !defined(MOZ_ANDROID_FROYO)
    1.33 +#define DEFAULT_STAGEFRIGHT_FLAGS OMXCodec::kClientNeedsFramebuffer
    1.34 +#else
    1.35 +#define DEFAULT_STAGEFRIGHT_FLAGS 0
    1.36 +#endif
    1.37 +
    1.38 +#undef LOG
    1.39 +#define LOG(args...)  __android_log_print(ANDROID_LOG_INFO, "OmxPlugin" , ## args)
    1.40 +
    1.41 +#if defined(MOZ_ANDROID_FROYO) || defined(MOZ_ANDROID_GB)
    1.42 +// Android versions 2.x.x have common API differences
    1.43 +#define MOZ_ANDROID_V2_X_X
    1.44 +#endif
    1.45 +
    1.46 +#if !defined(MOZ_ANDROID_V2_X_X) && !defined(MOZ_ANDROID_HC)
    1.47 +#define MOZ_ANDROID_V4_OR_ABOVE
    1.48 +#endif
    1.49 +
    1.50 +#if defined(MOZ_ANDROID_V4_OR_ABOVE)
    1.51 +#include <I420ColorConverter.h>
    1.52 +#endif
    1.53 +
    1.54 +using namespace MPAPI;
    1.55 +
    1.56 +#if !defined(MOZ_STAGEFRIGHT_OFF_T)
    1.57 +#define MOZ_STAGEFRIGHT_OFF_T off64_t
    1.58 +#endif
    1.59 +
    1.60 +using namespace android;
    1.61 +
    1.62 +namespace OmxPlugin {
    1.63 +
    1.64 +const int OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
    1.65 +const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00;
    1.66 +const int OMX_TI_COLOR_FormatYUV420PackedSemiPlanar = 0x7F000100;
    1.67 +
    1.68 +class OmxDecoder {
    1.69 +  PluginHost *mPluginHost;
    1.70 +  Decoder *mDecoder;
    1.71 +  sp<MediaSource> mVideoTrack;
    1.72 +  sp<MediaSource> mVideoSource;
    1.73 +  sp<MediaSource> mAudioTrack;
    1.74 +  sp<MediaSource> mAudioSource;
    1.75 +  int32_t mVideoWidth;
    1.76 +  int32_t mVideoHeight;
    1.77 +  int32_t mVideoColorFormat;
    1.78 +  int32_t mVideoStride;
    1.79 +  int32_t mVideoSliceHeight;
    1.80 +  int32_t mVideoCropLeft;
    1.81 +  int32_t mVideoCropTop;
    1.82 +  int32_t mVideoCropRight;
    1.83 +  int32_t mVideoCropBottom;
    1.84 +  int32_t mVideoRotation;
    1.85 +  int32_t mAudioChannels;
    1.86 +  int32_t mAudioSampleRate;
    1.87 +  int64_t mDurationUs;
    1.88 +  MediaBuffer *mVideoBuffer;
    1.89 +  VideoFrame mVideoFrame;
    1.90 +  MediaBuffer *mAudioBuffer;
    1.91 +  AudioFrame mAudioFrame;
    1.92 +  ColorConverter *mColorConverter;
    1.93 +
    1.94 +  // 'true' if a read from the audio stream was done while reading the metadata
    1.95 +  bool mAudioMetadataRead;
    1.96 +
    1.97 +  void ReleaseVideoBuffer();
    1.98 +  void ReleaseAudioBuffer();
    1.99 +
   1.100 +  void ToVideoFrame_YUV420Planar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
   1.101 +  void ToVideoFrame_CbYCrY(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
   1.102 +  void ToVideoFrame_YUV420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
   1.103 +  void ToVideoFrame_YVU420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
   1.104 +  void ToVideoFrame_YUV420PackedSemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
   1.105 +  void ToVideoFrame_YVU420PackedSemiPlanar32m4ka(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
   1.106 +  bool ToVideoFrame_RGB565(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
   1.107 +  bool ToVideoFrame_ColorConverter(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
   1.108 +  bool ToVideoFrame_I420ColorConverter(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
   1.109 +  bool ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback);
   1.110 +  bool ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize,
   1.111 +                    int32_t aAudioChannels, int32_t aAudioSampleRate);
   1.112 +public:
   1.113 +  OmxDecoder(PluginHost *aPluginHost, Decoder *aDecoder);
   1.114 +  ~OmxDecoder();
   1.115 +
   1.116 +  bool Init();
   1.117 +  bool SetVideoFormat();
   1.118 +  bool SetAudioFormat();
   1.119 +
   1.120 +  void GetDuration(int64_t *durationUs) {
   1.121 +    *durationUs = mDurationUs;
   1.122 +  }
   1.123 +
   1.124 +  void GetVideoParameters(int32_t *width, int32_t *height) {
   1.125 +    *width = mVideoWidth;
   1.126 +    *height = mVideoHeight;
   1.127 +  }
   1.128 +
   1.129 +  void GetAudioParameters(int32_t *numChannels, int32_t *sampleRate) {
   1.130 +    *numChannels = mAudioChannels;
   1.131 +    *sampleRate = mAudioSampleRate;
   1.132 +  }
   1.133 +
   1.134 +  bool HasVideo() {
   1.135 +    return mVideoSource != nullptr;
   1.136 +  }
   1.137 +
   1.138 +  bool HasAudio() {
   1.139 +    return mAudioSource != nullptr;
   1.140 +  }
   1.141 +
   1.142 +  bool ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs, BufferCallback *aBufferCallback);
   1.143 +  bool ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs);
   1.144 +};
   1.145 +
   1.146 +#if !defined(MOZ_WIDGET_GONK)
   1.147 +static class OmxClientInstance {
   1.148 +public:
   1.149 +  OmxClientInstance()
   1.150 +    : mClient(new OMXClient())
   1.151 +    , mStatus(mClient->connect())
   1.152 +  {
   1.153 +  }
   1.154 +
   1.155 +  status_t IsValid()
   1.156 +  {
   1.157 +    return mStatus == OK;
   1.158 +  }
   1.159 +
   1.160 +  OMXClient *get()
   1.161 +  {
   1.162 +    return mClient;
   1.163 +  }
   1.164 +
   1.165 +  ~OmxClientInstance()
   1.166 +  {
   1.167 +    if (mStatus == OK) {
   1.168 +      mClient->disconnect();
   1.169 +    }
   1.170 +    delete mClient;
   1.171 +  }
   1.172 +
   1.173 +private:
   1.174 +  OMXClient *mClient;
   1.175 +  status_t mStatus;
   1.176 +} sClientInstance;
   1.177 +#endif
   1.178 +
   1.179 +OmxDecoder::OmxDecoder(PluginHost *aPluginHost, Decoder *aDecoder) :
   1.180 +  mPluginHost(aPluginHost),
   1.181 +  mDecoder(aDecoder),
   1.182 +  mVideoWidth(0),
   1.183 +  mVideoHeight(0),
   1.184 +  mVideoColorFormat(0),
   1.185 +  mVideoStride(0),
   1.186 +  mVideoSliceHeight(0),
   1.187 +  mVideoCropLeft(0),
   1.188 +  mVideoCropTop(0),
   1.189 +  mVideoCropRight(0),
   1.190 +  mVideoCropBottom(0),
   1.191 +  mVideoRotation(0),
   1.192 +  mAudioChannels(-1),
   1.193 +  mAudioSampleRate(-1),
   1.194 +  mDurationUs(-1),
   1.195 +  mVideoBuffer(nullptr),
   1.196 +  mAudioBuffer(nullptr),
   1.197 +  mColorConverter(nullptr),
   1.198 +  mAudioMetadataRead(false)
   1.199 +{
   1.200 +}
   1.201 +
   1.202 +OmxDecoder::~OmxDecoder()
   1.203 +{
   1.204 +  ReleaseVideoBuffer();
   1.205 +  ReleaseAudioBuffer();
   1.206 +
   1.207 +  if (mVideoSource.get()) {
   1.208 +    mVideoSource->stop();
   1.209 +  }
   1.210 +
   1.211 +  if (mAudioSource.get()) {
   1.212 +    mAudioSource->stop();
   1.213 +  }
   1.214 +
   1.215 +#ifndef MOZ_ANDROID_HC
   1.216 +  if (mColorConverter) {
   1.217 +    delete mColorConverter;
   1.218 +  }
   1.219 +#endif
   1.220 +}
   1.221 +
   1.222 +class AutoStopMediaSource {
   1.223 +  sp<MediaSource> mMediaSource;
   1.224 +public:
   1.225 +  AutoStopMediaSource(sp<MediaSource> aMediaSource) : mMediaSource(aMediaSource) {
   1.226 +  }
   1.227 +
   1.228 +  ~AutoStopMediaSource() {
   1.229 +    mMediaSource->stop();
   1.230 +  }
   1.231 +};
   1.232 +
   1.233 +#ifdef MOZ_WIDGET_GONK
   1.234 +static sp<IOMX> sOMX = nullptr;
   1.235 +static sp<IOMX> GetOMX() {
   1.236 +  if(sOMX.get() == nullptr) {
   1.237 +    sOMX = reinterpret_cast<IOMX*>(new OMX);
   1.238 +  }
   1.239 +  return sOMX;
   1.240 +}
   1.241 +#endif
   1.242 +
   1.243 +static uint32_t
   1.244 +GetDefaultStagefrightFlags(PluginHost *aPluginHost)
   1.245 +{
   1.246 +  uint32_t flags = DEFAULT_STAGEFRIGHT_FLAGS;
   1.247 +
   1.248 +#if !defined(MOZ_ANDROID_FROYO)
   1.249 +
   1.250 +  char hardware[256] = "";
   1.251 +  aPluginHost->GetSystemInfoString("hardware", hardware, sizeof(hardware));
   1.252 +
   1.253 +  if (!strcmp("qcom", hardware)) {
   1.254 +    // Qualcomm's OMXCodec implementation interprets this flag to mean that we
   1.255 +    // only want a thumbnail and therefore only need one frame. After the first
   1.256 +    // frame it returns EOS.
   1.257 +    // All other OMXCodec implementations seen so far interpret this flag
   1.258 +    // sanely; some do not return full framebuffers unless this flag is passed.
   1.259 +    flags &= ~OMXCodec::kClientNeedsFramebuffer;
   1.260 +  }
   1.261 +
   1.262 +  LOG("Hardware %s; using default flags %#x\n", hardware, flags);
   1.263 +
   1.264 +#endif
   1.265 +
   1.266 +  return flags;
   1.267 +}
   1.268 +
   1.269 +static uint32_t GetVideoCreationFlags(PluginHost* aPluginHost)
   1.270 +{
   1.271 +#ifdef MOZ_WIDGET_GONK
   1.272 +  // Flag value of zero means return a hardware or software decoder
   1.273 +  // depending on what the device supports.
   1.274 +  return 0;
   1.275 +#else
   1.276 +  // Check whether the user has set a pref to override our default OMXCodec
   1.277 +  // CreationFlags flags. This is useful for A/B testing hardware and software
   1.278 +  // decoders for performance and bugs. The interesting flag values are:
   1.279 +  //  0 = Let Stagefright choose hardware or software decoding (default)
   1.280 +  //  8 = Force software decoding
   1.281 +  // 16 = Force hardware decoding
   1.282 +  int32_t flags = 0;
   1.283 +  aPluginHost->GetIntPref("media.stagefright.omxcodec.flags", &flags);
   1.284 +  if (flags != 0) {
   1.285 +#if !defined(MOZ_ANDROID_V2_X_X)
   1.286 +    LOG("media.stagefright.omxcodec.flags=%d", flags);
   1.287 +    if ((flags & OMXCodec::kHardwareCodecsOnly) != 0) {
   1.288 +      LOG("FORCE HARDWARE DECODING");
   1.289 +    } else if ((flags & OMXCodec::kSoftwareCodecsOnly) != 0) {
   1.290 +      LOG("FORCE SOFTWARE DECODING");
   1.291 +    }
   1.292 +#endif
   1.293 +  }
   1.294 +
   1.295 +  flags |= GetDefaultStagefrightFlags(aPluginHost);
   1.296 +
   1.297 +  return static_cast<uint32_t>(flags);
   1.298 +#endif
   1.299 +}
   1.300 +
   1.301 +enum ColorFormatSupport {
   1.302 +  ColorFormatNotSupported = 0,
   1.303 +  ColorFormatSupportOK,
   1.304 +  ColorFormatSupportPreferred,
   1.305 +};
   1.306 +
   1.307 +static ColorFormatSupport
   1.308 +IsColorFormatSupported(OMX_COLOR_FORMATTYPE aColorFormat)
   1.309 +{
   1.310 +  switch (static_cast<int>(aColorFormat)) {
   1.311 +    case OMX_COLOR_FormatCbYCrY:
   1.312 +    case OMX_COLOR_FormatYUV420Planar:
   1.313 +    case OMX_COLOR_FormatYUV420SemiPlanar:
   1.314 +    case OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka:
   1.315 +    case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
   1.316 +    case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
   1.317 +      LOG("Colour format %#x supported natively.", aColorFormat);
   1.318 +      // Prefer natively supported colour formats over formats that need another
   1.319 +      // slow software conversion.
   1.320 +      return ColorFormatSupportPreferred;
   1.321 +    default:
   1.322 +      break;
   1.323 +  }
   1.324 +
   1.325 +  // These formats are okay if we can't find a better one; Android provides a
   1.326 +  // software conversion to a sane colour format.
   1.327 +#if !defined(MOZ_ANDROID_HC)
   1.328 +  if (ColorConverter(aColorFormat, OMX_COLOR_Format16bitRGB565).isValid()) {
   1.329 +    LOG("Colour format %#x supported by Android ColorConverter.", aColorFormat);
   1.330 +    return ColorFormatSupportOK;
   1.331 +  }
   1.332 +#endif
   1.333 +
   1.334 +#if defined(MOZ_ANDROID_V4_OR_ABOVE)
   1.335 +  I420ColorConverter yuvConverter;
   1.336 +
   1.337 +  if (yuvConverter.isLoaded() &&
   1.338 +      yuvConverter.getDecoderOutputFormat() == aColorFormat) {
   1.339 +    LOG("Colour format %#x supported by Android I420ColorConverter.", aColorFormat);
   1.340 +    return ColorFormatSupportOK;
   1.341 +  }
   1.342 +#endif
   1.343 +
   1.344 +  return ColorFormatNotSupported;
   1.345 +}
   1.346 +
   1.347 +#if defined(MOZ_ANDROID_KK)
   1.348 +/**
   1.349 + * Look for a decoder that supports a colour format that we support.
   1.350 + */
   1.351 +static bool
   1.352 +FindPreferredDecoderAndColorFormat(const sp<IOMX>& aOmx,
   1.353 +                                   char *aDecoderName,
   1.354 +                                   size_t aDecoderLen,
   1.355 +                                   OMX_COLOR_FORMATTYPE *aColorFormat)
   1.356 +{
   1.357 +  Vector<CodecCapabilities> codecs;
   1.358 +
   1.359 +  // Get all AVC decoder/colour format pairs that this device supports.
   1.360 +  QueryCodecs(aOmx, AVC_MIME_TYPE, true /* queryDecoders */, &codecs);
   1.361 +
   1.362 +  // We assume that faster (hardware accelerated) decoders come first in the
   1.363 +  // list, so we choose the first decoder with a colour format we can use.
   1.364 +  for (uint32_t i = 0; i < codecs.size(); i++) {
   1.365 +    const CodecCapabilities &caps = codecs[i];
   1.366 +    const Vector<OMX_U32> &colors = caps.mColorFormats;
   1.367 +
   1.368 +    bool found = false;
   1.369 +    for (uint32_t j = 0; j < colors.size(); j++) {
   1.370 +      OMX_COLOR_FORMATTYPE color = (OMX_COLOR_FORMATTYPE)colors[j];
   1.371 +
   1.372 +      LOG("Decoder %s can output colour format %#x.\n",
   1.373 +          caps.mComponentName.string(), color);
   1.374 +
   1.375 +      ColorFormatSupport supported = IsColorFormatSupported(color);
   1.376 +
   1.377 +      if (supported) {
   1.378 +        strncpy(aDecoderName, caps.mComponentName.string(), aDecoderLen);
   1.379 +        *aColorFormat = color;
   1.380 +        found = true;
   1.381 +      }
   1.382 +
   1.383 +      if (supported == ColorFormatSupportPreferred) {
   1.384 +        // The colour format is natively supported -- that's as good as we're
   1.385 +        // going to get.
   1.386 +        break;
   1.387 +      }
   1.388 +    }
   1.389 +
   1.390 +    if (found) {
   1.391 +      return true;
   1.392 +    }
   1.393 +  }
   1.394 +
   1.395 +  return false;
   1.396 +}
   1.397 +#endif
   1.398 +
   1.399 +static sp<MediaSource> CreateVideoSource(PluginHost* aPluginHost,
   1.400 +                                         const sp<IOMX>& aOmx,
   1.401 +                                         const sp<MediaSource>& aVideoTrack)
   1.402 +{
   1.403 +  uint32_t flags = GetVideoCreationFlags(aPluginHost);
   1.404 +
   1.405 +  char decoderName[MAX_DECODER_NAME_LEN] = "";
   1.406 +  sp<MetaData> videoFormat = aVideoTrack->getFormat();
   1.407 +
   1.408 +#if defined(MOZ_ANDROID_KK)
   1.409 +  OMX_COLOR_FORMATTYPE colorFormat = (OMX_COLOR_FORMATTYPE)0;
   1.410 +  if (FindPreferredDecoderAndColorFormat(aOmx,
   1.411 +                                         decoderName, sizeof(decoderName),
   1.412 +                                         &colorFormat)) {
   1.413 +    // We found a colour format that we can handle. Tell OMXCodec to use it in
   1.414 +    // case it isn't the default.
   1.415 +    videoFormat->setInt32(kKeyColorFormat, colorFormat);
   1.416 +
   1.417 +    LOG("Found compatible decoder %s with colour format %#x.\n",
   1.418 +        decoderName, colorFormat);
   1.419 +  }
   1.420 +#endif
   1.421 +
   1.422 +  if (flags == DEFAULT_STAGEFRIGHT_FLAGS) {
   1.423 +    // Let Stagefright choose hardware or software decoder.
   1.424 +    sp<MediaSource> videoSource = OMXCodec::Create(aOmx, videoFormat,
   1.425 +                                                   false, aVideoTrack,
   1.426 +                                                   decoderName[0] ? decoderName : nullptr,
   1.427 +                                                   flags);
   1.428 +    if (videoSource == nullptr)
   1.429 +      return nullptr;
   1.430 +
   1.431 +    // Now that OMXCodec has parsed the video's AVCDecoderConfigurationRecord,
   1.432 +    // check whether we know how to decode this video.
   1.433 +    int32_t videoColorFormat;
   1.434 +    if (videoSource->getFormat()->findInt32(kKeyColorFormat, &videoColorFormat)) {
   1.435 +
   1.436 +      if (IsColorFormatSupported((OMX_COLOR_FORMATTYPE)videoColorFormat)) {
   1.437 +        return videoSource;
   1.438 +      }
   1.439 +
   1.440 +      // We need to implement a ToVideoFrame_*() color conversion
   1.441 +      // function for this video color format.
   1.442 +      LOG("Unknown video color format: %#x", videoColorFormat);
   1.443 +    } else {
   1.444 +      LOG("Video color format not found");
   1.445 +    }
   1.446 +
   1.447 +    // Throw away the videoSource and try again with new flags.
   1.448 +    LOG("Falling back to software decoder");
   1.449 +    videoSource.clear();
   1.450 +#if defined(MOZ_ANDROID_V2_X_X)
   1.451 +    flags = DEFAULT_STAGEFRIGHT_FLAGS | OMXCodec::kPreferSoftwareCodecs;
   1.452 +#else
   1.453 +    flags = DEFAULT_STAGEFRIGHT_FLAGS | OMXCodec::kSoftwareCodecsOnly;
   1.454 +#endif
   1.455 +  }
   1.456 +
   1.457 +  MOZ_ASSERT(flags != DEFAULT_STAGEFRIGHT_FLAGS);
   1.458 +  return OMXCodec::Create(aOmx, aVideoTrack->getFormat(), false, aVideoTrack,
   1.459 +                          nullptr, flags);
   1.460 +}
   1.461 +
   1.462 +bool OmxDecoder::Init()
   1.463 +{
   1.464 +#if defined(MOZ_WIDGET_ANDROID)
   1.465 +  // OMXClient::connect() always returns OK and aborts fatally if
   1.466 +  // it can't connect. We may need to implement the connect functionality
   1.467 +  // ourselves if this proves to be an issue.
   1.468 +  if (!sClientInstance.IsValid()) {
   1.469 +    LOG("OMXClient failed to connect");
   1.470 +    return false;
   1.471 +  }
   1.472 +#endif
   1.473 +
    1.474 +  // Register sniffers if they are not already registered in this process.
   1.475 +  DataSource::RegisterDefaultSniffers();
   1.476 +
   1.477 +  sp<DataSource> dataSource =
   1.478 +    DataSource::CreateFromURI(static_cast<char*>(mDecoder->mResource));
   1.479 +  if (!dataSource.get() || dataSource->initCheck()) {
   1.480 +    return false;
   1.481 +  }
   1.482 +
   1.483 +  sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
   1.484 +  if (extractor == nullptr) {
   1.485 +    return false;
   1.486 +  }
   1.487 +
   1.488 +  ssize_t audioTrackIndex = -1;
   1.489 +  ssize_t videoTrackIndex = -1;
   1.490 +  const char *audioMime = nullptr;
   1.491 +  const char *videoMime = nullptr;
   1.492 +
   1.493 +  for (size_t i = 0; i < extractor->countTracks(); ++i) {
   1.494 +    sp<MetaData> meta = extractor->getTrackMetaData(i);
   1.495 +
   1.496 +    const char *mime;
   1.497 +    if (!meta->findCString(kKeyMIMEType, &mime)) {
   1.498 +      continue;
   1.499 +    }
   1.500 +
   1.501 +    if (videoTrackIndex == -1 && !strncasecmp(mime, "video/", 6)) {
   1.502 +      videoTrackIndex = i;
   1.503 +      videoMime = mime;
   1.504 +    } else if (audioTrackIndex == -1 && !strncasecmp(mime, "audio/", 6)) {
   1.505 +      audioTrackIndex = i;
   1.506 +      audioMime = mime;
   1.507 +    }
   1.508 +  }
   1.509 +
   1.510 +  if (videoTrackIndex == -1 && audioTrackIndex == -1) {
   1.511 +    return false;
   1.512 +  }
   1.513 +
   1.514 +  int64_t totalDurationUs = 0;
   1.515 +
   1.516 +#ifdef MOZ_WIDGET_GONK
   1.517 +  sp<IOMX> omx = GetOMX();
   1.518 +#else
   1.519 +  sp<IOMX> omx = sClientInstance.get()->interface();
   1.520 +#endif
   1.521 +
   1.522 +  sp<MediaSource> videoTrack;
   1.523 +  sp<MediaSource> videoSource;
   1.524 +  if (videoTrackIndex != -1 && (videoTrack = extractor->getTrack(videoTrackIndex)) != nullptr) {
   1.525 +#if defined(MOZ_ANDROID_FROYO)
   1.526 +    // Allow up to 720P video.
   1.527 +    sp<MetaData> meta = extractor->getTrackMetaData(videoTrackIndex);
   1.528 +    meta->setInt32(kKeyMaxInputSize, (1280 * 720 * 3) / 2);
   1.529 +#endif
   1.530 +    videoSource = CreateVideoSource(mPluginHost, omx, videoTrack);
   1.531 +    if (videoSource == nullptr) {
   1.532 +      LOG("OMXCodec failed to initialize video decoder for \"%s\"", videoMime);
   1.533 +      return false;
   1.534 +    }
   1.535 +    status_t status = videoSource->start();
   1.536 +    if (status != OK) {
   1.537 +      LOG("videoSource->start() failed with status %#x", status);
   1.538 +      return false;
   1.539 +    }
   1.540 +    int64_t durationUs;
   1.541 +    if (videoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
   1.542 +      if (durationUs < 0)
   1.543 +        LOG("video duration %lld should be nonnegative", durationUs);
   1.544 +      if (durationUs > totalDurationUs)
   1.545 +        totalDurationUs = durationUs;
   1.546 +    }
   1.547 +  }
   1.548 +
   1.549 +  sp<MediaSource> audioTrack;
   1.550 +  sp<MediaSource> audioSource;
   1.551 +  if (audioTrackIndex != -1 && (audioTrack = extractor->getTrack(audioTrackIndex)) != nullptr)
   1.552 +  {
   1.553 +    if (!strcasecmp(audioMime, "audio/raw")) {
   1.554 +      audioSource = audioTrack;
   1.555 +    } else {
   1.556 +      audioSource = OMXCodec::Create(omx,
   1.557 +                                     audioTrack->getFormat(),
   1.558 +                                     false, // decoder
   1.559 +                                     audioTrack);
   1.560 +    }
   1.561 +
   1.562 +    if (audioSource == nullptr) {
   1.563 +      LOG("OMXCodec failed to initialize audio decoder for \"%s\"", audioMime);
   1.564 +      return false;
   1.565 +    }
   1.566 +
   1.567 +    status_t status = audioSource->start();
   1.568 +    if (status != OK) {
   1.569 +      LOG("audioSource->start() failed with status %#x", status);
   1.570 +      return false;
   1.571 +    }
   1.572 +
   1.573 +    int64_t durationUs;
   1.574 +    if (audioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
   1.575 +      if (durationUs < 0)
   1.576 +        LOG("audio duration %lld should be nonnegative", durationUs);
   1.577 +      if (durationUs > totalDurationUs)
   1.578 +        totalDurationUs = durationUs;
   1.579 +    }
   1.580 +  }
   1.581 +
   1.582 +  // set decoder state
   1.583 +  mVideoTrack = videoTrack;
   1.584 +  mVideoSource = videoSource;
   1.585 +  mAudioTrack = audioTrack;
   1.586 +  mAudioSource = audioSource;
   1.587 +  mDurationUs = totalDurationUs;
   1.588 +
   1.589 +  if (mVideoSource.get() && !SetVideoFormat())
   1.590 +    return false;
   1.591 +
   1.592 +  // To reliably get the channel and sample rate data we need to read from the
    1.593 +  // audio source until we get an INFO_FORMAT_CHANGED status
   1.594 +  if (mAudioSource.get()) {
   1.595 +    if (mAudioSource->read(&mAudioBuffer) != INFO_FORMAT_CHANGED) {
   1.596 +      sp<MetaData> meta = mAudioSource->getFormat();
   1.597 +      if (!meta->findInt32(kKeyChannelCount, &mAudioChannels) ||
   1.598 +          !meta->findInt32(kKeySampleRate, &mAudioSampleRate)) {
   1.599 +        return false;
   1.600 +      }
   1.601 +      mAudioMetadataRead = true;
   1.602 +
   1.603 +      if (mAudioChannels < 0) {
   1.604 +        LOG("audio channel count %d must be nonnegative", mAudioChannels);
   1.605 +        return false;
   1.606 +      }
   1.607 +
   1.608 +      if (mAudioSampleRate < 0) {
   1.609 +        LOG("audio sample rate %d must be nonnegative", mAudioSampleRate);
   1.610 +        return false;
   1.611 +      }
   1.612 +    }
   1.613 +    else if (!SetAudioFormat()) {
   1.614 +        return false;
   1.615 +    }
   1.616 +  }
   1.617 +  return true;
   1.618 +}
   1.619 +
   1.620 +bool OmxDecoder::SetVideoFormat() {
   1.621 +  sp<MetaData> format = mVideoSource->getFormat();
   1.622 +
   1.623 +  // Stagefright's kKeyWidth and kKeyHeight are what MPAPI calls stride and
   1.624 +  // slice height. Stagefright only seems to use its kKeyStride and
   1.625 +  // kKeySliceHeight to initialize camera video formats.
   1.626 +
   1.627 +#if defined(DEBUG) && !defined(MOZ_ANDROID_FROYO)
   1.628 +  int32_t unexpected;
   1.629 +  if (format->findInt32(kKeyStride, &unexpected))
   1.630 +    LOG("Expected kKeyWidth, but found kKeyStride %d", unexpected);
   1.631 +  if (format->findInt32(kKeySliceHeight, &unexpected))
   1.632 +    LOG("Expected kKeyHeight, but found kKeySliceHeight %d", unexpected);
   1.633 +#endif // DEBUG
   1.634 +
   1.635 +  const char *componentName;
   1.636 +
   1.637 +  if (!format->findInt32(kKeyWidth, &mVideoStride) ||
   1.638 +      !format->findInt32(kKeyHeight, &mVideoSliceHeight) ||
   1.639 +      !format->findCString(kKeyDecoderComponent, &componentName) ||
   1.640 +      !format->findInt32(kKeyColorFormat, &mVideoColorFormat) ) {
   1.641 +    return false;
   1.642 +  }
   1.643 +
   1.644 +  if (mVideoStride <= 0) {
   1.645 +    LOG("stride %d must be positive", mVideoStride);
   1.646 +    return false;
   1.647 +  }
   1.648 +
   1.649 +  if (mVideoSliceHeight <= 0) {
   1.650 +    LOG("slice height %d must be positive", mVideoSliceHeight);
   1.651 +    return false;
   1.652 +  }
   1.653 +
   1.654 +  // Gingerbread does not support the kKeyCropRect key
   1.655 +#if !defined(MOZ_ANDROID_V2_X_X)
   1.656 +  if (!format->findRect(kKeyCropRect, &mVideoCropLeft, &mVideoCropTop,
   1.657 +                                      &mVideoCropRight, &mVideoCropBottom)) {
   1.658 +#endif
   1.659 +    mVideoCropLeft = 0;
   1.660 +    mVideoCropTop = 0;
   1.661 +    mVideoCropRight = mVideoStride - 1;
   1.662 +    mVideoCropBottom = mVideoSliceHeight - 1;
   1.663 +    LOG("crop rect not available, assuming no cropping");
   1.664 +#if !defined(MOZ_ANDROID_V2_X_X)
   1.665 +  }
   1.666 +#endif
   1.667 +
   1.668 +  if (mVideoCropLeft < 0 || mVideoCropLeft >= mVideoCropRight || mVideoCropRight >= mVideoStride ||
   1.669 +      mVideoCropTop < 0 || mVideoCropTop >= mVideoCropBottom || mVideoCropBottom >= mVideoSliceHeight) {
   1.670 +    LOG("invalid crop rect %d,%d-%d,%d", mVideoCropLeft, mVideoCropTop, mVideoCropRight, mVideoCropBottom);
   1.671 +    return false;
   1.672 +  }
   1.673 +
   1.674 +  mVideoWidth = mVideoCropRight - mVideoCropLeft + 1;
   1.675 +  mVideoHeight = mVideoCropBottom - mVideoCropTop + 1;
   1.676 +  MOZ_ASSERT(mVideoWidth > 0 && mVideoWidth <= mVideoStride);
   1.677 +  MOZ_ASSERT(mVideoHeight > 0 && mVideoHeight <= mVideoSliceHeight);
   1.678 +
   1.679 +#if !defined(MOZ_ANDROID_FROYO)
   1.680 +  if (!format->findInt32(kKeyRotation, &mVideoRotation)) {
   1.681 +#endif
   1.682 +    mVideoRotation = 0;
   1.683 +#if !defined(MOZ_ANDROID_FROYO)
   1.684 +    LOG("rotation not available, assuming 0");
   1.685 +  }
   1.686 +#endif
   1.687 +
   1.688 +  if (mVideoRotation != 0 && mVideoRotation != 90 &&
   1.689 +      mVideoRotation != 180 && mVideoRotation != 270) {
   1.690 +    LOG("invalid rotation %d, assuming 0", mVideoRotation);
   1.691 +  }
   1.692 +
   1.693 +  LOG("width: %d height: %d component: %s format: %#x stride: %d sliceHeight: %d rotation: %d crop: %d,%d-%d,%d",
   1.694 +      mVideoWidth, mVideoHeight, componentName, mVideoColorFormat,
   1.695 +      mVideoStride, mVideoSliceHeight, mVideoRotation,
   1.696 +      mVideoCropLeft, mVideoCropTop, mVideoCropRight, mVideoCropBottom);
   1.697 +
   1.698 +  return true;
   1.699 +}
   1.700 +
   1.701 +bool OmxDecoder::SetAudioFormat() {
   1.702 +  // If the format changed, update our cached info.
   1.703 +  if (!mAudioSource->getFormat()->findInt32(kKeyChannelCount, &mAudioChannels) ||
   1.704 +      !mAudioSource->getFormat()->findInt32(kKeySampleRate, &mAudioSampleRate)) {
   1.705 +    return false;
   1.706 +  }
   1.707 +
   1.708 +  LOG("channelCount: %d sampleRate: %d", mAudioChannels, mAudioSampleRate);
   1.709 +
   1.710 +  if (mAudioChannels < 0) {
   1.711 +    LOG("audio channel count %d must be nonnegative", mAudioChannels);
   1.712 +    return false;
   1.713 +  }
   1.714 +
   1.715 +  if (mAudioSampleRate < 0) {
   1.716 +    LOG("audio sample rate %d must be nonnegative", mAudioSampleRate);
   1.717 +    return false;
   1.718 +  }
   1.719 +
   1.720 +  return true;
   1.721 +}
   1.722 +
   1.723 +void OmxDecoder::ReleaseVideoBuffer() {
   1.724 +  if (mVideoBuffer) {
   1.725 +    mVideoBuffer->release();
   1.726 +    mVideoBuffer = nullptr;
   1.727 +  }
   1.728 +}
   1.729 +
   1.730 +void OmxDecoder::ReleaseAudioBuffer() {
   1.731 +  if (mAudioBuffer) {
   1.732 +    mAudioBuffer->release();
   1.733 +    mAudioBuffer = nullptr;
   1.734 +  }
   1.735 +}
   1.736 +
   1.737 +void OmxDecoder::ToVideoFrame_YUV420Planar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
   1.738 +  void *y = aData;
   1.739 +  void *u = static_cast<uint8_t *>(y) + mVideoStride * mVideoSliceHeight;
   1.740 +  void *v = static_cast<uint8_t *>(u) + mVideoStride/2 * mVideoSliceHeight/2;
   1.741 +  aFrame->Set(aTimeUs, aKeyFrame,
   1.742 +              aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
   1.743 +              y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
   1.744 +              u, mVideoStride/2, mVideoWidth/2, mVideoHeight/2, 0, 0,
   1.745 +              v, mVideoStride/2, mVideoWidth/2, mVideoHeight/2, 0, 0);
   1.746 +}
   1.747 +
   1.748 +void OmxDecoder::ToVideoFrame_CbYCrY(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
   1.749 +  aFrame->Set(aTimeUs, aKeyFrame,
   1.750 +              aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
   1.751 +              aData, mVideoStride, mVideoWidth, mVideoHeight, 1, 1,
   1.752 +              aData, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 3,
   1.753 +              aData, mVideoStride, mVideoWidth/2, mVideoHeight/2, 2, 3);
   1.754 +}
   1.755 +
   1.756 +void OmxDecoder::ToVideoFrame_YUV420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
   1.757 +  int32_t videoStride = mVideoStride;
   1.758 +  int32_t videoSliceHeight = mVideoSliceHeight;
   1.759 +
   1.760 +  // OMX.SEC.avcdec rounds mVideoStride and mVideoSliceHeight up to the nearest
   1.761 +  // multiple of 16 but the data itself is too small to fit. What we do is check
    1.762 +  // to see if the video size matches the raw width and height. If so we can
   1.763 +  // use those figures instead.
   1.764 +
   1.765 +  if (static_cast<int>(aSize) == mVideoWidth * mVideoHeight * 3 / 2) {
   1.766 +    videoStride = mVideoWidth;
   1.767 +    videoSliceHeight = mVideoHeight;
   1.768 +  }
   1.769 +
   1.770 +  void *y = aData;
   1.771 +  void *uv = static_cast<uint8_t *>(y) + (videoStride * videoSliceHeight);
   1.772 +  aFrame->Set(aTimeUs, aKeyFrame,
   1.773 +              aData, aSize, videoStride, videoSliceHeight, mVideoRotation,
   1.774 +              y, videoStride, mVideoWidth, mVideoHeight, 0, 0,
   1.775 +              uv, videoStride, mVideoWidth/2, mVideoHeight/2, 0, 1,
   1.776 +              uv, videoStride, mVideoWidth/2, mVideoHeight/2, 1, 1);
   1.777 +}
   1.778 +
void OmxDecoder::ToVideoFrame_YVU420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
  // Same layout as YUV420SemiPlanar but with Cr before Cb in the interleaved
  // chroma plane (NV21-style), so reuse that helper and then swap the two
  // chroma byte offsets it set.
  ToVideoFrame_YUV420SemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
  aFrame->Cb.mOffset = 1;
  aFrame->Cr.mOffset = 0;
}
   1.784 +
void OmxDecoder::ToVideoFrame_YUV420PackedSemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
  // TI variant (e.g. Galaxy Nexus): Y plane followed by an interleaved Cb/Cr
  // plane. The chroma plane is located mVideoCropTop/2 rows earlier than a
  // plain stride*sliceHeight offset — presumably compensating for how the TI
  // decoder reports slice height relative to the crop region; verify against
  // the OMX.TI component if this is ever revisited.
  void *y = aData;
  void *uv = static_cast<uint8_t *>(y) + mVideoStride * (mVideoSliceHeight - mVideoCropTop/2);
  aFrame->Set(aTimeUs, aKeyFrame,
              aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
              y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
              uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 1,
              uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 1, 1);
}
   1.794 +
   1.795 +void OmxDecoder::ToVideoFrame_YVU420PackedSemiPlanar32m4ka(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
   1.796 +  size_t roundedSliceHeight = (mVideoSliceHeight + 31) & ~31;
   1.797 +  size_t roundedStride = (mVideoStride + 31) & ~31;
   1.798 +  void *y = aData;
   1.799 +  void *uv = static_cast<uint8_t *>(y) + (roundedStride * roundedSliceHeight);
   1.800 +  aFrame->Set(aTimeUs, aKeyFrame,
   1.801 +              aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
   1.802 +              y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
   1.803 +              uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 1, 1,
   1.804 +              uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 1);
   1.805 +}
   1.806 +
   1.807 +bool OmxDecoder::ToVideoFrame_RGB565(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback) {
   1.808 +  void *buffer = (*aBufferCallback)(mVideoWidth, mVideoHeight, MPAPI::RGB565);
   1.809 +
   1.810 +  if (!buffer) {
   1.811 +    return false;
   1.812 +  }
   1.813 +
   1.814 +  aFrame->mTimeUs = aTimeUs;
   1.815 +
   1.816 +  memcpy(buffer, aData, mVideoWidth * mVideoHeight * 2);
   1.817 +
   1.818 +  aFrame->mSize = mVideoWidth * mVideoHeight * 2;
   1.819 +
   1.820 +  return true;
   1.821 +}
   1.822 +
bool OmxDecoder::ToVideoFrame_ColorConverter(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback) {
#ifdef MOZ_ANDROID_HC
  // Honeycomb: no usable ColorConverter; let the caller try other paths.
  return false;
#else
  // Lazily create a converter from the decoder's output format to RGB565.
  // NOTE(review): the converter is cached but never recreated, which assumes
  // mVideoColorFormat does not change after the first call — confirm against
  // the INFO_FORMAT_CHANGED handling in ReadVideo.
  if (!mColorConverter) {
    mColorConverter = new ColorConverter((OMX_COLOR_FORMATTYPE)mVideoColorFormat,
                                         OMX_COLOR_Format16bitRGB565);
  }

  // Android has no conversion routine for this particular source format.
  if (!mColorConverter->isValid()) {
    return false;
  }

  aFrame->mTimeUs = aTimeUs;

  // Ask the host for a destination buffer sized for RGB565 output.
  void *buffer = (*aBufferCallback)(mVideoWidth, mVideoHeight, MPAPI::RGB565);

  if (!buffer) {
    return false;
  }

  aFrame->mSize = mVideoWidth * mVideoHeight * 2;

#if defined(MOZ_ANDROID_V2_X_X)
  // Android 2.x ColorConverter API: whole-frame convert, no crop support.
  mColorConverter->convert(mVideoWidth, mVideoHeight,
                           aData, 0 /* srcSkip */,
                           buffer, mVideoWidth * 2);
#else
  // Newer API: convert the cropped source rectangle (inclusive coordinates)
  // into the full destination rectangle.
  mColorConverter->convert(aData, mVideoStride, mVideoSliceHeight,
                           mVideoCropLeft, mVideoCropTop,
                           mVideoCropLeft + mVideoWidth - 1,
                           mVideoCropTop + mVideoHeight - 1,
                           buffer, mVideoWidth, mVideoHeight,
                           0, 0, mVideoWidth - 1, mVideoHeight - 1);
#endif

  return true;
#endif
}
   1.862 +
   1.863 +bool OmxDecoder::ToVideoFrame_I420ColorConverter(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback)
   1.864 +{
   1.865 +#if defined(MOZ_ANDROID_V4_OR_ABOVE)
   1.866 +  I420ColorConverter yuvConverter;
   1.867 +
   1.868 +  if (!yuvConverter.isLoaded()) {
   1.869 +    return false;
   1.870 +  }
   1.871 +
   1.872 +  if (yuvConverter.getDecoderOutputFormat() != mVideoColorFormat) {
   1.873 +    return false;
   1.874 +  }
   1.875 +
   1.876 +  void *buffer = (*aBufferCallback)(mVideoWidth, mVideoHeight, MPAPI::I420);
   1.877 +
   1.878 +  ARect crop = { mVideoCropLeft, mVideoCropTop, mVideoCropRight, mVideoCropBottom };
   1.879 +  int result = yuvConverter.convertDecoderOutputToI420(aData,
   1.880 +                                                       mVideoWidth,
   1.881 +                                                       mVideoHeight,
   1.882 +                                                       crop,
   1.883 +                                                       buffer);
   1.884 +
   1.885 +  // result is 0 on success, -1 otherwise.
   1.886 +  if (result == OK) {
   1.887 +    aFrame->mTimeUs = aTimeUs;
   1.888 +    aFrame->mSize = mVideoWidth * mVideoHeight * 3 / 2;
   1.889 +  }
   1.890 +
   1.891 +  return result == OK;
   1.892 +#else
   1.893 +  return false;
   1.894 +#endif
   1.895 +}
   1.896 +
   1.897 +bool OmxDecoder::ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame, BufferCallback *aBufferCallback) {
   1.898 +  switch (mVideoColorFormat) {
   1.899 +// Froyo support is best handled with the android color conversion code. I
   1.900 +// get corrupted videos when using our own routines below.
   1.901 +#if !defined(MOZ_ANDROID_FROYO)
   1.902 +  case OMX_COLOR_FormatYUV420Planar: // e.g. Asus Transformer, Stagefright's software decoder
   1.903 +    ToVideoFrame_YUV420Planar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
   1.904 +    break;
   1.905 +  case OMX_COLOR_FormatCbYCrY: // e.g. Droid 1
   1.906 +    ToVideoFrame_CbYCrY(aFrame, aTimeUs, aData, aSize, aKeyFrame);
   1.907 +    break;
   1.908 +  case OMX_COLOR_FormatYUV420SemiPlanar: // e.g. Galaxy S III
   1.909 +    ToVideoFrame_YUV420SemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
   1.910 +    break;
   1.911 +  case OMX_QCOM_COLOR_FormatYVU420SemiPlanar: // e.g. Nexus One
   1.912 +    ToVideoFrame_YVU420SemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
   1.913 +    break;
   1.914 +  case OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka: // e.g. Otoro
   1.915 +    ToVideoFrame_YVU420PackedSemiPlanar32m4ka(aFrame, aTimeUs, aData, aSize, aKeyFrame);
   1.916 +    break;
   1.917 +  case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar: // e.g. Galaxy Nexus
   1.918 +    ToVideoFrame_YUV420PackedSemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
   1.919 +    break;
   1.920 +  case OMX_COLOR_Format16bitRGB565:
   1.921 +    return ToVideoFrame_RGB565(aFrame, aTimeUs, aData, aSize, aKeyFrame, aBufferCallback);
   1.922 +    break;
   1.923 +#endif
   1.924 +  default:
   1.925 +    if (!ToVideoFrame_ColorConverter(aFrame, aTimeUs, aData, aSize, aKeyFrame, aBufferCallback) &&
   1.926 +        !ToVideoFrame_I420ColorConverter(aFrame, aTimeUs, aData, aSize, aKeyFrame, aBufferCallback)) {
   1.927 +      LOG("Unknown video color format: %#x", mVideoColorFormat);
   1.928 +      return false;
   1.929 +    }
   1.930 +  }
   1.931 +  return true;
   1.932 +}
   1.933 +
   1.934 +bool OmxDecoder::ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize, int32_t aAudioChannels, int32_t aAudioSampleRate)
   1.935 +{
   1.936 +  aFrame->Set(aTimeUs, reinterpret_cast<char *>(aData) + aDataOffset, aSize, aAudioChannels, aAudioSampleRate);
   1.937 +  return true;
   1.938 +}
   1.939 +
class ReadOptions : public MediaSource::ReadOptions
{
  // HTC have their own version of ReadOptions with extra fields. If we don't
  // have this here, HTCOMXCodec will corrupt our stack.
  // The padding reserves space so that when an HTC build writes its extra
  // members through a MediaSource::ReadOptions*, it scribbles into this
  // array instead of past the end of our object.
  uint32_t sadface[16];
};
   1.946 +
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs,
                           BufferCallback *aBufferCallback)
{
  // Read the next decoded video frame into aFrame, seeking first when
  // aSeekTimeUs != -1 (-1 means "continue from the current position").
  // Returns true only when the underlying source read succeeded.
  MOZ_ASSERT(aSeekTimeUs >= -1);

  if (!mVideoSource.get())
    return false;

  // Drop the buffer returned by the previous read before requesting a new one.
  ReleaseVideoBuffer();

  status_t err;

  if (aSeekTimeUs != -1) {
    // Our padded ReadOptions subclass (defined in this file) protects the
    // stack from HTC's extended OMXCodec.
    ReadOptions options;
    options.setSeekTo(aSeekTimeUs);
    err = mVideoSource->read(&mVideoBuffer, &options);
  } else {
    err = mVideoSource->read(&mVideoBuffer);
  }

  // Default to an empty frame; ToVideoFrame fills it in on success.
  aFrame->mSize = 0;

  if (err == OK && mVideoBuffer->range_length() > 0) {
    int64_t timeUs;
    int32_t keyFrame;

    // Every frame must carry a presentation timestamp.
    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs) ) {
      LOG("no frame time");
      return false;
    }

    if (timeUs < 0) {
      LOG("frame time %lld must be nonnegative", timeUs);
      return false;
    }

    // Treat frames without the sync-frame flag as non-key frames.
    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
       keyFrame = 0;
    }

    char *data = reinterpret_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
    size_t length = mVideoBuffer->range_length();

    if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame, aBufferCallback)) {
      return false;
    }
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info and retry the read.
    LOG("mVideoSource INFO_FORMAT_CHANGED");
    if (!SetVideoFormat())
      return false;
    else
      return ReadVideo(aFrame, aSeekTimeUs, aBufferCallback);
  }
  else if (err == ERROR_END_OF_STREAM) {
    LOG("mVideoSource END_OF_STREAM");
  }
  else if (err != OK) {
    LOG("mVideoSource ERROR %#x", err);
  }

  return err == OK;
}
  1.1011 +
bool OmxDecoder::ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs)
{
  // Read the next decoded audio buffer into aFrame, seeking first when
  // aSeekTimeUs != -1 (-1 means "continue from the current position").
  MOZ_ASSERT(aSeekTimeUs >= -1);

  status_t err;
  if (mAudioMetadataRead && aSeekTimeUs == -1) {
    // Use the data read into the buffer during metadata time
    err = OK;
  }
  else {
    ReleaseAudioBuffer();
    if (aSeekTimeUs != -1) {
      // Padded ReadOptions — see the ReadOptions subclass in this file.
      ReadOptions options;
      options.setSeekTo(aSeekTimeUs);
      err = mAudioSource->read(&mAudioBuffer, &options);
    } else {
      err = mAudioSource->read(&mAudioBuffer);
    }
  }
  mAudioMetadataRead = false;

  // The seek (if any) has now been consumed; the recursive re-read after a
  // format change below must not seek again.
  aSeekTimeUs = -1;

  if (err == OK && mAudioBuffer->range_length() != 0) {
    int64_t timeUs;
    // Every buffer must carry a presentation timestamp.
    if (!mAudioBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
      LOG("no frame time");
      return false;
    }

    if (timeUs < 0) {
      LOG("frame time %lld must be nonnegative", timeUs);
      return false;
    }

    return ToAudioFrame(aFrame, timeUs,
                        mAudioBuffer->data(),
                        mAudioBuffer->range_offset(),
                        mAudioBuffer->range_length(),
                        mAudioChannels, mAudioSampleRate);
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info and retry the read.
    LOG("mAudioSource INFO_FORMAT_CHANGED");
    if (!SetAudioFormat())
      return false;
    else
      return ReadAudio(aFrame, aSeekTimeUs);
  }
  else if (err == ERROR_END_OF_STREAM) {
    LOG("mAudioSource END_OF_STREAM");
  }
  else if (err != OK) {
    LOG("mAudioSource ERROR %#x", err);
  }

  return err == OK;
}
  1.1070 +
// The functions below are C-style trampolines: MPAPI hands callers a Decoder
// struct of plain function pointers, and each entry forwards to the
// OmxDecoder instance stashed in Decoder::mPrivate by CreateDecoder.

static OmxDecoder *cast(Decoder *decoder) {
  return reinterpret_cast<OmxDecoder *>(decoder->mPrivate);
}

static void GetDuration(Decoder *aDecoder, int64_t *durationUs) {
  cast(aDecoder)->GetDuration(durationUs);
}

static void GetVideoParameters(Decoder *aDecoder, int32_t *width, int32_t *height) {
  cast(aDecoder)->GetVideoParameters(width, height);
}

static void GetAudioParameters(Decoder *aDecoder, int32_t *numChannels, int32_t *sampleRate) {
  cast(aDecoder)->GetAudioParameters(numChannels, sampleRate);
}

static bool HasVideo(Decoder *aDecoder) {
  return cast(aDecoder)->HasVideo();
}

static bool HasAudio(Decoder *aDecoder) {
  return cast(aDecoder)->HasAudio();
}

static bool ReadVideo(Decoder *aDecoder, VideoFrame *aFrame, int64_t aSeekTimeUs, BufferCallback *aBufferCallback)
{
  return cast(aDecoder)->ReadVideo(aFrame, aSeekTimeUs, aBufferCallback);
}

static bool ReadAudio(Decoder *aDecoder, AudioFrame *aFrame, int64_t aSeekTimeUs)
{
  return cast(aDecoder)->ReadAudio(aFrame, aSeekTimeUs);
}
  1.1104 +
  1.1105 +static void DestroyDecoder(Decoder *aDecoder)
  1.1106 +{
  1.1107 +  if (aDecoder->mPrivate)
  1.1108 +    delete reinterpret_cast<OmxDecoder *>(aDecoder->mPrivate);
  1.1109 +}
  1.1110 +
  1.1111 +static bool Match(const char *aMimeChars, size_t aMimeLen, const char *aNeedle)
  1.1112 +{
  1.1113 +  return !strncmp(aMimeChars, aNeedle, aMimeLen);
  1.1114 +}
  1.1115 +
// RFC 6381 codec parameter strings we are prepared to decode; the array is
// nullptr-terminated and handed to the host via CanDecode's out-parameter.
static const char* const gCodecs[] = {
  "avc1.42E01E",  // H.264 Constrained Baseline Profile Level 3.0
  "avc1.42001E",  // H.264 Baseline Profile Level 3.0
  "avc1.42001F",  // H.264 Baseline Profile Level 3.1
  "avc1.4D401E",  // H.264 Main Profile Level 3.0
  "avc1.4D401F",  // H.264 Main Profile Level 3.1
  "mp4a.40.2",    // AAC-LC
  nullptr
};
  1.1125 +
  1.1126 +static bool CanDecode(const char *aMimeChars, size_t aMimeLen, const char* const**aCodecs)
  1.1127 +{
  1.1128 +  if (!Match(aMimeChars, aMimeLen, "video/mp4") &&
  1.1129 +      !Match(aMimeChars, aMimeLen, "audio/mp4") &&
  1.1130 +      !Match(aMimeChars, aMimeLen, "audio/mpeg") &&
  1.1131 +      !Match(aMimeChars, aMimeLen, "application/octet-stream")) { // file urls
  1.1132 +    return false;
  1.1133 +  }
  1.1134 +  *aCodecs = gCodecs;
  1.1135 +
  1.1136 +  return true;
  1.1137 +}
  1.1138 +
  1.1139 +static bool CreateDecoder(PluginHost *aPluginHost, Decoder *aDecoder, const char *aMimeChars, size_t aMimeLen)
  1.1140 +{
  1.1141 +  OmxDecoder *omx = new OmxDecoder(aPluginHost, aDecoder);
  1.1142 +  if (!omx || !omx->Init()) {
  1.1143 +    if (omx)
  1.1144 +      delete omx;
  1.1145 +    return false;
  1.1146 +  }
  1.1147 +
  1.1148 +  aDecoder->mPrivate = omx;
  1.1149 +  aDecoder->GetDuration = GetDuration;
  1.1150 +  aDecoder->GetVideoParameters = GetVideoParameters;
  1.1151 +  aDecoder->GetAudioParameters = GetAudioParameters;
  1.1152 +  aDecoder->HasVideo = HasVideo;
  1.1153 +  aDecoder->HasAudio = HasAudio;
  1.1154 +  aDecoder->ReadVideo = ReadVideo;
  1.1155 +  aDecoder->ReadAudio = ReadAudio;
  1.1156 +  aDecoder->DestroyDecoder = DestroyDecoder;
  1.1157 +
  1.1158 +  return true;
  1.1159 +}
  1.1160 +
  1.1161 +} // namespace OmxPlugin
  1.1162 +
  1.1163 +// Export the manifest so MPAPI can find our entry points.
Manifest MOZ_EXPORT MPAPI_MANIFEST = {
  OmxPlugin::CanDecode,    // probe: can we handle this MIME type/codecs?
  OmxPlugin::CreateDecoder // factory: build a decoder for a supported stream
};

mercurial