media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,875 @@
     1.4 +/* This Source Code Form is subject to the terms of the Mozilla Public
     1.5 + * License, v. 2.0. If a copy of the MPL was not distributed with this file,
     1.6 + * You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.7 +
     1.8 +#include "CSFLog.h"
     1.9 +
    1.10 +#include "WebrtcOMXH264VideoCodec.h"
    1.11 +
    1.12 +// Android/Stagefright
    1.13 +#include <avc_utils.h>
    1.14 +#include <binder/ProcessState.h>
    1.15 +#include <foundation/ABuffer.h>
    1.16 +#include <foundation/AMessage.h>
    1.17 +#include <gui/Surface.h>
    1.18 +#include <media/ICrypto.h>
    1.19 +#include <MediaCodec.h>
    1.20 +#include <MediaDefs.h>
    1.21 +#include <MediaErrors.h>
    1.22 +#include <MetaData.h>
    1.23 +#include <OMX_Component.h>
    1.24 +using namespace android;
    1.25 +
    1.26 +// WebRTC
    1.27 +#include "common_video/interface/texture_video_frame.h"
    1.28 +#include "video_engine/include/vie_external_codec.h"
    1.29 +
    1.30 +// Gecko
    1.31 +#include "GonkNativeWindow.h"
    1.32 +#include "GonkNativeWindowClient.h"
    1.33 +#include "mozilla/Atomics.h"
    1.34 +#include "mozilla/Mutex.h"
    1.35 +#include "nsThreadUtils.h"
    1.36 +#include "OMXCodecWrapper.h"
    1.37 +#include "TextureClient.h"
    1.38 +
    1.39 +#define DEQUEUE_BUFFER_TIMEOUT_US (100 * 1000ll) // 100ms.
    1.40 +#define START_DEQUEUE_BUFFER_TIMEOUT_US (10 * DEQUEUE_BUFFER_TIMEOUT_US) // 1s.
    1.41 +#define DRAIN_THREAD_TIMEOUT_US  (1000 * 1000ll) // 1s.
    1.42 +
    1.43 +#define LOG_TAG "WebrtcOMXH264VideoCodec"
    1.44 +#define CODEC_LOGV(...) CSFLogInfo(LOG_TAG, __VA_ARGS__)
    1.45 +#define CODEC_LOGD(...) CSFLogDebug(LOG_TAG, __VA_ARGS__)
    1.46 +#define CODEC_LOGI(...) CSFLogInfo(LOG_TAG, __VA_ARGS__)
    1.47 +#define CODEC_LOGW(...) CSFLogWarn(LOG_TAG, __VA_ARGS__)
    1.48 +#define CODEC_LOGE(...) CSFLogError(LOG_TAG, __VA_ARGS__)
    1.49 +
    1.50 +namespace mozilla {
    1.51 +
// NS_INLINE_DECL_THREADSAFE_REFCOUNTING() cannot be used directly in
// ImageNativeHandle below because the return type of webrtc::NativeHandle
// AddRef()/Release() conflicts with those defined in the macro. To avoid
// another copy/paste of the ref-counting implementation here, this dummy base
// class is created to provide another level of indirection.
class DummyRefCountBase {
public:
  // Use the name of the real subclass for logging.
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(ImageNativeHandle)
  // Virtual destructor so the subclass is deleted/destructed properly when
  // the last reference is released through a base-class pointer.
  virtual ~DummyRefCountBase() {}
};
    1.64 +
    1.65 +// This function implements 2 interafces:
    1.66 +// 1. webrtc::NativeHandle: to wrap layers::Image object so decoded frames can
    1.67 +//    be passed through WebRTC rendering pipeline using TextureVideoFrame.
    1.68 +// 2. ImageHandle: for renderer to get the image object inside without knowledge
    1.69 +//    about webrtc::NativeHandle.
    1.70 +class ImageNativeHandle MOZ_FINAL
    1.71 +  : public webrtc::NativeHandle
    1.72 +  , public DummyRefCountBase
    1.73 +{
    1.74 +public:
    1.75 +  ImageNativeHandle(layers::Image* aImage)
    1.76 +    : mImage(aImage)
    1.77 +  {}
    1.78 +
    1.79 +  // Implement webrtc::NativeHandle.
    1.80 +  virtual void* GetHandle() MOZ_OVERRIDE { return mImage.get(); }
    1.81 +
    1.82 +  virtual int AddRef() MOZ_OVERRIDE
    1.83 +  {
    1.84 +    return DummyRefCountBase::AddRef();
    1.85 +  }
    1.86 +
    1.87 +  virtual int Release() MOZ_OVERRIDE
    1.88 +  {
    1.89 +    return DummyRefCountBase::Release();
    1.90 +  }
    1.91 +
    1.92 +private:
    1.93 +  RefPtr<layers::Image> mImage;
    1.94 +};
    1.95 +
    1.96 +// Graphic buffer lifecycle management.
    1.97 +// Return buffer to OMX codec when renderer is done with it.
    1.98 +class RecycleCallback
    1.99 +{
   1.100 +public:
   1.101 +  RecycleCallback(const sp<MediaCodec>& aOmx, uint32_t aBufferIndex)
   1.102 +    : mOmx(aOmx)
   1.103 +    , mBufferIndex(aBufferIndex)
   1.104 +  {}
   1.105 +  typedef void* CallbackPtr;
   1.106 +  static void ReturnOMXBuffer(layers::TextureClient* aClient, CallbackPtr aClosure)
   1.107 +  {
   1.108 +    aClient->ClearRecycleCallback();
   1.109 +    RecycleCallback* self = static_cast<RecycleCallback*>(aClosure);
   1.110 +    self->mOmx->releaseOutputBuffer(self->mBufferIndex);
   1.111 +    delete self;
   1.112 +  }
   1.113 +
   1.114 +private:
   1.115 +  sp<MediaCodec> mOmx;
   1.116 +  uint32_t mBufferIndex;
   1.117 +};
   1.118 +
// Per-input-frame bookkeeping passed from the encode/decode call to the
// output drain thread, so callbacks can be constructed with metadata that
// matches the corresponding input frame.
struct EncodedFrame
{
  uint32_t mWidth;        // Frame width in pixels.
  uint32_t mHeight;       // Frame height in pixels.
  uint32_t mTimestamp;    // RTP timestamp (90kHz clock).
  int64_t mRenderTimeMs;  // Render time in milliseconds.
};
   1.126 +
   1.127 +// Base runnable class to repeatly pull OMX output buffers in seperate thread.
   1.128 +// How to use:
   1.129 +// - implementing DrainOutput() to get output. Remember to return false to tell
   1.130 +//   drain not to pop input queue.
   1.131 +// - call QueueInput() to schedule a run to drain output. The input, aFrame,
   1.132 +//   should contains corresponding info such as image size and timestamps for
   1.133 +//   DrainOutput() implementation to construct data needed by encoded/decoded
   1.134 +//   callbacks.
   1.135 +// TODO: Bug 997110 - Revisit queue/drain logic. Current design assumes that
   1.136 +//       encoder only generate one output buffer per input frame and won't work
   1.137 +//       if encoder drops frames or generates multiple output per input.
   1.138 +class OMXOutputDrain : public nsRunnable
   1.139 +{
   1.140 +public:
   1.141 +  void Start() {
   1.142 +    MonitorAutoLock lock(mMonitor);
   1.143 +    if (mThread == nullptr) {
   1.144 +      NS_NewNamedThread("OMXOutputDrain", getter_AddRefs(mThread));
   1.145 +    }
   1.146 +    CODEC_LOGD("OMXOutputDrain started");
   1.147 +    mEnding = false;
   1.148 +    mThread->Dispatch(this, NS_DISPATCH_NORMAL);
   1.149 +  }
   1.150 +
   1.151 +  void Stop() {
   1.152 +    MonitorAutoLock lock(mMonitor);
   1.153 +    mEnding = true;
   1.154 +    lock.NotifyAll(); // In case Run() is waiting.
   1.155 +
   1.156 +    if (mThread != nullptr) {
   1.157 +      mThread->Shutdown();
   1.158 +      mThread = nullptr;
   1.159 +    }
   1.160 +    CODEC_LOGD("OMXOutputDrain stopped");
   1.161 +  }
   1.162 +
   1.163 +  void QueueInput(const EncodedFrame& aFrame)
   1.164 +  {
   1.165 +    MonitorAutoLock lock(mMonitor);
   1.166 +
   1.167 +    MOZ_ASSERT(mThread);
   1.168 +
   1.169 +    mInputFrames.push(aFrame);
   1.170 +    // Notify Run() about queued input and it can start working.
   1.171 +    lock.NotifyAll();
   1.172 +  }
   1.173 +
   1.174 +  NS_IMETHODIMP Run() MOZ_OVERRIDE
   1.175 +  {
   1.176 +    MOZ_ASSERT(mThread);
   1.177 +
   1.178 +    MonitorAutoLock lock(mMonitor);
   1.179 +    while (true) {
   1.180 +      if (mInputFrames.empty()) {
   1.181 +        ALOGE("Waiting OMXOutputDrain");
   1.182 +        // Wait for new input.
   1.183 +        lock.Wait();
   1.184 +      }
   1.185 +
   1.186 +      if (mEnding) {
   1.187 +        ALOGE("Ending OMXOutputDrain");
   1.188 +        // Stop draining.
   1.189 +        break;
   1.190 +      }
   1.191 +
   1.192 +      MOZ_ASSERT(!mInputFrames.empty());
   1.193 +      EncodedFrame frame = mInputFrames.front();
   1.194 +      bool shouldPop = false;
   1.195 +      {
   1.196 +        // Release monitor while draining because it's blocking.
   1.197 +        MonitorAutoUnlock unlock(mMonitor);
   1.198 +        // |frame| provides size and time of corresponding input.
   1.199 +        shouldPop = DrainOutput(frame);
   1.200 +      }
   1.201 +      if (shouldPop) {
   1.202 +        mInputFrames.pop();
   1.203 +      }
   1.204 +    }
   1.205 +
   1.206 +    CODEC_LOGD("OMXOutputDrain Ended");
   1.207 +    return NS_OK;
   1.208 +  }
   1.209 +
   1.210 +protected:
   1.211 +  OMXOutputDrain()
   1.212 +    : mMonitor("OMXOutputDrain monitor")
   1.213 +    , mEnding(false)
   1.214 +  {}
   1.215 +
   1.216 +  // Drain output buffer for input frame aFrame.
   1.217 +  // aFrame contains info such as size and time of the input frame and can be
   1.218 +  // used to construct data for encoded/decoded callbacks if needed.
   1.219 +  // Return true to indicate we should pop input queue, and return false to
   1.220 +  // indicate aFrame should not be removed from input queue (either output is
   1.221 +  // not ready yet and should try again later, or the drained output is SPS/PPS
   1.222 +  // NALUs that has no corresponding input in queue).
   1.223 +  virtual bool DrainOutput(const EncodedFrame& aFrame) = 0;
   1.224 +
   1.225 +private:
   1.226 +  // This monitor protects all things below it, and is also used to
   1.227 +  // wait/notify queued input.
   1.228 +  Monitor mMonitor;
   1.229 +  nsCOMPtr<nsIThread> mThread;
   1.230 +  std::queue<EncodedFrame> mInputFrames;
   1.231 +  bool mEnding;
   1.232 +};
   1.233 +
   1.234 +// H.264 decoder using stagefright.
   1.235 +class WebrtcOMXDecoder MOZ_FINAL
   1.236 +{
   1.237 +  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebrtcOMXDecoder)
   1.238 +public:
   1.239 +  WebrtcOMXDecoder(const char* aMimeType)
   1.240 +    : mWidth(0)
   1.241 +    , mHeight(0)
   1.242 +    , mStarted(false)
   1.243 +  {
   1.244 +    // Create binder thread pool required by stagefright.
   1.245 +    android::ProcessState::self()->startThreadPool();
   1.246 +
   1.247 +    mLooper = new ALooper;
   1.248 +    mLooper->start();
   1.249 +    mCodec = MediaCodec::CreateByType(mLooper, aMimeType, false /* encoder */);
   1.250 +  }
   1.251 +
   1.252 +  virtual ~WebrtcOMXDecoder()
   1.253 +  {
   1.254 +    if (mStarted) {
   1.255 +      Stop();
   1.256 +    }
   1.257 +    if (mCodec != nullptr) {
   1.258 +      mCodec->release();
   1.259 +      mCodec.clear();
   1.260 +    }
   1.261 +    mLooper.clear();
   1.262 +  }
   1.263 +
   1.264 +  // Parse SPS/PPS NALUs.
   1.265 +  static sp<MetaData> ParseParamSets(sp<ABuffer>& aParamSets)
   1.266 +  {
   1.267 +    return MakeAVCCodecSpecificData(aParamSets);
   1.268 +  }
   1.269 +
   1.270 +  // Configure decoder using data returned by ParseParamSets().
   1.271 +  status_t ConfigureWithParamSets(const sp<MetaData>& aParamSets)
   1.272 +  {
   1.273 +    MOZ_ASSERT(mCodec != nullptr);
   1.274 +    if (mCodec == nullptr) {
   1.275 +      return INVALID_OPERATION;
   1.276 +    }
   1.277 +
   1.278 +    int32_t width = 0;
   1.279 +    bool ok = aParamSets->findInt32(kKeyWidth, &width);
   1.280 +    MOZ_ASSERT(ok && width > 0);
   1.281 +    int32_t height = 0;
   1.282 +    ok = aParamSets->findInt32(kKeyHeight, &height);
   1.283 +    MOZ_ASSERT(ok && height > 0);
   1.284 +    CODEC_LOGD("OMX:%p decoder config width:%d height:%d", this, width, height);
   1.285 +
   1.286 +    sp<AMessage> config = new AMessage();
   1.287 +    config->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
   1.288 +    config->setInt32("width", width);
   1.289 +    config->setInt32("height", height);
   1.290 +    mWidth = width;
   1.291 +    mHeight = height;
   1.292 +
   1.293 +    sp<Surface> surface = nullptr;
   1.294 +    mNativeWindow = new GonkNativeWindow();
   1.295 +    if (mNativeWindow.get()) {
   1.296 +      mNativeWindowClient = new GonkNativeWindowClient(mNativeWindow->getBufferQueue());
   1.297 +      if (mNativeWindowClient.get()) {
   1.298 +        surface = new Surface(mNativeWindowClient->getIGraphicBufferProducer());
   1.299 +      }
   1.300 +    }
   1.301 +    status_t result = mCodec->configure(config, surface, nullptr, 0);
   1.302 +    if (result == OK) {
   1.303 +      result = Start();
   1.304 +    }
   1.305 +    return result;
   1.306 +  }
   1.307 +
   1.308 +  status_t
   1.309 +  FillInput(const webrtc::EncodedImage& aEncoded, bool aIsFirstFrame,
   1.310 +            int64_t& aRenderTimeMs, webrtc::DecodedImageCallback* aCallback)
   1.311 +  {
   1.312 +    MOZ_ASSERT(mCodec != nullptr);
   1.313 +    if (mCodec == nullptr) {
   1.314 +      return INVALID_OPERATION;
   1.315 +    }
   1.316 +
   1.317 +    size_t index;
   1.318 +    status_t err = mCodec->dequeueInputBuffer(&index,
   1.319 +      aIsFirstFrame ? START_DEQUEUE_BUFFER_TIMEOUT_US : DEQUEUE_BUFFER_TIMEOUT_US);
   1.320 +    if (err != OK) {
   1.321 +      CODEC_LOGE("decode dequeue input buffer error:%d", err);
   1.322 +      return err;
   1.323 +    }
   1.324 +
   1.325 +    uint32_t flags = 0;
   1.326 +    if (aEncoded._frameType == webrtc::kKeyFrame) {
   1.327 +      flags = aIsFirstFrame ? MediaCodec::BUFFER_FLAG_CODECCONFIG : MediaCodec::BUFFER_FLAG_SYNCFRAME;
   1.328 +    }
   1.329 +    size_t size = aEncoded._length;
   1.330 +    MOZ_ASSERT(size);
   1.331 +    const sp<ABuffer>& omxIn = mInputBuffers.itemAt(index);
   1.332 +    MOZ_ASSERT(omxIn->capacity() >= size);
   1.333 +    omxIn->setRange(0, size);
   1.334 +    // Copying is needed because MediaCodec API doesn't support externallay
   1.335 +    // allocated buffer as input.
   1.336 +    memcpy(omxIn->data(), aEncoded._buffer, size);
   1.337 +    int64_t inputTimeUs = aEncoded._timeStamp * 1000 / 90; // 90kHz -> us.
   1.338 +    err = mCodec->queueInputBuffer(index, 0, size, inputTimeUs, flags);
   1.339 +    if (err == OK && !(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
   1.340 +      if (mOutputDrain == nullptr) {
   1.341 +        mOutputDrain = new OutputDrain(this, aCallback);
   1.342 +        mOutputDrain->Start();
   1.343 +      }
   1.344 +      EncodedFrame frame;
   1.345 +      frame.mWidth = mWidth;
   1.346 +      frame.mHeight = mHeight;
   1.347 +      frame.mTimestamp = aEncoded._timeStamp;
   1.348 +      frame.mRenderTimeMs = aRenderTimeMs;
   1.349 +      mOutputDrain->QueueInput(frame);
   1.350 +    }
   1.351 +
   1.352 +    return err;
   1.353 +  }
   1.354 +
   1.355 +  status_t
   1.356 +  DrainOutput(const EncodedFrame& aFrame, webrtc::DecodedImageCallback* aCallback)
   1.357 +  {
   1.358 +    MOZ_ASSERT(mCodec != nullptr);
   1.359 +    if (mCodec == nullptr) {
   1.360 +      return INVALID_OPERATION;
   1.361 +    }
   1.362 +
   1.363 +    size_t index = 0;
   1.364 +    size_t outOffset = 0;
   1.365 +    size_t outSize = 0;
   1.366 +    int64_t outTime = -1ll;
   1.367 +    uint32_t outFlags = 0;
   1.368 +    status_t err = mCodec->dequeueOutputBuffer(&index, &outOffset, &outSize,
   1.369 +                                               &outTime, &outFlags,
   1.370 +                                               DRAIN_THREAD_TIMEOUT_US);
   1.371 +    switch (err) {
   1.372 +      case OK:
   1.373 +        break;
   1.374 +      case -EAGAIN:
   1.375 +        // Not an error: output not available yet. Try later.
   1.376 +        CODEC_LOGI("decode dequeue OMX output buffer timed out. Try later.");
   1.377 +        return err;
   1.378 +      case INFO_FORMAT_CHANGED:
   1.379 +        // Not an error: will get this value when OMX output buffer is enabled,
   1.380 +        // or when input size changed.
   1.381 +        CODEC_LOGD("decode dequeue OMX output buffer format change");
   1.382 +        return err;
   1.383 +      case INFO_OUTPUT_BUFFERS_CHANGED:
   1.384 +        // Not an error: will get this value when OMX output buffer changed
   1.385 +        // (probably because of input size change).
   1.386 +        CODEC_LOGD("decode dequeue OMX output buffer change");
   1.387 +        err = mCodec->getOutputBuffers(&mOutputBuffers);
   1.388 +        MOZ_ASSERT(err == OK);
   1.389 +        return INFO_OUTPUT_BUFFERS_CHANGED;
   1.390 +      default:
   1.391 +        CODEC_LOGE("decode dequeue OMX output buffer error:%d", err);
   1.392 +        // Return OK to instruct OutputDrain to drop input from queue.
   1.393 +        return OK;
   1.394 +    }
   1.395 +
   1.396 +    sp<ABuffer> omxOut = mOutputBuffers.itemAt(index);
   1.397 +    nsAutoPtr<webrtc::I420VideoFrame> videoFrame(GenerateVideoFrame(aFrame,
   1.398 +                                                                    index,
   1.399 +                                                                    omxOut));
   1.400 +    if (videoFrame == nullptr) {
   1.401 +      mCodec->releaseOutputBuffer(index);
   1.402 +    } else if (aCallback) {
   1.403 +      aCallback->Decoded(*videoFrame);
   1.404 +      // OMX buffer will be released by RecycleCallback after rendered.
   1.405 +    }
   1.406 +
   1.407 +    return err;
   1.408 +  }
   1.409 +
   1.410 +private:
   1.411 +  class OutputDrain : public OMXOutputDrain
   1.412 +  {
   1.413 +  public:
   1.414 +    OutputDrain(WebrtcOMXDecoder* aOMX, webrtc::DecodedImageCallback* aCallback)
   1.415 +      : OMXOutputDrain()
   1.416 +      , mOMX(aOMX)
   1.417 +      , mCallback(aCallback)
   1.418 +    {}
   1.419 +
   1.420 +  protected:
   1.421 +    virtual bool DrainOutput(const EncodedFrame& aFrame) MOZ_OVERRIDE
   1.422 +    {
   1.423 +      return (mOMX->DrainOutput(aFrame, mCallback) == OK);
   1.424 +    }
   1.425 +
   1.426 +  private:
   1.427 +    WebrtcOMXDecoder* mOMX;
   1.428 +    webrtc::DecodedImageCallback* mCallback;
   1.429 +  };
   1.430 +
   1.431 +  status_t Start()
   1.432 +  {
   1.433 +    MOZ_ASSERT(!mStarted);
   1.434 +    if (mStarted) {
   1.435 +      return OK;
   1.436 +    }
   1.437 +
   1.438 +    status_t err = mCodec->start();
   1.439 +    if (err == OK) {
   1.440 +      mStarted = true;
   1.441 +      mCodec->getInputBuffers(&mInputBuffers);
   1.442 +      mCodec->getOutputBuffers(&mOutputBuffers);
   1.443 +    }
   1.444 +
   1.445 +    return err;
   1.446 +  }
   1.447 +
   1.448 +  status_t Stop()
   1.449 +  {
   1.450 +    MOZ_ASSERT(mStarted);
   1.451 +    if (!mStarted) {
   1.452 +      return OK;
   1.453 +    }
   1.454 +    if (mOutputDrain != nullptr) {
   1.455 +      mOutputDrain->Stop();
   1.456 +      mOutputDrain = nullptr;
   1.457 +    }
   1.458 +
   1.459 +    status_t err = mCodec->stop();
   1.460 +    if (err == OK) {
   1.461 +      mInputBuffers.clear();
   1.462 +      mOutputBuffers.clear();
   1.463 +      mStarted = false;
   1.464 +    } else {
   1.465 +      MOZ_ASSERT(false);
   1.466 +    }
   1.467 +
   1.468 +    return err;
   1.469 +  }
   1.470 +
   1.471 +  webrtc::I420VideoFrame*
   1.472 +  GenerateVideoFrame(const EncodedFrame& aEncoded, uint32_t aBufferIndex,
   1.473 +                     const sp<ABuffer>& aOMXBuffer)
   1.474 +  {
   1.475 +    // TODO: Get decoded frame buffer through native window to obsolete
   1.476 +    //       changes to stagefright code.
   1.477 +    sp<RefBase> obj;
   1.478 +    bool hasGraphicBuffer = aOMXBuffer->meta()->findObject("graphic-buffer", &obj);
   1.479 +    if (!hasGraphicBuffer) {
   1.480 +      MOZ_ASSERT(false, "Decoder doesn't produce graphic buffer");
   1.481 +      // Nothing to render.
   1.482 +      return nullptr;
   1.483 +    }
   1.484 +
   1.485 +    sp<GraphicBuffer> gb = static_cast<GraphicBuffer*>(obj.get());
   1.486 +    if (!gb.get()) {
   1.487 +      MOZ_ASSERT(false, "Null graphic buffer");
   1.488 +      return nullptr;
   1.489 +    }
   1.490 +
   1.491 +    RefPtr<mozilla::layers::TextureClient> textureClient =
   1.492 +      mNativeWindow->getTextureClientFromBuffer(gb.get());
   1.493 +    textureClient->SetRecycleCallback(RecycleCallback::ReturnOMXBuffer,
   1.494 +                                      new RecycleCallback(mCodec, aBufferIndex));
   1.495 +
   1.496 +    int width = gb->getWidth();
   1.497 +    int height = gb->getHeight();
   1.498 +    layers::GrallocImage::GrallocData grallocData;
   1.499 +    grallocData.mPicSize = gfx::IntSize(width, height);
   1.500 +    grallocData.mGraphicBuffer = textureClient;
   1.501 +
   1.502 +    layers::GrallocImage* grallocImage = new layers::GrallocImage();
   1.503 +    grallocImage->SetData(grallocData);
   1.504 +
   1.505 +    nsAutoPtr<webrtc::I420VideoFrame> videoFrame(
   1.506 +      new webrtc::TextureVideoFrame(new ImageNativeHandle(grallocImage),
   1.507 +                                    width, height,
   1.508 +                                    aEncoded.mTimestamp,
   1.509 +                                    aEncoded.mRenderTimeMs));
   1.510 +
   1.511 +    return videoFrame.forget();
   1.512 +  }
   1.513 +
   1.514 +  sp<ALooper> mLooper;
   1.515 +  sp<MediaCodec> mCodec; // OMXCodec
   1.516 +  int mWidth;
   1.517 +  int mHeight;
   1.518 +  android::Vector<sp<ABuffer> > mInputBuffers;
   1.519 +  android::Vector<sp<ABuffer> > mOutputBuffers;
   1.520 +  bool mStarted;
   1.521 +
   1.522 +  sp<GonkNativeWindow> mNativeWindow;
   1.523 +  sp<GonkNativeWindowClient> mNativeWindowClient;
   1.524 +
   1.525 +  RefPtr<OutputDrain> mOutputDrain;
   1.526 +};
   1.527 +
   1.528 +class EncOutputDrain : public OMXOutputDrain
   1.529 +{
   1.530 +public:
   1.531 +  EncOutputDrain(OMXVideoEncoder* aOMX, webrtc::EncodedImageCallback* aCallback)
   1.532 +    : OMXOutputDrain()
   1.533 +    , mOMX(aOMX)
   1.534 +    , mCallback(aCallback)
   1.535 +    , mIsPrevOutputParamSets(false)
   1.536 +  {}
   1.537 +
   1.538 +protected:
   1.539 +  virtual bool DrainOutput(const EncodedFrame& aInputFrame) MOZ_OVERRIDE
   1.540 +  {
   1.541 +    nsTArray<uint8_t> output;
   1.542 +    int64_t timeUs = -1ll;
   1.543 +    int flags = 0;
   1.544 +    nsresult rv = mOMX->GetNextEncodedFrame(&output, &timeUs, &flags,
   1.545 +                                            DRAIN_THREAD_TIMEOUT_US);
   1.546 +    if (NS_WARN_IF(NS_FAILED(rv))) {
   1.547 +      // Fail to get encoded frame. The corresponding input frame should be
   1.548 +      // removed.
   1.549 +      return true;
   1.550 +    }
   1.551 +
   1.552 +    if (output.Length() == 0) {
   1.553 +      // No encoded data yet. Try later.
   1.554 +      CODEC_LOGD("OMX:%p (encode no output available this time)", mOMX);
   1.555 +      return false;
   1.556 +    }
   1.557 +
   1.558 +    bool isParamSets = (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG);
   1.559 +    bool isIFrame = (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME);
   1.560 +    // Should not be parameter sets and I-frame at the same time.
   1.561 +    MOZ_ASSERT(!(isParamSets && isIFrame));
   1.562 +
   1.563 +    if (mCallback) {
   1.564 +      // Implementation here assumes encoder output to be a buffer containing
   1.565 +      // parameter sets(SPS + PPS) followed by a series of buffers, each for
   1.566 +      // one input frame.
   1.567 +      // TODO: handle output violating this assumpton in bug 997110.
   1.568 +      webrtc::EncodedImage encoded(output.Elements(), output.Length(),
   1.569 +                                   output.Capacity());
   1.570 +      encoded._frameType = (isParamSets || isIFrame) ?
   1.571 +                           webrtc::kKeyFrame : webrtc::kDeltaFrame;
   1.572 +      encoded._encodedWidth = aInputFrame.mWidth;
   1.573 +      encoded._encodedHeight = aInputFrame.mHeight;
   1.574 +      encoded._timeStamp = aInputFrame.mTimestamp;
   1.575 +      encoded.capture_time_ms_ = aInputFrame.mRenderTimeMs;
   1.576 +      encoded._completeFrame = true;
   1.577 +
   1.578 +      ALOGE("OMX:%p encode frame type:%d size:%u", mOMX, encoded._frameType, encoded._length);
   1.579 +
   1.580 +      // Prepend SPS/PPS to I-frames unless they were sent last time.
   1.581 +      SendEncodedDataToCallback(encoded, isIFrame && !mIsPrevOutputParamSets);
   1.582 +      mIsPrevOutputParamSets = isParamSets;
   1.583 +    }
   1.584 +
   1.585 +    // Tell base class not to pop input for parameter sets blob because they
   1.586 +    // don't have corresponding input.
   1.587 +    return !isParamSets;
   1.588 +  }
   1.589 +
   1.590 +private:
   1.591 +  // Send encoded data to callback.The data will be broken into individual NALUs
   1.592 +  // if necessary and sent to callback one by one. This function can also insert
   1.593 +  // SPS/PPS NALUs in front of input data if requested.
   1.594 +  void SendEncodedDataToCallback(webrtc::EncodedImage& aEncodedImage,
   1.595 +                                 bool aPrependParamSets)
   1.596 +  {
   1.597 +    // Individual NALU inherits metadata from input encoded data.
   1.598 +    webrtc::EncodedImage nalu(aEncodedImage);
   1.599 +
   1.600 +    if (aPrependParamSets) {
   1.601 +      // Insert current parameter sets in front of the input encoded data.
   1.602 +      nsTArray<uint8_t> paramSets;
   1.603 +      mOMX->GetCodecConfig(&paramSets);
   1.604 +      MOZ_ASSERT(paramSets.Length() > 4); // Start code + ...
   1.605 +      // Set buffer range.
   1.606 +      nalu._buffer = paramSets.Elements();
   1.607 +      nalu._length = paramSets.Length();
   1.608 +      // Break into NALUs and send.
   1.609 +      SendEncodedDataToCallback(nalu, false);
   1.610 +    }
   1.611 +
   1.612 +    // Break input encoded data into NALUs and send each one to callback.
   1.613 +    const uint8_t* data = aEncodedImage._buffer;
   1.614 +    size_t size = aEncodedImage._length;
   1.615 +    const uint8_t* nalStart = nullptr;
   1.616 +    size_t nalSize = 0;
   1.617 +    while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
   1.618 +      nalu._buffer = const_cast<uint8_t*>(nalStart);
   1.619 +      nalu._length = nalSize;
   1.620 +      mCallback->Encoded(nalu, nullptr, nullptr);
   1.621 +    }
   1.622 +  }
   1.623 +
   1.624 +  OMXVideoEncoder* mOMX;
   1.625 +  webrtc::EncodedImageCallback* mCallback;
   1.626 +  bool mIsPrevOutputParamSets;
   1.627 +};
   1.628 +
// Encoder.
// All members start out null/zero; the OMX encoder itself is created in
// InitEncode() and configured lazily on the first Encode() call.
WebrtcOMXH264VideoEncoder::WebrtcOMXH264VideoEncoder()
  : mOMX(nullptr)
  , mCallback(nullptr)
  , mWidth(0)
  , mHeight(0)
  , mFrameRate(0)
  , mOMXConfigured(false)
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p constructed", this);
}
   1.640 +
   1.641 +int32_t
   1.642 +WebrtcOMXH264VideoEncoder::InitEncode(const webrtc::VideoCodec* aCodecSettings,
   1.643 +                                      int32_t aNumOfCores,
   1.644 +                                      uint32_t aMaxPayloadSize)
   1.645 +{
   1.646 +  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p init", this);
   1.647 +
   1.648 +  if (mOMX == nullptr) {
   1.649 +    nsAutoPtr<OMXVideoEncoder> omx(OMXCodecWrapper::CreateAVCEncoder());
   1.650 +    if (NS_WARN_IF(omx == nullptr)) {
   1.651 +      return WEBRTC_VIDEO_CODEC_ERROR;
   1.652 +    }
   1.653 +    mOMX = omx.forget();
   1.654 +  }
   1.655 +
   1.656 +  // Defer configuration until 1st frame is received because this function will
   1.657 +  // be called more than once, and unfortunately with incorrect setting values
   1.658 +  // at first.
   1.659 +  mWidth = aCodecSettings->width;
   1.660 +  mHeight = aCodecSettings->height;
   1.661 +  mFrameRate = aCodecSettings->maxFramerate;
   1.662 +
   1.663 +  return WEBRTC_VIDEO_CODEC_OK;
   1.664 +}
   1.665 +
   1.666 +int32_t
   1.667 +WebrtcOMXH264VideoEncoder::Encode(const webrtc::I420VideoFrame& aInputImage,
   1.668 +                                  const webrtc::CodecSpecificInfo* aCodecSpecificInfo,
   1.669 +                                  const std::vector<webrtc::VideoFrameType>* aFrameTypes)
   1.670 +{
   1.671 +  MOZ_ASSERT(mOMX != nullptr);
   1.672 +  if (mOMX == nullptr) {
   1.673 +    return WEBRTC_VIDEO_CODEC_ERROR;
   1.674 +  }
   1.675 +
   1.676 +  if (!mOMXConfigured) {
   1.677 +    mOMX->Configure(mWidth, mHeight, mFrameRate,
   1.678 +                    OMXVideoEncoder::BlobFormat::AVC_NAL);
   1.679 +    mOMXConfigured = true;
   1.680 +    CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p start OMX with image size:%ux%u",
   1.681 +               this, mWidth, mHeight);
   1.682 +  }
   1.683 +
   1.684 +  // Wrap I420VideoFrame input with PlanarYCbCrImage for OMXVideoEncoder.
   1.685 +  layers::PlanarYCbCrData yuvData;
   1.686 +  yuvData.mYChannel = const_cast<uint8_t*>(aInputImage.buffer(webrtc::kYPlane));
   1.687 +  yuvData.mYSize = gfx::IntSize(aInputImage.width(), aInputImage.height());
   1.688 +  yuvData.mYStride = aInputImage.stride(webrtc::kYPlane);
   1.689 +  MOZ_ASSERT(aInputImage.stride(webrtc::kUPlane) == aInputImage.stride(webrtc::kVPlane));
   1.690 +  yuvData.mCbCrStride = aInputImage.stride(webrtc::kUPlane);
   1.691 +  yuvData.mCbChannel = const_cast<uint8_t*>(aInputImage.buffer(webrtc::kUPlane));
   1.692 +  yuvData.mCrChannel = const_cast<uint8_t*>(aInputImage.buffer(webrtc::kVPlane));
   1.693 +  yuvData.mCbCrSize = gfx::IntSize((yuvData.mYSize.width + 1) / 2,
   1.694 +                                   (yuvData.mYSize.height + 1) / 2);
   1.695 +  yuvData.mPicSize = yuvData.mYSize;
   1.696 +  yuvData.mStereoMode = StereoMode::MONO;
   1.697 +  layers::PlanarYCbCrImage img(nullptr);
   1.698 +  img.SetDataNoCopy(yuvData);
   1.699 +
   1.700 +  nsresult rv = mOMX->Encode(&img,
   1.701 +                             yuvData.mYSize.width,
   1.702 +                             yuvData.mYSize.height,
   1.703 +                             aInputImage.timestamp() * 1000 / 90, // 90kHz -> us.
   1.704 +                             0);
   1.705 +  if (rv == NS_OK) {
   1.706 +    if (mOutputDrain == nullptr) {
   1.707 +      mOutputDrain = new EncOutputDrain(mOMX, mCallback);
   1.708 +      mOutputDrain->Start();
   1.709 +    }
   1.710 +    EncodedFrame frame;
   1.711 +    frame.mWidth = mWidth;
   1.712 +    frame.mHeight = mHeight;
   1.713 +    frame.mTimestamp = aInputImage.timestamp();
   1.714 +    frame.mRenderTimeMs = aInputImage.render_time_ms();
   1.715 +    mOutputDrain->QueueInput(frame);
   1.716 +  }
   1.717 +
   1.718 +  return (rv == NS_OK) ? WEBRTC_VIDEO_CODEC_OK : WEBRTC_VIDEO_CODEC_ERROR;
   1.719 +}
   1.720 +
   1.721 +int32_t
   1.722 +WebrtcOMXH264VideoEncoder::RegisterEncodeCompleteCallback(
   1.723 +    webrtc::EncodedImageCallback* aCallback)
   1.724 +{
   1.725 +  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set callback:%p", this, aCallback);
   1.726 +  MOZ_ASSERT(aCallback);
   1.727 +  mCallback = aCallback;
   1.728 +
   1.729 +  return WEBRTC_VIDEO_CODEC_OK;
   1.730 +}
   1.731 +
// Release encoder resources. Safe to call more than once; also invoked from
// the destructor.
int32_t
WebrtcOMXH264VideoEncoder::Release()
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p will be released", this);

  // Stop the drain thread before dropping the codec it reads from.
  if (mOutputDrain != nullptr) {
    mOutputDrain->Stop();
    mOutputDrain = nullptr;
  }

  mOMX = nullptr;

  return WEBRTC_VIDEO_CODEC_OK;
}
   1.746 +
WebrtcOMXH264VideoEncoder::~WebrtcOMXH264VideoEncoder()
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p will be destructed", this);

  // Release() is idempotent, so this is safe even if the caller already
  // released the encoder.
  Release();
}
   1.753 +
// Inform the encoder of the new packet loss rate and the round-trip time of
// the network. aPacketLossRate is fraction lost and can be 0~255
// (255 means 100% lost).
// Note: stagefright doesn't handle these parameters.
int32_t
WebrtcOMXH264VideoEncoder::SetChannelParameters(uint32_t aPacketLossRate,
                                                int aRoundTripTimeMs)
{
  // Log only; the underlying OMX encoder exposes no knob for loss rate/RTT.
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set channel packet loss:%u, rtt:%d",
             this, aPacketLossRate, aRoundTripTimeMs);

  return WEBRTC_VIDEO_CODEC_OK;
}
   1.767 +
   1.768 +// TODO: Bug 997567. Find the way to support frame rate change.
   1.769 +// Apply a new target bitrate to the running OMX encoder. aFrameRate is
   1.770 +// currently ignored (see the bug above). Returns UNINITIALIZED when the
   1.771 +// encoder hasn't been configured yet.
   1.772 +int32_t
   1.773 +WebrtcOMXH264VideoEncoder::SetRates(uint32_t aBitRate, uint32_t aFrameRate)
   1.774 +{
   1.775 +  // Fixed: the format string had an unbalanced trailing ')'.
   1.776 +  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set bitrate:%u, frame rate:%u",
   1.777 +             this, aBitRate, aFrameRate);
   1.778 +  MOZ_ASSERT(mOMX != nullptr);
   1.779 +  if (mOMX == nullptr) {
   1.780 +    // No codec to apply the bitrate to.
   1.781 +    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   1.782 +  }
   1.783 +
   1.784 +  mOMX->SetBitrate(aBitRate);
   1.785 +
   1.786 +  return WEBRTC_VIDEO_CODEC_OK;
   1.787 +}
   1.783 +
   1.784 +// Decoder.
   1.785 +// Constructs an idle decoder: no callback registered and no OMX codec yet.
   1.786 +// The actual OMX decoder is created lazily in Decode() once SPS/PPS NALUs
   1.787 +// arrive (see InitDecode()/Decode() below).
   1.788 +WebrtcOMXH264VideoDecoder::WebrtcOMXH264VideoDecoder()
   1.789 +  : mCallback(nullptr)
   1.790 +  , mOMX(nullptr)
   1.791 +{
   1.792 +  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p will be constructed", this);
   1.793 +}
   1.791 +
   1.792 +// Intentionally does not create the OMX codec here: configuration is
   1.793 +// deferred until Decode() sees SPS/PPS NALUs, from which the actual
   1.794 +// decoder config (resolution etc.) can be extracted. aCodecSettings and
   1.795 +// aNumOfCores are therefore unused.
   1.796 +int32_t
   1.797 +WebrtcOMXH264VideoDecoder::InitDecode(const webrtc::VideoCodec* aCodecSettings,
   1.798 +                                      int32_t aNumOfCores)
   1.799 +{
   1.800 +  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p init OMX:%p", this, mOMX.get());
   1.801 +
   1.802 +  // Defer configuration until SPS/PPS NALUs (where actual decoder config
   1.803 +  // values can be extracted) are received.
   1.804 +
   1.805 +  return WEBRTC_VIDEO_CODEC_OK;
   1.806 +}
   1.803 +
   1.804 +// Feed one encoded frame to the OMX decoder. On the first call (and any
   1.805 +// call before SPS/PPS have been seen) this lazily creates and configures
   1.806 +// the codec from the parameter sets found in the input. Decoded output is
   1.807 +// delivered asynchronously through mCallback by the codec's drain path.
   1.808 +int32_t
   1.809 +WebrtcOMXH264VideoDecoder::Decode(const webrtc::EncodedImage& aInputImage,
   1.810 +                                  bool aMissingFrames,
   1.811 +                                  const webrtc::RTPFragmentationHeader* aFragmentation,
   1.812 +                                  const webrtc::CodecSpecificInfo* aCodecSpecificInfo,
   1.813 +                                  int64_t aRenderTimeMs)
   1.814 +{
   1.815 +  // Reject empty input up front.
   1.816 +  if (aInputImage._length == 0 || !aInputImage._buffer) {
   1.817 +    return WEBRTC_VIDEO_CODEC_ERROR;
   1.818 +  }
   1.819 +
   1.820 +  // Fixed: this per-frame trace was logged via ALOGE (error severity);
   1.821 +  // use this file's verbose logging macro instead.
   1.822 +  CODEC_LOGV("WebrtcOMXH264VideoDecoder:%p will decode", this);
   1.823 +
   1.824 +  bool configured = !!mOMX;
   1.825 +  if (!configured) {
   1.826 +    // Search for SPS/PPS NALUs in input to get decoder config.
   1.827 +    sp<ABuffer> input = new ABuffer(aInputImage._buffer, aInputImage._length);
   1.828 +    sp<MetaData> paramSets = WebrtcOMXDecoder::ParseParamSets(input);
   1.829 +    if (NS_WARN_IF(paramSets == nullptr)) {
   1.830 +      // Cannot config decoder because SPS/PPS NALUs haven't been seen.
   1.831 +      return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   1.832 +    }
   1.833 +    RefPtr<WebrtcOMXDecoder> omx = new WebrtcOMXDecoder(MEDIA_MIMETYPE_VIDEO_AVC);
   1.834 +    status_t result = omx->ConfigureWithParamSets(paramSets);
   1.835 +    if (NS_WARN_IF(result != OK)) {
   1.836 +      return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   1.837 +    }
   1.838 +    CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p start OMX", this);
   1.839 +    mOMX = omx;
   1.840 +  }
   1.841 +
   1.842 +  // Retry while the codec reports no free input buffer (-EAGAIN).
   1.843 +  // Fixed: removed an unused local (int64_t timeUs) from this loop.
   1.844 +  bool feedFrame = true;
   1.845 +  while (feedFrame) {
   1.846 +    status_t err = mOMX->FillInput(aInputImage, !configured, aRenderTimeMs, mCallback);
   1.847 +    feedFrame = (err == -EAGAIN); // No input buffer available. Try again.
   1.848 +  }
   1.849 +
   1.850 +  return WEBRTC_VIDEO_CODEC_OK;
   1.851 +}
   1.844 +
   1.845 +int32_t
   1.846 +WebrtcOMXH264VideoDecoder::RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* aCallback)
   1.847 +{
   1.848 +  // Remember the sink that will receive decoded frames. Must be non-null;
   1.849 +  // decoded output cannot be delivered without it.
   1.850 +  MOZ_ASSERT(aCallback);
   1.851 +  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p set callback:%p", this, aCallback);
   1.852 +  mCallback = aCallback;
   1.853 +
   1.854 +  return WEBRTC_VIDEO_CODEC_OK;
   1.855 +}
   1.854 +
   1.855 +// Drop the OMX decoder. Safe to call multiple times (setting a null
   1.856 +// RefPtr to null is a no-op); the codec is released via RefPtr
   1.857 +// destruction. Always reports success.
   1.858 +int32_t
   1.859 +WebrtcOMXH264VideoDecoder::Release()
   1.860 +{
   1.861 +  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p will be released", this);
   1.862 +
   1.863 +  mOMX = nullptr;
   1.864 +
   1.865 +  return WEBRTC_VIDEO_CODEC_OK;
   1.866 +}
   1.864 +
   1.865 +// Destructor delegates to Release() so the OMX handle is dropped even if
   1.866 +// the owner never called Release() explicitly; Release() is idempotent.
   1.867 +WebrtcOMXH264VideoDecoder::~WebrtcOMXH264VideoDecoder()
   1.868 +{
   1.869 +  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p will be destructed", this);
   1.870 +  Release();
   1.871 +}
   1.870 +
   1.871 +// Deliberate no-op: the underlying stagefright decoder is NOT reset, as
   1.872 +// the warning log states. Reports success so callers treat the reset as
   1.873 +// handled.
   1.874 +int32_t
   1.875 +WebrtcOMXH264VideoDecoder::Reset()
   1.876 +{
   1.877 +  CODEC_LOGW("WebrtcOMXH264VideoDecoder::Reset() will NOT reset decoder");
   1.878 +  return WEBRTC_VIDEO_CODEC_OK;
   1.879 +}
   1.877 +
   1.878 +}

mercurial