media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp

author      Michael Schloh von Bennewitz <michael@schloh.com>
date        Wed, 31 Dec 2014 06:09:35 +0100
changeset   0:6474c204b198

Cloned from upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "CSFLog.h"

#include "WebrtcOMXH264VideoCodec.h"

// Android/Stagefright
#include <avc_utils.h>
#include <binder/ProcessState.h>
#include <foundation/ABuffer.h>
#include <foundation/AMessage.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <MediaCodec.h>
#include <MediaDefs.h>
#include <MediaErrors.h>
#include <MetaData.h>
#include <OMX_Component.h>
using namespace android;

// WebRTC
#include "common_video/interface/texture_video_frame.h"
#include "video_engine/include/vie_external_codec.h"

// Gecko
#include "GonkNativeWindow.h"
#include "GonkNativeWindowClient.h"
#include "mozilla/Atomics.h"
#include "mozilla/Mutex.h"
#include "nsThreadUtils.h"
#include "OMXCodecWrapper.h"
#include "TextureClient.h"

#define DEQUEUE_BUFFER_TIMEOUT_US (100 * 1000ll) // 100ms.
#define START_DEQUEUE_BUFFER_TIMEOUT_US (10 * DEQUEUE_BUFFER_TIMEOUT_US) // 1s.
#define DRAIN_THREAD_TIMEOUT_US (1000 * 1000ll) // 1s.

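// Illustrative sketch only (not used elsewhere in this file; the helper name
// is hypothetical): both the decoder's FillInput() and the encoder's Encode()
// below convert 90 kHz RTP timestamps to microseconds with
// "timestamp * 1000 / 90". Written out as a helper, the arithmetic is
// simply 1000/90 microseconds per 90 kHz tick.
static inline int64_t
RtpTimestampToMicroseconds(uint32_t aRtpTimestamp90kHz)
{
  // 90 ticks per millisecond -> widen first to avoid 32-bit overflow.
  return static_cast<int64_t>(aRtpTimestamp90kHz) * 1000 / 90;
}
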
#define LOG_TAG "WebrtcOMXH264VideoCodec"
#define CODEC_LOGV(...) CSFLogInfo(LOG_TAG, __VA_ARGS__)
#define CODEC_LOGD(...) CSFLogDebug(LOG_TAG, __VA_ARGS__)
#define CODEC_LOGI(...) CSFLogInfo(LOG_TAG, __VA_ARGS__)
#define CODEC_LOGW(...) CSFLogWarn(LOG_TAG, __VA_ARGS__)
#define CODEC_LOGE(...) CSFLogError(LOG_TAG, __VA_ARGS__)

namespace mozilla {

// NS_INLINE_DECL_THREADSAFE_REFCOUNTING() cannot be used directly in
// ImageNativeHandle below because the return type of webrtc::NativeHandle
// AddRef()/Release() conflicts with those defined in the macro. To avoid
// another copy/paste of the ref-counting implementation here, this dummy base
// class is created to provide another level of indirection.
class DummyRefCountBase {
public:
  // Use the name of the real class for logging.
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(ImageNativeHandle)
  // To make sure subclasses will be deleted/destructed properly.
  virtual ~DummyRefCountBase() {}
};

// This class implements 2 interfaces:
// 1. webrtc::NativeHandle: to wrap a layers::Image object so decoded frames
//    can be passed through the WebRTC rendering pipeline using
//    TextureVideoFrame.
// 2. ImageHandle: for the renderer to get the image object inside without
//    knowledge about webrtc::NativeHandle.
class ImageNativeHandle MOZ_FINAL
  : public webrtc::NativeHandle
  , public DummyRefCountBase
{
public:
  ImageNativeHandle(layers::Image* aImage)
    : mImage(aImage)
  {}

  // Implement webrtc::NativeHandle.
  virtual void* GetHandle() MOZ_OVERRIDE { return mImage.get(); }

  virtual int AddRef() MOZ_OVERRIDE
  {
    return DummyRefCountBase::AddRef();
  }

  virtual int Release() MOZ_OVERRIDE
  {
    return DummyRefCountBase::Release();
  }

private:
  RefPtr<layers::Image> mImage;
};

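// Illustrative sketch only (not used elsewhere in this file; the function
// name is hypothetical): how a renderer can recover the wrapped layers::Image
// from the native handle that GenerateVideoFrame() attaches to a
// TextureVideoFrame further down in this file.
static layers::Image*
ExampleUnwrapImage(webrtc::NativeHandle* aHandle)
{
  // GetHandle() returns the raw layers::Image* passed to the
  // ImageNativeHandle constructor; aHandle must be kept alive (AddRef'd)
  // for as long as the image is in use.
  return static_cast<layers::Image*>(aHandle->GetHandle());
}
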
// Graphic buffer lifecycle management.
// Return buffer to OMX codec when renderer is done with it.
class RecycleCallback
{
public:
  RecycleCallback(const sp<MediaCodec>& aOmx, uint32_t aBufferIndex)
    : mOmx(aOmx)
    , mBufferIndex(aBufferIndex)
  {}
  typedef void* CallbackPtr;
  static void ReturnOMXBuffer(layers::TextureClient* aClient, CallbackPtr aClosure)
  {
    aClient->ClearRecycleCallback();
    RecycleCallback* self = static_cast<RecycleCallback*>(aClosure);
    self->mOmx->releaseOutputBuffer(self->mBufferIndex);
    delete self;
  }

private:
  sp<MediaCodec> mOmx;
  uint32_t mBufferIndex;
};

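// Illustrative sketch only (not used elsewhere in this file; the function
// name is hypothetical): how an OMX output buffer is tied to a TextureClient
// so the buffer is released once the compositor is done with it.
// GenerateVideoFrame() below performs this exact call on the TextureClient
// wrapping the decoded graphic buffer.
static void
ExampleAttachRecycleCallback(layers::TextureClient* aClient,
                             const sp<MediaCodec>& aCodec,
                             uint32_t aBufferIndex)
{
  // ReturnOMXBuffer() releases the OMX buffer and deletes the closure when
  // the texture is recycled.
  aClient->SetRecycleCallback(RecycleCallback::ReturnOMXBuffer,
                              new RecycleCallback(aCodec, aBufferIndex));
}
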
struct EncodedFrame
{
  uint32_t mWidth;
  uint32_t mHeight;
  uint32_t mTimestamp;
  int64_t mRenderTimeMs;
};

// Base runnable class to repeatedly pull OMX output buffers in a separate
// thread. How to use (see the illustrative sketch after this class):
// - implement DrainOutput() to get output. Remember to return false to tell
//   the drain not to pop the input queue.
// - call QueueInput() to schedule a run to drain output. The input, aFrame,
//   should contain corresponding info such as image size and timestamps for
//   the DrainOutput() implementation to construct data needed by
//   encoded/decoded callbacks.
// TODO: Bug 997110 - Revisit queue/drain logic. Current design assumes that
//       the encoder only generates one output buffer per input frame and
//       won't work if the encoder drops frames or generates multiple outputs
//       per input.
class OMXOutputDrain : public nsRunnable
{
public:
  void Start() {
    MonitorAutoLock lock(mMonitor);
    if (mThread == nullptr) {
      NS_NewNamedThread("OMXOutputDrain", getter_AddRefs(mThread));
    }
    CODEC_LOGD("OMXOutputDrain started");
    mEnding = false;
    mThread->Dispatch(this, NS_DISPATCH_NORMAL);
  }

  void Stop() {
    MonitorAutoLock lock(mMonitor);
    mEnding = true;
    lock.NotifyAll(); // In case Run() is waiting.

    if (mThread != nullptr) {
      mThread->Shutdown();
      mThread = nullptr;
    }
    CODEC_LOGD("OMXOutputDrain stopped");
  }

  void QueueInput(const EncodedFrame& aFrame)
  {
    MonitorAutoLock lock(mMonitor);

    MOZ_ASSERT(mThread);

    mInputFrames.push(aFrame);
    // Notify Run() about queued input so it can start working.
    lock.NotifyAll();
  }

  NS_IMETHODIMP Run() MOZ_OVERRIDE
  {
    MOZ_ASSERT(mThread);

    MonitorAutoLock lock(mMonitor);
    while (true) {
      if (mInputFrames.empty()) {
        ALOGE("Waiting OMXOutputDrain");
        // Wait for new input.
        lock.Wait();
      }

      if (mEnding) {
        ALOGE("Ending OMXOutputDrain");
        // Stop draining.
        break;
      }

      MOZ_ASSERT(!mInputFrames.empty());
      EncodedFrame frame = mInputFrames.front();
      bool shouldPop = false;
      {
        // Release the monitor while draining because it's blocking.
        MonitorAutoUnlock unlock(mMonitor);
        // |frame| provides size and time of the corresponding input.
        shouldPop = DrainOutput(frame);
      }
      if (shouldPop) {
        mInputFrames.pop();
      }
    }

    CODEC_LOGD("OMXOutputDrain Ended");
    return NS_OK;
  }

protected:
  OMXOutputDrain()
    : mMonitor("OMXOutputDrain monitor")
    , mEnding(false)
  {}

  // Drain the output buffer for input frame aFrame.
  // aFrame contains info such as size and time of the input frame and can be
  // used to construct data for encoded/decoded callbacks if needed.
  // Return true to indicate we should pop the input queue, and return false
  // to indicate aFrame should not be removed from the input queue (either the
  // output is not ready yet and should be tried again later, or the drained
  // output is SPS/PPS NALUs that have no corresponding input in the queue).
  virtual bool DrainOutput(const EncodedFrame& aFrame) = 0;

private:
  // This monitor protects all things below it, and is also used to
  // wait/notify queued input.
  Monitor mMonitor;
  nsCOMPtr<nsIThread> mThread;
  std::queue<EncodedFrame> mInputFrames;
  bool mEnding;
};

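// Illustrative sketch only (not used elsewhere in this file; class and
// function names are hypothetical): the minimal pattern for using
// OMXOutputDrain. Subclass it, implement DrainOutput(), then Start() the
// drain thread and QueueInput() one EncodedFrame per frame fed to the codec;
// Stop() shuts the thread down. OutputDrain (decoder) and EncOutputDrain
// (encoder) below follow this pattern against real codecs.
class ExampleNoopDrain : public OMXOutputDrain
{
protected:
  virtual bool DrainOutput(const EncodedFrame& aFrame) MOZ_OVERRIDE
  {
    // A real implementation would dequeue one codec output buffer matching
    // aFrame here. Returning true pops aFrame from the input queue;
    // returning false keeps it queued for another attempt.
    return true;
  }
};

static void
ExampleUseDrain()
{
  RefPtr<ExampleNoopDrain> drain = new ExampleNoopDrain();
  drain->Start();
  EncodedFrame frame = { 640, 480, 90000, 0 }; // width, height, RTP time, render time
  drain->QueueInput(frame);
  drain->Stop();
}
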
// H.264 decoder using stagefright.
class WebrtcOMXDecoder MOZ_FINAL
{
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebrtcOMXDecoder)
public:
  WebrtcOMXDecoder(const char* aMimeType)
    : mWidth(0)
    , mHeight(0)
    , mStarted(false)
  {
    // Create the binder thread pool required by stagefright.
    android::ProcessState::self()->startThreadPool();

    mLooper = new ALooper;
    mLooper->start();
    mCodec = MediaCodec::CreateByType(mLooper, aMimeType, false /* encoder */);
  }

  virtual ~WebrtcOMXDecoder()
  {
    if (mStarted) {
      Stop();
    }
    if (mCodec != nullptr) {
      mCodec->release();
      mCodec.clear();
    }
    mLooper.clear();
  }

  // Parse SPS/PPS NALUs.
  static sp<MetaData> ParseParamSets(sp<ABuffer>& aParamSets)
  {
    return MakeAVCCodecSpecificData(aParamSets);
  }

  // Configure the decoder using data returned by ParseParamSets().
  status_t ConfigureWithParamSets(const sp<MetaData>& aParamSets)
  {
    MOZ_ASSERT(mCodec != nullptr);
    if (mCodec == nullptr) {
      return INVALID_OPERATION;
    }

    int32_t width = 0;
    bool ok = aParamSets->findInt32(kKeyWidth, &width);
    MOZ_ASSERT(ok && width > 0);
    int32_t height = 0;
    ok = aParamSets->findInt32(kKeyHeight, &height);
    MOZ_ASSERT(ok && height > 0);
    CODEC_LOGD("OMX:%p decoder config width:%d height:%d", this, width, height);

    sp<AMessage> config = new AMessage();
    config->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
    config->setInt32("width", width);
    config->setInt32("height", height);
    mWidth = width;
    mHeight = height;

    sp<Surface> surface = nullptr;
    mNativeWindow = new GonkNativeWindow();
    if (mNativeWindow.get()) {
      mNativeWindowClient = new GonkNativeWindowClient(mNativeWindow->getBufferQueue());
      if (mNativeWindowClient.get()) {
        surface = new Surface(mNativeWindowClient->getIGraphicBufferProducer());
      }
    }
    status_t result = mCodec->configure(config, surface, nullptr, 0);
    if (result == OK) {
      result = Start();
    }
    return result;
  }

  status_t
  FillInput(const webrtc::EncodedImage& aEncoded, bool aIsFirstFrame,
            int64_t& aRenderTimeMs, webrtc::DecodedImageCallback* aCallback)
  {
    MOZ_ASSERT(mCodec != nullptr);
    if (mCodec == nullptr) {
      return INVALID_OPERATION;
    }

    size_t index;
    status_t err = mCodec->dequeueInputBuffer(&index,
      aIsFirstFrame ? START_DEQUEUE_BUFFER_TIMEOUT_US : DEQUEUE_BUFFER_TIMEOUT_US);
    if (err != OK) {
      CODEC_LOGE("decode dequeue input buffer error:%d", err);
      return err;
    }

    uint32_t flags = 0;
    if (aEncoded._frameType == webrtc::kKeyFrame) {
      flags = aIsFirstFrame ? MediaCodec::BUFFER_FLAG_CODECCONFIG : MediaCodec::BUFFER_FLAG_SYNCFRAME;
    }
    size_t size = aEncoded._length;
    MOZ_ASSERT(size);
    const sp<ABuffer>& omxIn = mInputBuffers.itemAt(index);
    MOZ_ASSERT(omxIn->capacity() >= size);
    omxIn->setRange(0, size);
    // Copying is needed because the MediaCodec API doesn't support externally
    // allocated buffers as input.
    memcpy(omxIn->data(), aEncoded._buffer, size);
    int64_t inputTimeUs = aEncoded._timeStamp * 1000 / 90; // 90kHz -> us.
    err = mCodec->queueInputBuffer(index, 0, size, inputTimeUs, flags);
    if (err == OK && !(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
      if (mOutputDrain == nullptr) {
        mOutputDrain = new OutputDrain(this, aCallback);
        mOutputDrain->Start();
      }
      EncodedFrame frame;
      frame.mWidth = mWidth;
      frame.mHeight = mHeight;
      frame.mTimestamp = aEncoded._timeStamp;
      frame.mRenderTimeMs = aRenderTimeMs;
      mOutputDrain->QueueInput(frame);
    }

    return err;
  }

  status_t
  DrainOutput(const EncodedFrame& aFrame, webrtc::DecodedImageCallback* aCallback)
  {
    MOZ_ASSERT(mCodec != nullptr);
    if (mCodec == nullptr) {
      return INVALID_OPERATION;
    }

    size_t index = 0;
    size_t outOffset = 0;
    size_t outSize = 0;
    int64_t outTime = -1ll;
    uint32_t outFlags = 0;
    status_t err = mCodec->dequeueOutputBuffer(&index, &outOffset, &outSize,
                                               &outTime, &outFlags,
                                               DRAIN_THREAD_TIMEOUT_US);
    switch (err) {
      case OK:
        break;
      case -EAGAIN:
        // Not an error: output not available yet. Try later.
        CODEC_LOGI("decode dequeue OMX output buffer timed out. Try later.");
        return err;
      case INFO_FORMAT_CHANGED:
        // Not an error: will get this value when the OMX output buffer is
        // enabled, or when the input size changed.
        CODEC_LOGD("decode dequeue OMX output buffer format change");
        return err;
      case INFO_OUTPUT_BUFFERS_CHANGED:
        // Not an error: will get this value when the OMX output buffers
        // changed (probably because of an input size change).
        CODEC_LOGD("decode dequeue OMX output buffer change");
        err = mCodec->getOutputBuffers(&mOutputBuffers);
        MOZ_ASSERT(err == OK);
        return INFO_OUTPUT_BUFFERS_CHANGED;
      default:
        CODEC_LOGE("decode dequeue OMX output buffer error:%d", err);
        // Return OK to instruct OutputDrain to drop the input from its queue.
        return OK;
    }

    sp<ABuffer> omxOut = mOutputBuffers.itemAt(index);
    nsAutoPtr<webrtc::I420VideoFrame> videoFrame(GenerateVideoFrame(aFrame,
                                                                    index,
                                                                    omxOut));
    if (videoFrame == nullptr) {
      mCodec->releaseOutputBuffer(index);
    } else if (aCallback) {
      aCallback->Decoded(*videoFrame);
      // The OMX buffer will be released by RecycleCallback after rendering.
    }

    return err;
  }

private:
  class OutputDrain : public OMXOutputDrain
  {
  public:
    OutputDrain(WebrtcOMXDecoder* aOMX, webrtc::DecodedImageCallback* aCallback)
      : OMXOutputDrain()
      , mOMX(aOMX)
      , mCallback(aCallback)
    {}

  protected:
    virtual bool DrainOutput(const EncodedFrame& aFrame) MOZ_OVERRIDE
    {
      return (mOMX->DrainOutput(aFrame, mCallback) == OK);
    }

  private:
    WebrtcOMXDecoder* mOMX;
    webrtc::DecodedImageCallback* mCallback;
  };

  status_t Start()
  {
    MOZ_ASSERT(!mStarted);
    if (mStarted) {
      return OK;
    }

    status_t err = mCodec->start();
    if (err == OK) {
      mStarted = true;
      mCodec->getInputBuffers(&mInputBuffers);
      mCodec->getOutputBuffers(&mOutputBuffers);
    }

    return err;
  }

  status_t Stop()
  {
    MOZ_ASSERT(mStarted);
    if (!mStarted) {
      return OK;
    }
    if (mOutputDrain != nullptr) {
      mOutputDrain->Stop();
      mOutputDrain = nullptr;
    }

    status_t err = mCodec->stop();
    if (err == OK) {
      mInputBuffers.clear();
      mOutputBuffers.clear();
      mStarted = false;
    } else {
      MOZ_ASSERT(false);
    }

    return err;
  }

  webrtc::I420VideoFrame*
  GenerateVideoFrame(const EncodedFrame& aEncoded, uint32_t aBufferIndex,
                     const sp<ABuffer>& aOMXBuffer)
  {
    // TODO: Get the decoded frame buffer through the native window to obsolete
    //       changes to stagefright code.
    sp<RefBase> obj;
    bool hasGraphicBuffer = aOMXBuffer->meta()->findObject("graphic-buffer", &obj);
    if (!hasGraphicBuffer) {
      MOZ_ASSERT(false, "Decoder doesn't produce graphic buffer");
      // Nothing to render.
      return nullptr;
    }

    sp<GraphicBuffer> gb = static_cast<GraphicBuffer*>(obj.get());
    if (!gb.get()) {
      MOZ_ASSERT(false, "Null graphic buffer");
      return nullptr;
    }

    RefPtr<mozilla::layers::TextureClient> textureClient =
      mNativeWindow->getTextureClientFromBuffer(gb.get());
    textureClient->SetRecycleCallback(RecycleCallback::ReturnOMXBuffer,
                                      new RecycleCallback(mCodec, aBufferIndex));

    int width = gb->getWidth();
    int height = gb->getHeight();
    layers::GrallocImage::GrallocData grallocData;
    grallocData.mPicSize = gfx::IntSize(width, height);
    grallocData.mGraphicBuffer = textureClient;

    layers::GrallocImage* grallocImage = new layers::GrallocImage();
    grallocImage->SetData(grallocData);

    nsAutoPtr<webrtc::I420VideoFrame> videoFrame(
      new webrtc::TextureVideoFrame(new ImageNativeHandle(grallocImage),
                                    width, height,
                                    aEncoded.mTimestamp,
                                    aEncoded.mRenderTimeMs));

    return videoFrame.forget();
  }

  sp<ALooper> mLooper;
  sp<MediaCodec> mCodec; // OMXCodec
  int mWidth;
  int mHeight;
  android::Vector<sp<ABuffer> > mInputBuffers;
  android::Vector<sp<ABuffer> > mOutputBuffers;
  bool mStarted;

  sp<GonkNativeWindow> mNativeWindow;
  sp<GonkNativeWindowClient> mNativeWindowClient;

  RefPtr<OutputDrain> mOutputDrain;
};

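// Illustrative sketch only (not used elsewhere in this file; the function
// name is hypothetical): the expected call sequence for WebrtcOMXDecoder,
// mirroring WebrtcOMXH264VideoDecoder::Decode() further down. The first key
// frame is assumed to carry SPS/PPS so the decoder can be configured from it.
static status_t
ExampleDecodeFirstFrame(const webrtc::EncodedImage& aEncoded,
                        webrtc::DecodedImageCallback* aCallback,
                        int64_t aRenderTimeMs)
{
  sp<ABuffer> annexB = new ABuffer(aEncoded._buffer, aEncoded._length);
  sp<MetaData> paramSets = WebrtcOMXDecoder::ParseParamSets(annexB);
  if (paramSets == nullptr) {
    return INVALID_OPERATION; // No SPS/PPS seen yet; cannot configure.
  }

  RefPtr<WebrtcOMXDecoder> omx = new WebrtcOMXDecoder(MEDIA_MIMETYPE_VIDEO_AVC);
  status_t err = omx->ConfigureWithParamSets(paramSets);
  if (err != OK) {
    return err;
  }

  // First frame: FillInput() uses the longer dequeue timeout and starts the
  // output drain thread that delivers decoded frames to aCallback.
  return omx->FillInput(aEncoded, true /* first frame */, aRenderTimeMs, aCallback);
}
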
class EncOutputDrain : public OMXOutputDrain
{
public:
  EncOutputDrain(OMXVideoEncoder* aOMX, webrtc::EncodedImageCallback* aCallback)
    : OMXOutputDrain()
    , mOMX(aOMX)
    , mCallback(aCallback)
    , mIsPrevOutputParamSets(false)
  {}

protected:
  virtual bool DrainOutput(const EncodedFrame& aInputFrame) MOZ_OVERRIDE
  {
    nsTArray<uint8_t> output;
    int64_t timeUs = -1ll;
    int flags = 0;
    nsresult rv = mOMX->GetNextEncodedFrame(&output, &timeUs, &flags,
                                            DRAIN_THREAD_TIMEOUT_US);
    if (NS_WARN_IF(NS_FAILED(rv))) {
      // Failed to get an encoded frame. The corresponding input frame should
      // be removed.
      return true;
    }

    if (output.Length() == 0) {
      // No encoded data yet. Try later.
      CODEC_LOGD("OMX:%p (encode no output available this time)", mOMX);
      return false;
    }

    bool isParamSets = (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG);
    bool isIFrame = (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME);
    // Should not be parameter sets and an I-frame at the same time.
    MOZ_ASSERT(!(isParamSets && isIFrame));

    if (mCallback) {
      // The implementation here assumes the encoder output to be a buffer
      // containing parameter sets (SPS + PPS) followed by a series of
      // buffers, each for one input frame.
      // TODO: handle output violating this assumption in bug 997110.
      webrtc::EncodedImage encoded(output.Elements(), output.Length(),
                                   output.Capacity());
      encoded._frameType = (isParamSets || isIFrame) ?
                           webrtc::kKeyFrame : webrtc::kDeltaFrame;
      encoded._encodedWidth = aInputFrame.mWidth;
      encoded._encodedHeight = aInputFrame.mHeight;
      encoded._timeStamp = aInputFrame.mTimestamp;
      encoded.capture_time_ms_ = aInputFrame.mRenderTimeMs;
      encoded._completeFrame = true;

      ALOGE("OMX:%p encode frame type:%d size:%u", mOMX, encoded._frameType, encoded._length);

      // Prepend SPS/PPS to I-frames unless they were sent last time.
      SendEncodedDataToCallback(encoded, isIFrame && !mIsPrevOutputParamSets);
      mIsPrevOutputParamSets = isParamSets;
    }

    // Tell the base class not to pop input for a parameter sets blob because
    // it has no corresponding input.
    return !isParamSets;
  }

private:
  // Send encoded data to the callback. The data will be broken into
  // individual NALUs if necessary and sent to the callback one by one. This
  // function can also insert SPS/PPS NALUs in front of the input data if
  // requested.
  void SendEncodedDataToCallback(webrtc::EncodedImage& aEncodedImage,
                                 bool aPrependParamSets)
  {
    // Individual NALUs inherit metadata from the input encoded data.
    webrtc::EncodedImage nalu(aEncodedImage);

    if (aPrependParamSets) {
      // Insert the current parameter sets in front of the input encoded data.
      nsTArray<uint8_t> paramSets;
      mOMX->GetCodecConfig(&paramSets);
      MOZ_ASSERT(paramSets.Length() > 4); // Start code + ...
      // Set buffer range.
      nalu._buffer = paramSets.Elements();
      nalu._length = paramSets.Length();
      // Break into NALUs and send.
      SendEncodedDataToCallback(nalu, false);
    }

    // Break the input encoded data into NALUs and send each one to the callback.
    const uint8_t* data = aEncodedImage._buffer;
    size_t size = aEncodedImage._length;
    const uint8_t* nalStart = nullptr;
    size_t nalSize = 0;
    while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
      nalu._buffer = const_cast<uint8_t*>(nalStart);
      nalu._length = nalSize;
      mCallback->Encoded(nalu, nullptr, nullptr);
    }
  }

  OMXVideoEncoder* mOMX;
  webrtc::EncodedImageCallback* mCallback;
  bool mIsPrevOutputParamSets;
};

// Encoder.
WebrtcOMXH264VideoEncoder::WebrtcOMXH264VideoEncoder()
  : mOMX(nullptr)
  , mCallback(nullptr)
  , mWidth(0)
  , mHeight(0)
  , mFrameRate(0)
  , mOMXConfigured(false)
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p constructed", this);
}

int32_t
WebrtcOMXH264VideoEncoder::InitEncode(const webrtc::VideoCodec* aCodecSettings,
                                      int32_t aNumOfCores,
                                      uint32_t aMaxPayloadSize)
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p init", this);

  if (mOMX == nullptr) {
    nsAutoPtr<OMXVideoEncoder> omx(OMXCodecWrapper::CreateAVCEncoder());
    if (NS_WARN_IF(omx == nullptr)) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    mOMX = omx.forget();
  }

  // Defer configuration until the 1st frame is received because this function
  // will be called more than once, and unfortunately with incorrect setting
  // values at first.
  mWidth = aCodecSettings->width;
  mHeight = aCodecSettings->height;
  mFrameRate = aCodecSettings->maxFramerate;

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t
WebrtcOMXH264VideoEncoder::Encode(const webrtc::I420VideoFrame& aInputImage,
                                  const webrtc::CodecSpecificInfo* aCodecSpecificInfo,
                                  const std::vector<webrtc::VideoFrameType>* aFrameTypes)
{
  MOZ_ASSERT(mOMX != nullptr);
  if (mOMX == nullptr) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (!mOMXConfigured) {
    mOMX->Configure(mWidth, mHeight, mFrameRate,
                    OMXVideoEncoder::BlobFormat::AVC_NAL);
    mOMXConfigured = true;
    CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p start OMX with image size:%ux%u",
               this, mWidth, mHeight);
  }

  // Wrap the I420VideoFrame input with a PlanarYCbCrImage for OMXVideoEncoder.
  layers::PlanarYCbCrData yuvData;
  yuvData.mYChannel = const_cast<uint8_t*>(aInputImage.buffer(webrtc::kYPlane));
  yuvData.mYSize = gfx::IntSize(aInputImage.width(), aInputImage.height());
  yuvData.mYStride = aInputImage.stride(webrtc::kYPlane);
  MOZ_ASSERT(aInputImage.stride(webrtc::kUPlane) == aInputImage.stride(webrtc::kVPlane));
  yuvData.mCbCrStride = aInputImage.stride(webrtc::kUPlane);
  yuvData.mCbChannel = const_cast<uint8_t*>(aInputImage.buffer(webrtc::kUPlane));
  yuvData.mCrChannel = const_cast<uint8_t*>(aInputImage.buffer(webrtc::kVPlane));
  yuvData.mCbCrSize = gfx::IntSize((yuvData.mYSize.width + 1) / 2,
                                   (yuvData.mYSize.height + 1) / 2);
  yuvData.mPicSize = yuvData.mYSize;
  yuvData.mStereoMode = StereoMode::MONO;
  layers::PlanarYCbCrImage img(nullptr);
  img.SetDataNoCopy(yuvData);

  nsresult rv = mOMX->Encode(&img,
                             yuvData.mYSize.width,
                             yuvData.mYSize.height,
                             aInputImage.timestamp() * 1000 / 90, // 90kHz -> us.
                             0);
  if (rv == NS_OK) {
    if (mOutputDrain == nullptr) {
      mOutputDrain = new EncOutputDrain(mOMX, mCallback);
      mOutputDrain->Start();
    }
    EncodedFrame frame;
    frame.mWidth = mWidth;
    frame.mHeight = mHeight;
    frame.mTimestamp = aInputImage.timestamp();
    frame.mRenderTimeMs = aInputImage.render_time_ms();
    mOutputDrain->QueueInput(frame);
  }

  return (rv == NS_OK) ? WEBRTC_VIDEO_CODEC_OK : WEBRTC_VIDEO_CODEC_ERROR;
}

int32_t
WebrtcOMXH264VideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* aCallback)
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set callback:%p", this, aCallback);
  MOZ_ASSERT(aCallback);
  mCallback = aCallback;

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t
WebrtcOMXH264VideoEncoder::Release()
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p will be released", this);

  if (mOutputDrain != nullptr) {
    mOutputDrain->Stop();
    mOutputDrain = nullptr;
  }

  mOMX = nullptr;

  return WEBRTC_VIDEO_CODEC_OK;
}

WebrtcOMXH264VideoEncoder::~WebrtcOMXH264VideoEncoder()
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p will be destructed", this);

  Release();
}

// Inform the encoder of the new packet loss rate and the round-trip time of
// the network. aPacketLossRate is the fraction lost and can be 0~255
// (255 means 100% lost).
// Note: stagefright doesn't handle these parameters.
int32_t
WebrtcOMXH264VideoEncoder::SetChannelParameters(uint32_t aPacketLossRate,
                                                int aRoundTripTimeMs)
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set channel packet loss:%u, rtt:%d",
             this, aPacketLossRate, aRoundTripTimeMs);

  return WEBRTC_VIDEO_CODEC_OK;
}

// TODO: Bug 997567. Find a way to support frame rate changes.
int32_t
WebrtcOMXH264VideoEncoder::SetRates(uint32_t aBitRate, uint32_t aFrameRate)
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set bitrate:%u, frame rate:%u)",
             this, aBitRate, aFrameRate);
  MOZ_ASSERT(mOMX != nullptr);
  if (mOMX == nullptr) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  mOMX->SetBitrate(aBitRate);

  return WEBRTC_VIDEO_CODEC_OK;
}

// Decoder.
WebrtcOMXH264VideoDecoder::WebrtcOMXH264VideoDecoder()
  : mCallback(nullptr)
  , mOMX(nullptr)
{
  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p will be constructed", this);
}

int32_t
WebrtcOMXH264VideoDecoder::InitDecode(const webrtc::VideoCodec* aCodecSettings,
                                      int32_t aNumOfCores)
{
  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p init OMX:%p", this, mOMX.get());

  // Defer configuration until SPS/PPS NALUs (where actual decoder config
  // values can be extracted) are received.

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t
WebrtcOMXH264VideoDecoder::Decode(const webrtc::EncodedImage& aInputImage,
                                  bool aMissingFrames,
                                  const webrtc::RTPFragmentationHeader* aFragmentation,
                                  const webrtc::CodecSpecificInfo* aCodecSpecificInfo,
                                  int64_t aRenderTimeMs)
{
  if (aInputImage._length == 0 || !aInputImage._buffer) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  ALOGE("WebrtcOMXH264VideoDecoder:%p will decode", this);

  bool configured = !!mOMX;
  if (!configured) {
    // Search for SPS/PPS NALUs in the input to get the decoder config.
    sp<ABuffer> input = new ABuffer(aInputImage._buffer, aInputImage._length);
    sp<MetaData> paramSets = WebrtcOMXDecoder::ParseParamSets(input);
    if (NS_WARN_IF(paramSets == nullptr)) {
      // Cannot configure the decoder because SPS/PPS NALUs haven't been seen.
      return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    }
    RefPtr<WebrtcOMXDecoder> omx = new WebrtcOMXDecoder(MEDIA_MIMETYPE_VIDEO_AVC);
    status_t result = omx->ConfigureWithParamSets(paramSets);
    if (NS_WARN_IF(result != OK)) {
      return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    }
    CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p start OMX", this);
    mOMX = omx;
  }

  bool feedFrame = true;
  while (feedFrame) {
    int64_t timeUs;
    status_t err = mOMX->FillInput(aInputImage, !configured, aRenderTimeMs, mCallback);
    feedFrame = (err == -EAGAIN); // No input buffer available. Try again.
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t
WebrtcOMXH264VideoDecoder::RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* aCallback)
{
  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p set callback:%p", this, aCallback);
  MOZ_ASSERT(aCallback);
  mCallback = aCallback;

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t
WebrtcOMXH264VideoDecoder::Release()
{
  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p will be released", this);

  mOMX = nullptr;

  return WEBRTC_VIDEO_CODEC_OK;
}

WebrtcOMXH264VideoDecoder::~WebrtcOMXH264VideoDecoder()
{
  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p will be destructed", this);
  Release();
}

int32_t
WebrtcOMXH264VideoDecoder::Reset()
{
  CODEC_LOGW("WebrtcOMXH264VideoDecoder::Reset() will NOT reset decoder");
  return WEBRTC_VIDEO_CODEC_OK;
}

}
