Thu, 22 Jan 2015 13:21:57 +0100
Incorporate requested changes from Mozilla in review:
https://bugzilla.mozilla.org/show_bug.cgi?id=1123480#c6

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "CSFLog.h"

#include "WebrtcOMXH264VideoCodec.h"

// Android/Stagefright
#include <avc_utils.h>
#include <binder/ProcessState.h>
#include <foundation/ABuffer.h>
#include <foundation/AMessage.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <MediaCodec.h>
#include <MediaDefs.h>
#include <MediaErrors.h>
#include <MetaData.h>
#include <OMX_Component.h>
using namespace android;

// WebRTC
#include "common_video/interface/texture_video_frame.h"
#include "video_engine/include/vie_external_codec.h"

// Gecko
#include "GonkNativeWindow.h"
#include "GonkNativeWindowClient.h"
#include "mozilla/Atomics.h"
#include "mozilla/Mutex.h"
#include "nsThreadUtils.h"
#include "OMXCodecWrapper.h"
#include "TextureClient.h"

#define DEQUEUE_BUFFER_TIMEOUT_US (100 * 1000ll) // 100ms.
#define START_DEQUEUE_BUFFER_TIMEOUT_US (10 * DEQUEUE_BUFFER_TIMEOUT_US) // 1s.
#define DRAIN_THREAD_TIMEOUT_US (1000 * 1000ll) // 1s.

#define LOG_TAG "WebrtcOMXH264VideoCodec"
#define CODEC_LOGV(...) CSFLogInfo(LOG_TAG, __VA_ARGS__)
#define CODEC_LOGD(...) CSFLogDebug(LOG_TAG, __VA_ARGS__)
#define CODEC_LOGI(...) CSFLogInfo(LOG_TAG, __VA_ARGS__)
#define CODEC_LOGW(...) CSFLogWarn(LOG_TAG, __VA_ARGS__)
#define CODEC_LOGE(...) CSFLogError(LOG_TAG, __VA_ARGS__)

namespace mozilla {

// NS_INLINE_DECL_THREADSAFE_REFCOUNTING() cannot be used directly in
// ImageNativeHandle below because the return type of webrtc::NativeHandle
// AddRef()/Release() conflicts with those defined in the macro. To avoid
// another copy/paste of the ref-counting implementation here, this dummy base
// class is created to provide another level of indirection.
class DummyRefCountBase {
public:
  // Use the name of the real class for logging.
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(ImageNativeHandle)
  // To make sure subclasses will be deleted/destructed properly.
  virtual ~DummyRefCountBase() {}
};

// This class implements 2 interfaces:
// 1. webrtc::NativeHandle: to wrap layers::Image objects so decoded frames can
//    be passed through the WebRTC rendering pipeline using TextureVideoFrame.
// 2. ImageHandle: for the renderer to get the image object inside without
//    knowledge about webrtc::NativeHandle.
class ImageNativeHandle MOZ_FINAL
  : public webrtc::NativeHandle
  , public DummyRefCountBase
{
public:
  ImageNativeHandle(layers::Image* aImage)
    : mImage(aImage)
  {}

  // Implement webrtc::NativeHandle.
  virtual void* GetHandle() MOZ_OVERRIDE { return mImage.get(); }

  virtual int AddRef() MOZ_OVERRIDE
  {
    return DummyRefCountBase::AddRef();
  }

  virtual int Release() MOZ_OVERRIDE
  {
    return DummyRefCountBase::Release();
  }

private:
  RefPtr<layers::Image> mImage;
};
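
// Usage sketch (see WebrtcOMXDecoder::GenerateVideoFrame() below): a decoded
// layers::Image is wrapped in an ImageNativeHandle and handed to
// webrtc::TextureVideoFrame, so the decoded frame can reach the WebRTC
// renderer without an extra copy of the pixel data.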

// Graphic buffer lifecycle management.
// Return buffer to OMX codec when renderer is done with it.
class RecycleCallback
{
public:
  RecycleCallback(const sp<MediaCodec>& aOmx, uint32_t aBufferIndex)
    : mOmx(aOmx)
    , mBufferIndex(aBufferIndex)
  {}
  typedef void* CallbackPtr;
  static void ReturnOMXBuffer(layers::TextureClient* aClient, CallbackPtr aClosure)
  {
    aClient->ClearRecycleCallback();
    RecycleCallback* self = static_cast<RecycleCallback*>(aClosure);
    self->mOmx->releaseOutputBuffer(self->mBufferIndex);
    delete self;
  }

private:
  sp<MediaCodec> mOmx;
  uint32_t mBufferIndex;
};
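
// Usage sketch: GenerateVideoFrame() below attaches a RecycleCallback to the
// TextureClient that wraps a decoded graphic buffer:
//   textureClient->SetRecycleCallback(RecycleCallback::ReturnOMXBuffer,
//                                     new RecycleCallback(mCodec, aBufferIndex));
// so the OMX output buffer is released only after the compositor is done with it.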

struct EncodedFrame
{
  uint32_t mWidth;
  uint32_t mHeight;
  uint32_t mTimestamp;
  int64_t mRenderTimeMs;
};

// Base runnable class to repeatedly pull OMX output buffers in a separate
// thread.
// How to use:
// - Implement DrainOutput() to get output. Remember to return false to tell
//   the drain loop not to pop the input queue.
// - Call QueueInput() to schedule a run to drain output. The input, aFrame,
//   should contain corresponding info such as image size and timestamps for
//   the DrainOutput() implementation to construct data needed by
//   encoded/decoded callbacks.
// TODO: Bug 997110 - Revisit queue/drain logic. Current design assumes that
//       the encoder generates only one output buffer per input frame and won't
//       work if the encoder drops frames or generates multiple outputs per input.
class OMXOutputDrain : public nsRunnable
{
public:
  void Start() {
    MonitorAutoLock lock(mMonitor);
    if (mThread == nullptr) {
      NS_NewNamedThread("OMXOutputDrain", getter_AddRefs(mThread));
    }
    CODEC_LOGD("OMXOutputDrain started");
    mEnding = false;
    mThread->Dispatch(this, NS_DISPATCH_NORMAL);
  }

  void Stop() {
    MonitorAutoLock lock(mMonitor);
    mEnding = true;
    lock.NotifyAll(); // In case Run() is waiting.

    if (mThread != nullptr) {
      mThread->Shutdown();
      mThread = nullptr;
    }
    CODEC_LOGD("OMXOutputDrain stopped");
  }

  void QueueInput(const EncodedFrame& aFrame)
  {
    MonitorAutoLock lock(mMonitor);

    MOZ_ASSERT(mThread);

    mInputFrames.push(aFrame);
    // Notify Run() about the queued input so it can start working.
    lock.NotifyAll();
  }

  NS_IMETHODIMP Run() MOZ_OVERRIDE
  {
    MOZ_ASSERT(mThread);

    MonitorAutoLock lock(mMonitor);
    while (true) {
      if (mInputFrames.empty()) {
        ALOGE("Waiting OMXOutputDrain");
        // Wait for new input.
        lock.Wait();
      }

      if (mEnding) {
        ALOGE("Ending OMXOutputDrain");
        // Stop draining.
        break;
      }

      MOZ_ASSERT(!mInputFrames.empty());
      EncodedFrame frame = mInputFrames.front();
      bool shouldPop = false;
      {
        // Release the monitor while draining because it's blocking.
        MonitorAutoUnlock unlock(mMonitor);
        // |frame| provides the size and time of the corresponding input.
        shouldPop = DrainOutput(frame);
      }
      if (shouldPop) {
        mInputFrames.pop();
      }
    }

    CODEC_LOGD("OMXOutputDrain Ended");
    return NS_OK;
  }

protected:
  OMXOutputDrain()
    : mMonitor("OMXOutputDrain monitor")
    , mEnding(false)
  {}

  // Drain the output buffer for input frame aFrame.
  // aFrame contains info such as the size and time of the input frame and can
  // be used to construct data for encoded/decoded callbacks if needed.
  // Return true to indicate we should pop the input queue, and return false to
  // indicate aFrame should not be removed from the input queue (either the
  // output is not ready yet and we should try again later, or the drained
  // output is SPS/PPS NALUs that have no corresponding input in the queue).
  virtual bool DrainOutput(const EncodedFrame& aFrame) = 0;

private:
  // This monitor protects all things below it, and is also used to
  // wait/notify queued input.
  Monitor mMonitor;
  nsCOMPtr<nsIThread> mThread;
  std::queue<EncodedFrame> mInputFrames;
  bool mEnding;
};
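
// Typical OMXOutputDrain lifecycle, as used by the decoder/encoder below:
//   drain->Start();            // spin up the "OMXOutputDrain" thread
//   drain->QueueInput(frame);  // once per submitted input frame
//   ...                        // DrainOutput() runs on the drain thread
//   drain->Stop();             // when stopping/releasing the codec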

// H.264 decoder using stagefright.
class WebrtcOMXDecoder MOZ_FINAL
{
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebrtcOMXDecoder)
public:
  WebrtcOMXDecoder(const char* aMimeType)
    : mWidth(0)
    , mHeight(0)
    , mStarted(false)
  {
    // Create binder thread pool required by stagefright.
    android::ProcessState::self()->startThreadPool();

    mLooper = new ALooper;
    mLooper->start();
    mCodec = MediaCodec::CreateByType(mLooper, aMimeType, false /* encoder */);
  }

  virtual ~WebrtcOMXDecoder()
  {
    if (mStarted) {
      Stop();
    }
    if (mCodec != nullptr) {
      mCodec->release();
      mCodec.clear();
    }
    mLooper.clear();
  }

  // Parse SPS/PPS NALUs.
  static sp<MetaData> ParseParamSets(sp<ABuffer>& aParamSets)
  {
    return MakeAVCCodecSpecificData(aParamSets);
  }

  // Configure decoder using data returned by ParseParamSets().
  status_t ConfigureWithParamSets(const sp<MetaData>& aParamSets)
  {
    MOZ_ASSERT(mCodec != nullptr);
    if (mCodec == nullptr) {
      return INVALID_OPERATION;
    }

    int32_t width = 0;
    bool ok = aParamSets->findInt32(kKeyWidth, &width);
    MOZ_ASSERT(ok && width > 0);
    int32_t height = 0;
    ok = aParamSets->findInt32(kKeyHeight, &height);
    MOZ_ASSERT(ok && height > 0);
    CODEC_LOGD("OMX:%p decoder config width:%d height:%d", this, width, height);

    sp<AMessage> config = new AMessage();
    config->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
    config->setInt32("width", width);
    config->setInt32("height", height);
    mWidth = width;
    mHeight = height;

    sp<Surface> surface = nullptr;
    mNativeWindow = new GonkNativeWindow();
    if (mNativeWindow.get()) {
      mNativeWindowClient = new GonkNativeWindowClient(mNativeWindow->getBufferQueue());
      if (mNativeWindowClient.get()) {
        surface = new Surface(mNativeWindowClient->getIGraphicBufferProducer());
      }
    }
    status_t result = mCodec->configure(config, surface, nullptr, 0);
    if (result == OK) {
      result = Start();
    }
    return result;
  }

  status_t
  FillInput(const webrtc::EncodedImage& aEncoded, bool aIsFirstFrame,
            int64_t& aRenderTimeMs, webrtc::DecodedImageCallback* aCallback)
  {
    MOZ_ASSERT(mCodec != nullptr);
    if (mCodec == nullptr) {
      return INVALID_OPERATION;
    }

    size_t index;
    status_t err = mCodec->dequeueInputBuffer(&index,
      aIsFirstFrame ? START_DEQUEUE_BUFFER_TIMEOUT_US : DEQUEUE_BUFFER_TIMEOUT_US);
    if (err != OK) {
      CODEC_LOGE("decode dequeue input buffer error:%d", err);
      return err;
    }

    uint32_t flags = 0;
    if (aEncoded._frameType == webrtc::kKeyFrame) {
      flags = aIsFirstFrame ? MediaCodec::BUFFER_FLAG_CODECCONFIG : MediaCodec::BUFFER_FLAG_SYNCFRAME;
    }
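    // Note: the first frame fed to the decoder carries the SPS/PPS parameter
    // sets found in WebrtcOMXH264VideoDecoder::Decode(), so it is submitted as
    // codec-config data; later key frames are marked as plain sync frames and,
    // unlike codec-config buffers, are also queued to the output drain below.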
    size_t size = aEncoded._length;
    MOZ_ASSERT(size);
    const sp<ABuffer>& omxIn = mInputBuffers.itemAt(index);
    MOZ_ASSERT(omxIn->capacity() >= size);
    omxIn->setRange(0, size);
    // Copying is needed because the MediaCodec API doesn't support externally
    // allocated buffers as input.
    memcpy(omxIn->data(), aEncoded._buffer, size);
    int64_t inputTimeUs = aEncoded._timeStamp * 1000 / 90; // 90kHz -> us.
    err = mCodec->queueInputBuffer(index, 0, size, inputTimeUs, flags);
    if (err == OK && !(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
      if (mOutputDrain == nullptr) {
        mOutputDrain = new OutputDrain(this, aCallback);
        mOutputDrain->Start();
      }
      EncodedFrame frame;
      frame.mWidth = mWidth;
      frame.mHeight = mHeight;
      frame.mTimestamp = aEncoded._timeStamp;
      frame.mRenderTimeMs = aRenderTimeMs;
      mOutputDrain->QueueInput(frame);
    }

    return err;
  }

  status_t
  DrainOutput(const EncodedFrame& aFrame, webrtc::DecodedImageCallback* aCallback)
  {
    MOZ_ASSERT(mCodec != nullptr);
    if (mCodec == nullptr) {
      return INVALID_OPERATION;
    }

    size_t index = 0;
    size_t outOffset = 0;
    size_t outSize = 0;
    int64_t outTime = -1ll;
    uint32_t outFlags = 0;
    status_t err = mCodec->dequeueOutputBuffer(&index, &outOffset, &outSize,
                                               &outTime, &outFlags,
                                               DRAIN_THREAD_TIMEOUT_US);
    switch (err) {
      case OK:
        break;
      case -EAGAIN:
        // Not an error: output not available yet. Try later.
        CODEC_LOGI("decode dequeue OMX output buffer timed out. Try later.");
        return err;
      case INFO_FORMAT_CHANGED:
        // Not an error: will get this value when OMX output buffer is enabled,
        // or when input size changed.
        CODEC_LOGD("decode dequeue OMX output buffer format change");
        return err;
      case INFO_OUTPUT_BUFFERS_CHANGED:
        // Not an error: will get this value when OMX output buffer changed
        // (probably because of input size change).
        CODEC_LOGD("decode dequeue OMX output buffer change");
        err = mCodec->getOutputBuffers(&mOutputBuffers);
        MOZ_ASSERT(err == OK);
        return INFO_OUTPUT_BUFFERS_CHANGED;
      default:
        CODEC_LOGE("decode dequeue OMX output buffer error:%d", err);
        // Return OK to instruct OutputDrain to drop input from queue.
        return OK;
    }

    sp<ABuffer> omxOut = mOutputBuffers.itemAt(index);
    nsAutoPtr<webrtc::I420VideoFrame> videoFrame(GenerateVideoFrame(aFrame,
                                                                    index,
                                                                    omxOut));
    if (videoFrame == nullptr) {
      mCodec->releaseOutputBuffer(index);
    } else if (aCallback) {
      aCallback->Decoded(*videoFrame);
      // OMX buffer will be released by RecycleCallback after rendered.
    }

    return err;
  }

private:
  class OutputDrain : public OMXOutputDrain
  {
  public:
    OutputDrain(WebrtcOMXDecoder* aOMX, webrtc::DecodedImageCallback* aCallback)
      : OMXOutputDrain()
      , mOMX(aOMX)
      , mCallback(aCallback)
    {}

  protected:
    virtual bool DrainOutput(const EncodedFrame& aFrame) MOZ_OVERRIDE
    {
      return (mOMX->DrainOutput(aFrame, mCallback) == OK);
    }

  private:
    WebrtcOMXDecoder* mOMX;
    webrtc::DecodedImageCallback* mCallback;
  };

  status_t Start()
  {
    MOZ_ASSERT(!mStarted);
    if (mStarted) {
      return OK;
    }

    status_t err = mCodec->start();
    if (err == OK) {
      mStarted = true;
      mCodec->getInputBuffers(&mInputBuffers);
      mCodec->getOutputBuffers(&mOutputBuffers);
    }

    return err;
  }

  status_t Stop()
  {
    MOZ_ASSERT(mStarted);
    if (!mStarted) {
      return OK;
    }
    if (mOutputDrain != nullptr) {
      mOutputDrain->Stop();
      mOutputDrain = nullptr;
    }

    status_t err = mCodec->stop();
    if (err == OK) {
      mInputBuffers.clear();
      mOutputBuffers.clear();
      mStarted = false;
    } else {
      MOZ_ASSERT(false);
    }

    return err;
  }

  webrtc::I420VideoFrame*
  GenerateVideoFrame(const EncodedFrame& aEncoded, uint32_t aBufferIndex,
                     const sp<ABuffer>& aOMXBuffer)
  {
    // TODO: Get decoded frame buffer through native window to obsolete
    //       changes to stagefright code.
    sp<RefBase> obj;
    bool hasGraphicBuffer = aOMXBuffer->meta()->findObject("graphic-buffer", &obj);
    if (!hasGraphicBuffer) {
      MOZ_ASSERT(false, "Decoder doesn't produce graphic buffer");
      // Nothing to render.
      return nullptr;
    }

    sp<GraphicBuffer> gb = static_cast<GraphicBuffer*>(obj.get());
    if (!gb.get()) {
      MOZ_ASSERT(false, "Null graphic buffer");
      return nullptr;
    }

    RefPtr<mozilla::layers::TextureClient> textureClient =
      mNativeWindow->getTextureClientFromBuffer(gb.get());
    textureClient->SetRecycleCallback(RecycleCallback::ReturnOMXBuffer,
                                      new RecycleCallback(mCodec, aBufferIndex));

    int width = gb->getWidth();
    int height = gb->getHeight();
    layers::GrallocImage::GrallocData grallocData;
    grallocData.mPicSize = gfx::IntSize(width, height);
    grallocData.mGraphicBuffer = textureClient;

    layers::GrallocImage* grallocImage = new layers::GrallocImage();
    grallocImage->SetData(grallocData);

    nsAutoPtr<webrtc::I420VideoFrame> videoFrame(
      new webrtc::TextureVideoFrame(new ImageNativeHandle(grallocImage),
                                    width, height,
                                    aEncoded.mTimestamp,
                                    aEncoded.mRenderTimeMs));

    return videoFrame.forget();
  }

  sp<ALooper> mLooper;
  sp<MediaCodec> mCodec; // OMXCodec
  int mWidth;
  int mHeight;
  android::Vector<sp<ABuffer> > mInputBuffers;
  android::Vector<sp<ABuffer> > mOutputBuffers;
  bool mStarted;

  sp<GonkNativeWindow> mNativeWindow;
  sp<GonkNativeWindowClient> mNativeWindowClient;

  RefPtr<OutputDrain> mOutputDrain;
};
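
// Decoder call flow (driven by WebrtcOMXH264VideoDecoder::Decode() below):
//   1. ParseParamSets() extracts SPS/PPS metadata from the first encoded frame.
//   2. ConfigureWithParamSets() configures and starts the MediaCodec.
//   3. FillInput() copies each encoded frame into an OMX input buffer and
//      queues an EncodedFrame entry for the output drain.
//   4. OutputDrain calls DrainOutput(), which dequeues an OMX output buffer
//      and delivers it to the DecodedImageCallback as a TextureVideoFrame.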

class EncOutputDrain : public OMXOutputDrain
{
public:
  EncOutputDrain(OMXVideoEncoder* aOMX, webrtc::EncodedImageCallback* aCallback)
    : OMXOutputDrain()
    , mOMX(aOMX)
    , mCallback(aCallback)
    , mIsPrevOutputParamSets(false)
  {}

protected:
  virtual bool DrainOutput(const EncodedFrame& aInputFrame) MOZ_OVERRIDE
  {
    nsTArray<uint8_t> output;
    int64_t timeUs = -1ll;
    int flags = 0;
    nsresult rv = mOMX->GetNextEncodedFrame(&output, &timeUs, &flags,
                                            DRAIN_THREAD_TIMEOUT_US);
    if (NS_WARN_IF(NS_FAILED(rv))) {
      // Failed to get an encoded frame. The corresponding input frame should
      // be removed.
      return true;
    }

    if (output.Length() == 0) {
      // No encoded data yet. Try later.
      CODEC_LOGD("OMX:%p (encode no output available this time)", mOMX);
      return false;
    }

    bool isParamSets = (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG);
    bool isIFrame = (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME);
    // Should not be parameter sets and an I-frame at the same time.
    MOZ_ASSERT(!(isParamSets && isIFrame));

    if (mCallback) {
      // The implementation here assumes the encoder output to be a buffer
      // containing parameter sets (SPS + PPS) followed by a series of buffers,
      // each for one input frame.
      // TODO: handle output violating this assumption in bug 997110.
      webrtc::EncodedImage encoded(output.Elements(), output.Length(),
                                   output.Capacity());
      encoded._frameType = (isParamSets || isIFrame) ?
                           webrtc::kKeyFrame : webrtc::kDeltaFrame;
      encoded._encodedWidth = aInputFrame.mWidth;
      encoded._encodedHeight = aInputFrame.mHeight;
      encoded._timeStamp = aInputFrame.mTimestamp;
      encoded.capture_time_ms_ = aInputFrame.mRenderTimeMs;
      encoded._completeFrame = true;

      ALOGE("OMX:%p encode frame type:%d size:%u", mOMX, encoded._frameType, encoded._length);

      // Prepend SPS/PPS to I-frames unless they were sent last time.
      SendEncodedDataToCallback(encoded, isIFrame && !mIsPrevOutputParamSets);
      mIsPrevOutputParamSets = isParamSets;
    }

    // Tell the base class not to pop input for a parameter sets blob because
    // it doesn't have corresponding input.
    return !isParamSets;
  }

private:
  // Send encoded data to the callback. The data will be broken into individual
  // NALUs if necessary and sent to the callback one by one. This function can
  // also insert SPS/PPS NALUs in front of the input data if requested.
  void SendEncodedDataToCallback(webrtc::EncodedImage& aEncodedImage,
                                 bool aPrependParamSets)
  {
    // Individual NALUs inherit metadata from the input encoded data.
    webrtc::EncodedImage nalu(aEncodedImage);

    if (aPrependParamSets) {
      // Insert the current parameter sets in front of the input encoded data.
      nsTArray<uint8_t> paramSets;
      mOMX->GetCodecConfig(&paramSets);
      MOZ_ASSERT(paramSets.Length() > 4); // Start code + ...
      // Set buffer range.
      nalu._buffer = paramSets.Elements();
      nalu._length = paramSets.Length();
      // Break into NALUs and send.
      SendEncodedDataToCallback(nalu, false);
    }

    // Break the input encoded data into NALUs and send each one to the callback.
    const uint8_t* data = aEncodedImage._buffer;
    size_t size = aEncodedImage._length;
    const uint8_t* nalStart = nullptr;
    size_t nalSize = 0;
    while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
      nalu._buffer = const_cast<uint8_t*>(nalStart);
      nalu._length = nalSize;
      mCallback->Encoded(nalu, nullptr, nullptr);
    }
  }

  OMXVideoEncoder* mOMX;
  webrtc::EncodedImageCallback* mCallback;
  bool mIsPrevOutputParamSets;
};
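
// Expected encoder output pattern (see the assumption noted in DrainOutput()
// above): one codec-config buffer holding SPS + PPS, then one buffer per input
// frame. SendEncodedDataToCallback() splits each buffer into Annex B NAL units
// with getNextNALUnit() and delivers them to the EncodedImageCallback one by one.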

// Encoder.
WebrtcOMXH264VideoEncoder::WebrtcOMXH264VideoEncoder()
  : mOMX(nullptr)
  , mCallback(nullptr)
  , mWidth(0)
  , mHeight(0)
  , mFrameRate(0)
  , mOMXConfigured(false)
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p constructed", this);
}

int32_t
WebrtcOMXH264VideoEncoder::InitEncode(const webrtc::VideoCodec* aCodecSettings,
                                      int32_t aNumOfCores,
                                      uint32_t aMaxPayloadSize)
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p init", this);

  if (mOMX == nullptr) {
    nsAutoPtr<OMXVideoEncoder> omx(OMXCodecWrapper::CreateAVCEncoder());
    if (NS_WARN_IF(omx == nullptr)) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    mOMX = omx.forget();
  }

  // Defer configuration until 1st frame is received because this function will
  // be called more than once, and unfortunately with incorrect setting values
  // at first.
  mWidth = aCodecSettings->width;
  mHeight = aCodecSettings->height;
  mFrameRate = aCodecSettings->maxFramerate;

  return WEBRTC_VIDEO_CODEC_OK;
}
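
// Note: the actual OMX configuration happens lazily in Encode() below, when the
// first frame arrives, using the width/height/frame rate recorded above.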

int32_t
WebrtcOMXH264VideoEncoder::Encode(const webrtc::I420VideoFrame& aInputImage,
                                  const webrtc::CodecSpecificInfo* aCodecSpecificInfo,
                                  const std::vector<webrtc::VideoFrameType>* aFrameTypes)
{
  MOZ_ASSERT(mOMX != nullptr);
  if (mOMX == nullptr) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (!mOMXConfigured) {
    mOMX->Configure(mWidth, mHeight, mFrameRate,
                    OMXVideoEncoder::BlobFormat::AVC_NAL);
    mOMXConfigured = true;
    CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p start OMX with image size:%ux%u",
               this, mWidth, mHeight);
  }

  // Wrap I420VideoFrame input with PlanarYCbCrImage for OMXVideoEncoder.
  layers::PlanarYCbCrData yuvData;
  yuvData.mYChannel = const_cast<uint8_t*>(aInputImage.buffer(webrtc::kYPlane));
  yuvData.mYSize = gfx::IntSize(aInputImage.width(), aInputImage.height());
  yuvData.mYStride = aInputImage.stride(webrtc::kYPlane);
  MOZ_ASSERT(aInputImage.stride(webrtc::kUPlane) == aInputImage.stride(webrtc::kVPlane));
  yuvData.mCbCrStride = aInputImage.stride(webrtc::kUPlane);
  yuvData.mCbChannel = const_cast<uint8_t*>(aInputImage.buffer(webrtc::kUPlane));
  yuvData.mCrChannel = const_cast<uint8_t*>(aInputImage.buffer(webrtc::kVPlane));
  yuvData.mCbCrSize = gfx::IntSize((yuvData.mYSize.width + 1) / 2,
                                   (yuvData.mYSize.height + 1) / 2);
  yuvData.mPicSize = yuvData.mYSize;
  yuvData.mStereoMode = StereoMode::MONO;
  layers::PlanarYCbCrImage img(nullptr);
  img.SetDataNoCopy(yuvData);
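
  // Note: webrtc timestamps use a 90 kHz RTP clock, so ts * 1000 / 90 below
  // converts to microseconds (e.g. a timestamp of 90000 is 1,000,000 us = 1 s).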
  nsresult rv = mOMX->Encode(&img,
                             yuvData.mYSize.width,
                             yuvData.mYSize.height,
                             aInputImage.timestamp() * 1000 / 90, // 90kHz -> us.
                             0);
  if (rv == NS_OK) {
    if (mOutputDrain == nullptr) {
      mOutputDrain = new EncOutputDrain(mOMX, mCallback);
      mOutputDrain->Start();
    }
    EncodedFrame frame;
    frame.mWidth = mWidth;
    frame.mHeight = mHeight;
    frame.mTimestamp = aInputImage.timestamp();
    frame.mRenderTimeMs = aInputImage.render_time_ms();
    mOutputDrain->QueueInput(frame);
  }

  return (rv == NS_OK) ? WEBRTC_VIDEO_CODEC_OK : WEBRTC_VIDEO_CODEC_ERROR;
}

int32_t
WebrtcOMXH264VideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* aCallback)
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set callback:%p", this, aCallback);
  MOZ_ASSERT(aCallback);
  mCallback = aCallback;

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t
WebrtcOMXH264VideoEncoder::Release()
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p will be released", this);

  if (mOutputDrain != nullptr) {
    mOutputDrain->Stop();
    mOutputDrain = nullptr;
  }

  mOMX = nullptr;

  return WEBRTC_VIDEO_CODEC_OK;
}

WebrtcOMXH264VideoEncoder::~WebrtcOMXH264VideoEncoder()
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p will be destructed", this);

  Release();
}

// Inform the encoder of the new packet loss rate and the round-trip time of
// the network. aPacketLossRate is the fraction lost and can be 0~255
// (255 means 100% lost).
// Note: stagefright doesn't handle these parameters.
int32_t
WebrtcOMXH264VideoEncoder::SetChannelParameters(uint32_t aPacketLossRate,
                                                int aRoundTripTimeMs)
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set channel packet loss:%u, rtt:%d",
             this, aPacketLossRate, aRoundTripTimeMs);

  return WEBRTC_VIDEO_CODEC_OK;
}

// TODO: Bug 997567. Find a way to support frame rate change.
int32_t
WebrtcOMXH264VideoEncoder::SetRates(uint32_t aBitRate, uint32_t aFrameRate)
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set bitrate:%u, frame rate:%u",
             this, aBitRate, aFrameRate);
  MOZ_ASSERT(mOMX != nullptr);
  if (mOMX == nullptr) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  mOMX->SetBitrate(aBitRate);

  return WEBRTC_VIDEO_CODEC_OK;
}

// Decoder.
WebrtcOMXH264VideoDecoder::WebrtcOMXH264VideoDecoder()
  : mCallback(nullptr)
  , mOMX(nullptr)
{
  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p will be constructed", this);
}

int32_t
WebrtcOMXH264VideoDecoder::InitDecode(const webrtc::VideoCodec* aCodecSettings,
                                      int32_t aNumOfCores)
{
  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p init OMX:%p", this, mOMX.get());

  // Defer configuration until SPS/PPS NALUs (where actual decoder config
  // values can be extracted) are received.

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t
WebrtcOMXH264VideoDecoder::Decode(const webrtc::EncodedImage& aInputImage,
                                  bool aMissingFrames,
                                  const webrtc::RTPFragmentationHeader* aFragmentation,
                                  const webrtc::CodecSpecificInfo* aCodecSpecificInfo,
                                  int64_t aRenderTimeMs)
{
  if (aInputImage._length == 0 || !aInputImage._buffer) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  ALOGE("WebrtcOMXH264VideoDecoder:%p will decode", this);

  bool configured = !!mOMX;
  if (!configured) {
    // Search for SPS/PPS NALUs in the input to get the decoder config.
    sp<ABuffer> input = new ABuffer(aInputImage._buffer, aInputImage._length);
    sp<MetaData> paramSets = WebrtcOMXDecoder::ParseParamSets(input);
    if (NS_WARN_IF(paramSets == nullptr)) {
      // Cannot configure the decoder because SPS/PPS NALUs haven't been seen.
      return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    }
    RefPtr<WebrtcOMXDecoder> omx = new WebrtcOMXDecoder(MEDIA_MIMETYPE_VIDEO_AVC);
    status_t result = omx->ConfigureWithParamSets(paramSets);
    if (NS_WARN_IF(result != OK)) {
      return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
    }
    CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p start OMX", this);
    mOMX = omx;
  }

  bool feedFrame = true;
  while (feedFrame) {
    status_t err = mOMX->FillInput(aInputImage, !configured, aRenderTimeMs, mCallback);
    feedFrame = (err == -EAGAIN); // No input buffer available. Try again.
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t
WebrtcOMXH264VideoDecoder::RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* aCallback)
{
  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p set callback:%p", this, aCallback);
  MOZ_ASSERT(aCallback);
  mCallback = aCallback;

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t
WebrtcOMXH264VideoDecoder::Release()
{
  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p will be released", this);

  mOMX = nullptr;

  return WEBRTC_VIDEO_CODEC_OK;
}

WebrtcOMXH264VideoDecoder::~WebrtcOMXH264VideoDecoder()
{
  CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p will be destructed", this);
  Release();
}

int32_t
WebrtcOMXH264VideoDecoder::Reset()
{
  CODEC_LOGW("WebrtcOMXH264VideoDecoder::Reset() will NOT reset decoder");
  return WEBRTC_VIDEO_CODEC_OK;
}

} // namespace mozilla