/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "CSFLog.h"

#include "WebrtcOMXH264VideoCodec.h"

// Android/Stagefright
// NOTE(review): the header names inside <> were stripped during extraction;
// the list below is reconstructed from the symbols used in this file
// (MakeAVCCodecSpecificData, ProcessState, ABuffer, AMessage, Surface,
// MediaCodec, MEDIA_MIMETYPE_VIDEO_AVC, status_t codes, MetaData) —
// confirm against the original build.
#include <avc_utils.h>
#include <binder/ProcessState.h>
#include <foundation/ABuffer.h>
#include <foundation/AMessage.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <MediaCodec.h>
#include <MediaDefs.h>
#include <MediaErrors.h>
#include <MetaData.h>
#include <OMX_Component.h>
using namespace android;

// WebRTC
#include "common_video/interface/texture_video_frame.h"
#include "video_engine/include/vie_external_codec.h"

// Gecko
#include "GonkNativeWindow.h"
#include "GonkNativeWindowClient.h"
#include "mozilla/Atomics.h"
#include "mozilla/Mutex.h"
#include "nsThreadUtils.h"
#include "OMXCodecWrapper.h"
#include "TextureClient.h"

#define DEQUEUE_BUFFER_TIMEOUT_US (100 * 1000ll) // 100ms.
#define START_DEQUEUE_BUFFER_TIMEOUT_US (10 * DEQUEUE_BUFFER_TIMEOUT_US) // 1s.
#define DRAIN_THREAD_TIMEOUT_US (1000 * 1000ll) // 1s.

#define LOG_TAG "WebrtcOMXH264VideoCodec"
#define CODEC_LOGV(...) CSFLogInfo(LOG_TAG, __VA_ARGS__)
#define CODEC_LOGD(...) CSFLogDebug(LOG_TAG, __VA_ARGS__)
#define CODEC_LOGI(...) CSFLogInfo(LOG_TAG, __VA_ARGS__)
#define CODEC_LOGW(...) CSFLogWarn(LOG_TAG, __VA_ARGS__)
#define CODEC_LOGE(...) CSFLogError(LOG_TAG, __VA_ARGS__)
CSFLogError(LOG_TAG, __VA_ARGS__) michael@0: michael@0: namespace mozilla { michael@0: michael@0: // NS_INLINE_DECL_THREADSAFE_REFCOUNTING() cannot be used directly in michael@0: // ImageNativeHandle below because the return type of webrtc::NativeHandle michael@0: // AddRef()/Release() conflicts with those defined in macro. To avoid another michael@0: // copy/paste of ref-counting implementation here, this dummy base class michael@0: // is created to proivde another level of indirection. michael@0: class DummyRefCountBase { michael@0: public: michael@0: // Use the name of real class for logging. michael@0: NS_INLINE_DECL_THREADSAFE_REFCOUNTING(ImageNativeHandle) michael@0: // To make sure subclass will be deleted/destructed properly. michael@0: virtual ~DummyRefCountBase() {} michael@0: }; michael@0: michael@0: // This function implements 2 interafces: michael@0: // 1. webrtc::NativeHandle: to wrap layers::Image object so decoded frames can michael@0: // be passed through WebRTC rendering pipeline using TextureVideoFrame. michael@0: // 2. ImageHandle: for renderer to get the image object inside without knowledge michael@0: // about webrtc::NativeHandle. michael@0: class ImageNativeHandle MOZ_FINAL michael@0: : public webrtc::NativeHandle michael@0: , public DummyRefCountBase michael@0: { michael@0: public: michael@0: ImageNativeHandle(layers::Image* aImage) michael@0: : mImage(aImage) michael@0: {} michael@0: michael@0: // Implement webrtc::NativeHandle. michael@0: virtual void* GetHandle() MOZ_OVERRIDE { return mImage.get(); } michael@0: michael@0: virtual int AddRef() MOZ_OVERRIDE michael@0: { michael@0: return DummyRefCountBase::AddRef(); michael@0: } michael@0: michael@0: virtual int Release() MOZ_OVERRIDE michael@0: { michael@0: return DummyRefCountBase::Release(); michael@0: } michael@0: michael@0: private: michael@0: RefPtr mImage; michael@0: }; michael@0: michael@0: // Graphic buffer lifecycle management. 
michael@0: // Return buffer to OMX codec when renderer is done with it. michael@0: class RecycleCallback michael@0: { michael@0: public: michael@0: RecycleCallback(const sp& aOmx, uint32_t aBufferIndex) michael@0: : mOmx(aOmx) michael@0: , mBufferIndex(aBufferIndex) michael@0: {} michael@0: typedef void* CallbackPtr; michael@0: static void ReturnOMXBuffer(layers::TextureClient* aClient, CallbackPtr aClosure) michael@0: { michael@0: aClient->ClearRecycleCallback(); michael@0: RecycleCallback* self = static_cast(aClosure); michael@0: self->mOmx->releaseOutputBuffer(self->mBufferIndex); michael@0: delete self; michael@0: } michael@0: michael@0: private: michael@0: sp mOmx; michael@0: uint32_t mBufferIndex; michael@0: }; michael@0: michael@0: struct EncodedFrame michael@0: { michael@0: uint32_t mWidth; michael@0: uint32_t mHeight; michael@0: uint32_t mTimestamp; michael@0: int64_t mRenderTimeMs; michael@0: }; michael@0: michael@0: // Base runnable class to repeatly pull OMX output buffers in seperate thread. michael@0: // How to use: michael@0: // - implementing DrainOutput() to get output. Remember to return false to tell michael@0: // drain not to pop input queue. michael@0: // - call QueueInput() to schedule a run to drain output. The input, aFrame, michael@0: // should contains corresponding info such as image size and timestamps for michael@0: // DrainOutput() implementation to construct data needed by encoded/decoded michael@0: // callbacks. michael@0: // TODO: Bug 997110 - Revisit queue/drain logic. Current design assumes that michael@0: // encoder only generate one output buffer per input frame and won't work michael@0: // if encoder drops frames or generates multiple output per input. 
michael@0: class OMXOutputDrain : public nsRunnable michael@0: { michael@0: public: michael@0: void Start() { michael@0: MonitorAutoLock lock(mMonitor); michael@0: if (mThread == nullptr) { michael@0: NS_NewNamedThread("OMXOutputDrain", getter_AddRefs(mThread)); michael@0: } michael@0: CODEC_LOGD("OMXOutputDrain started"); michael@0: mEnding = false; michael@0: mThread->Dispatch(this, NS_DISPATCH_NORMAL); michael@0: } michael@0: michael@0: void Stop() { michael@0: MonitorAutoLock lock(mMonitor); michael@0: mEnding = true; michael@0: lock.NotifyAll(); // In case Run() is waiting. michael@0: michael@0: if (mThread != nullptr) { michael@0: mThread->Shutdown(); michael@0: mThread = nullptr; michael@0: } michael@0: CODEC_LOGD("OMXOutputDrain stopped"); michael@0: } michael@0: michael@0: void QueueInput(const EncodedFrame& aFrame) michael@0: { michael@0: MonitorAutoLock lock(mMonitor); michael@0: michael@0: MOZ_ASSERT(mThread); michael@0: michael@0: mInputFrames.push(aFrame); michael@0: // Notify Run() about queued input and it can start working. michael@0: lock.NotifyAll(); michael@0: } michael@0: michael@0: NS_IMETHODIMP Run() MOZ_OVERRIDE michael@0: { michael@0: MOZ_ASSERT(mThread); michael@0: michael@0: MonitorAutoLock lock(mMonitor); michael@0: while (true) { michael@0: if (mInputFrames.empty()) { michael@0: ALOGE("Waiting OMXOutputDrain"); michael@0: // Wait for new input. michael@0: lock.Wait(); michael@0: } michael@0: michael@0: if (mEnding) { michael@0: ALOGE("Ending OMXOutputDrain"); michael@0: // Stop draining. michael@0: break; michael@0: } michael@0: michael@0: MOZ_ASSERT(!mInputFrames.empty()); michael@0: EncodedFrame frame = mInputFrames.front(); michael@0: bool shouldPop = false; michael@0: { michael@0: // Release monitor while draining because it's blocking. michael@0: MonitorAutoUnlock unlock(mMonitor); michael@0: // |frame| provides size and time of corresponding input. 
michael@0: shouldPop = DrainOutput(frame); michael@0: } michael@0: if (shouldPop) { michael@0: mInputFrames.pop(); michael@0: } michael@0: } michael@0: michael@0: CODEC_LOGD("OMXOutputDrain Ended"); michael@0: return NS_OK; michael@0: } michael@0: michael@0: protected: michael@0: OMXOutputDrain() michael@0: : mMonitor("OMXOutputDrain monitor") michael@0: , mEnding(false) michael@0: {} michael@0: michael@0: // Drain output buffer for input frame aFrame. michael@0: // aFrame contains info such as size and time of the input frame and can be michael@0: // used to construct data for encoded/decoded callbacks if needed. michael@0: // Return true to indicate we should pop input queue, and return false to michael@0: // indicate aFrame should not be removed from input queue (either output is michael@0: // not ready yet and should try again later, or the drained output is SPS/PPS michael@0: // NALUs that has no corresponding input in queue). michael@0: virtual bool DrainOutput(const EncodedFrame& aFrame) = 0; michael@0: michael@0: private: michael@0: // This monitor protects all things below it, and is also used to michael@0: // wait/notify queued input. michael@0: Monitor mMonitor; michael@0: nsCOMPtr mThread; michael@0: std::queue mInputFrames; michael@0: bool mEnding; michael@0: }; michael@0: michael@0: // H.264 decoder using stagefright. michael@0: class WebrtcOMXDecoder MOZ_FINAL michael@0: { michael@0: NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebrtcOMXDecoder) michael@0: public: michael@0: WebrtcOMXDecoder(const char* aMimeType) michael@0: : mWidth(0) michael@0: , mHeight(0) michael@0: , mStarted(false) michael@0: { michael@0: // Create binder thread pool required by stagefright. 
michael@0: android::ProcessState::self()->startThreadPool(); michael@0: michael@0: mLooper = new ALooper; michael@0: mLooper->start(); michael@0: mCodec = MediaCodec::CreateByType(mLooper, aMimeType, false /* encoder */); michael@0: } michael@0: michael@0: virtual ~WebrtcOMXDecoder() michael@0: { michael@0: if (mStarted) { michael@0: Stop(); michael@0: } michael@0: if (mCodec != nullptr) { michael@0: mCodec->release(); michael@0: mCodec.clear(); michael@0: } michael@0: mLooper.clear(); michael@0: } michael@0: michael@0: // Parse SPS/PPS NALUs. michael@0: static sp ParseParamSets(sp& aParamSets) michael@0: { michael@0: return MakeAVCCodecSpecificData(aParamSets); michael@0: } michael@0: michael@0: // Configure decoder using data returned by ParseParamSets(). michael@0: status_t ConfigureWithParamSets(const sp& aParamSets) michael@0: { michael@0: MOZ_ASSERT(mCodec != nullptr); michael@0: if (mCodec == nullptr) { michael@0: return INVALID_OPERATION; michael@0: } michael@0: michael@0: int32_t width = 0; michael@0: bool ok = aParamSets->findInt32(kKeyWidth, &width); michael@0: MOZ_ASSERT(ok && width > 0); michael@0: int32_t height = 0; michael@0: ok = aParamSets->findInt32(kKeyHeight, &height); michael@0: MOZ_ASSERT(ok && height > 0); michael@0: CODEC_LOGD("OMX:%p decoder config width:%d height:%d", this, width, height); michael@0: michael@0: sp config = new AMessage(); michael@0: config->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC); michael@0: config->setInt32("width", width); michael@0: config->setInt32("height", height); michael@0: mWidth = width; michael@0: mHeight = height; michael@0: michael@0: sp surface = nullptr; michael@0: mNativeWindow = new GonkNativeWindow(); michael@0: if (mNativeWindow.get()) { michael@0: mNativeWindowClient = new GonkNativeWindowClient(mNativeWindow->getBufferQueue()); michael@0: if (mNativeWindowClient.get()) { michael@0: surface = new Surface(mNativeWindowClient->getIGraphicBufferProducer()); michael@0: } michael@0: } michael@0: 
status_t result = mCodec->configure(config, surface, nullptr, 0); michael@0: if (result == OK) { michael@0: result = Start(); michael@0: } michael@0: return result; michael@0: } michael@0: michael@0: status_t michael@0: FillInput(const webrtc::EncodedImage& aEncoded, bool aIsFirstFrame, michael@0: int64_t& aRenderTimeMs, webrtc::DecodedImageCallback* aCallback) michael@0: { michael@0: MOZ_ASSERT(mCodec != nullptr); michael@0: if (mCodec == nullptr) { michael@0: return INVALID_OPERATION; michael@0: } michael@0: michael@0: size_t index; michael@0: status_t err = mCodec->dequeueInputBuffer(&index, michael@0: aIsFirstFrame ? START_DEQUEUE_BUFFER_TIMEOUT_US : DEQUEUE_BUFFER_TIMEOUT_US); michael@0: if (err != OK) { michael@0: CODEC_LOGE("decode dequeue input buffer error:%d", err); michael@0: return err; michael@0: } michael@0: michael@0: uint32_t flags = 0; michael@0: if (aEncoded._frameType == webrtc::kKeyFrame) { michael@0: flags = aIsFirstFrame ? MediaCodec::BUFFER_FLAG_CODECCONFIG : MediaCodec::BUFFER_FLAG_SYNCFRAME; michael@0: } michael@0: size_t size = aEncoded._length; michael@0: MOZ_ASSERT(size); michael@0: const sp& omxIn = mInputBuffers.itemAt(index); michael@0: MOZ_ASSERT(omxIn->capacity() >= size); michael@0: omxIn->setRange(0, size); michael@0: // Copying is needed because MediaCodec API doesn't support externallay michael@0: // allocated buffer as input. michael@0: memcpy(omxIn->data(), aEncoded._buffer, size); michael@0: int64_t inputTimeUs = aEncoded._timeStamp * 1000 / 90; // 90kHz -> us. 
michael@0: err = mCodec->queueInputBuffer(index, 0, size, inputTimeUs, flags); michael@0: if (err == OK && !(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) { michael@0: if (mOutputDrain == nullptr) { michael@0: mOutputDrain = new OutputDrain(this, aCallback); michael@0: mOutputDrain->Start(); michael@0: } michael@0: EncodedFrame frame; michael@0: frame.mWidth = mWidth; michael@0: frame.mHeight = mHeight; michael@0: frame.mTimestamp = aEncoded._timeStamp; michael@0: frame.mRenderTimeMs = aRenderTimeMs; michael@0: mOutputDrain->QueueInput(frame); michael@0: } michael@0: michael@0: return err; michael@0: } michael@0: michael@0: status_t michael@0: DrainOutput(const EncodedFrame& aFrame, webrtc::DecodedImageCallback* aCallback) michael@0: { michael@0: MOZ_ASSERT(mCodec != nullptr); michael@0: if (mCodec == nullptr) { michael@0: return INVALID_OPERATION; michael@0: } michael@0: michael@0: size_t index = 0; michael@0: size_t outOffset = 0; michael@0: size_t outSize = 0; michael@0: int64_t outTime = -1ll; michael@0: uint32_t outFlags = 0; michael@0: status_t err = mCodec->dequeueOutputBuffer(&index, &outOffset, &outSize, michael@0: &outTime, &outFlags, michael@0: DRAIN_THREAD_TIMEOUT_US); michael@0: switch (err) { michael@0: case OK: michael@0: break; michael@0: case -EAGAIN: michael@0: // Not an error: output not available yet. Try later. michael@0: CODEC_LOGI("decode dequeue OMX output buffer timed out. Try later."); michael@0: return err; michael@0: case INFO_FORMAT_CHANGED: michael@0: // Not an error: will get this value when OMX output buffer is enabled, michael@0: // or when input size changed. michael@0: CODEC_LOGD("decode dequeue OMX output buffer format change"); michael@0: return err; michael@0: case INFO_OUTPUT_BUFFERS_CHANGED: michael@0: // Not an error: will get this value when OMX output buffer changed michael@0: // (probably because of input size change). 
michael@0: CODEC_LOGD("decode dequeue OMX output buffer change"); michael@0: err = mCodec->getOutputBuffers(&mOutputBuffers); michael@0: MOZ_ASSERT(err == OK); michael@0: return INFO_OUTPUT_BUFFERS_CHANGED; michael@0: default: michael@0: CODEC_LOGE("decode dequeue OMX output buffer error:%d", err); michael@0: // Return OK to instruct OutputDrain to drop input from queue. michael@0: return OK; michael@0: } michael@0: michael@0: sp omxOut = mOutputBuffers.itemAt(index); michael@0: nsAutoPtr videoFrame(GenerateVideoFrame(aFrame, michael@0: index, michael@0: omxOut)); michael@0: if (videoFrame == nullptr) { michael@0: mCodec->releaseOutputBuffer(index); michael@0: } else if (aCallback) { michael@0: aCallback->Decoded(*videoFrame); michael@0: // OMX buffer will be released by RecycleCallback after rendered. michael@0: } michael@0: michael@0: return err; michael@0: } michael@0: michael@0: private: michael@0: class OutputDrain : public OMXOutputDrain michael@0: { michael@0: public: michael@0: OutputDrain(WebrtcOMXDecoder* aOMX, webrtc::DecodedImageCallback* aCallback) michael@0: : OMXOutputDrain() michael@0: , mOMX(aOMX) michael@0: , mCallback(aCallback) michael@0: {} michael@0: michael@0: protected: michael@0: virtual bool DrainOutput(const EncodedFrame& aFrame) MOZ_OVERRIDE michael@0: { michael@0: return (mOMX->DrainOutput(aFrame, mCallback) == OK); michael@0: } michael@0: michael@0: private: michael@0: WebrtcOMXDecoder* mOMX; michael@0: webrtc::DecodedImageCallback* mCallback; michael@0: }; michael@0: michael@0: status_t Start() michael@0: { michael@0: MOZ_ASSERT(!mStarted); michael@0: if (mStarted) { michael@0: return OK; michael@0: } michael@0: michael@0: status_t err = mCodec->start(); michael@0: if (err == OK) { michael@0: mStarted = true; michael@0: mCodec->getInputBuffers(&mInputBuffers); michael@0: mCodec->getOutputBuffers(&mOutputBuffers); michael@0: } michael@0: michael@0: return err; michael@0: } michael@0: michael@0: status_t Stop() michael@0: { michael@0: 
MOZ_ASSERT(mStarted); michael@0: if (!mStarted) { michael@0: return OK; michael@0: } michael@0: if (mOutputDrain != nullptr) { michael@0: mOutputDrain->Stop(); michael@0: mOutputDrain = nullptr; michael@0: } michael@0: michael@0: status_t err = mCodec->stop(); michael@0: if (err == OK) { michael@0: mInputBuffers.clear(); michael@0: mOutputBuffers.clear(); michael@0: mStarted = false; michael@0: } else { michael@0: MOZ_ASSERT(false); michael@0: } michael@0: michael@0: return err; michael@0: } michael@0: michael@0: webrtc::I420VideoFrame* michael@0: GenerateVideoFrame(const EncodedFrame& aEncoded, uint32_t aBufferIndex, michael@0: const sp& aOMXBuffer) michael@0: { michael@0: // TODO: Get decoded frame buffer through native window to obsolete michael@0: // changes to stagefright code. michael@0: sp obj; michael@0: bool hasGraphicBuffer = aOMXBuffer->meta()->findObject("graphic-buffer", &obj); michael@0: if (!hasGraphicBuffer) { michael@0: MOZ_ASSERT(false, "Decoder doesn't produce graphic buffer"); michael@0: // Nothing to render. 
michael@0: return nullptr; michael@0: } michael@0: michael@0: sp gb = static_cast(obj.get()); michael@0: if (!gb.get()) { michael@0: MOZ_ASSERT(false, "Null graphic buffer"); michael@0: return nullptr; michael@0: } michael@0: michael@0: RefPtr textureClient = michael@0: mNativeWindow->getTextureClientFromBuffer(gb.get()); michael@0: textureClient->SetRecycleCallback(RecycleCallback::ReturnOMXBuffer, michael@0: new RecycleCallback(mCodec, aBufferIndex)); michael@0: michael@0: int width = gb->getWidth(); michael@0: int height = gb->getHeight(); michael@0: layers::GrallocImage::GrallocData grallocData; michael@0: grallocData.mPicSize = gfx::IntSize(width, height); michael@0: grallocData.mGraphicBuffer = textureClient; michael@0: michael@0: layers::GrallocImage* grallocImage = new layers::GrallocImage(); michael@0: grallocImage->SetData(grallocData); michael@0: michael@0: nsAutoPtr videoFrame( michael@0: new webrtc::TextureVideoFrame(new ImageNativeHandle(grallocImage), michael@0: width, height, michael@0: aEncoded.mTimestamp, michael@0: aEncoded.mRenderTimeMs)); michael@0: michael@0: return videoFrame.forget(); michael@0: } michael@0: michael@0: sp mLooper; michael@0: sp mCodec; // OMXCodec michael@0: int mWidth; michael@0: int mHeight; michael@0: android::Vector > mInputBuffers; michael@0: android::Vector > mOutputBuffers; michael@0: bool mStarted; michael@0: michael@0: sp mNativeWindow; michael@0: sp mNativeWindowClient; michael@0: michael@0: RefPtr mOutputDrain; michael@0: }; michael@0: michael@0: class EncOutputDrain : public OMXOutputDrain michael@0: { michael@0: public: michael@0: EncOutputDrain(OMXVideoEncoder* aOMX, webrtc::EncodedImageCallback* aCallback) michael@0: : OMXOutputDrain() michael@0: , mOMX(aOMX) michael@0: , mCallback(aCallback) michael@0: , mIsPrevOutputParamSets(false) michael@0: {} michael@0: michael@0: protected: michael@0: virtual bool DrainOutput(const EncodedFrame& aInputFrame) MOZ_OVERRIDE michael@0: { michael@0: nsTArray output; 
michael@0: int64_t timeUs = -1ll; michael@0: int flags = 0; michael@0: nsresult rv = mOMX->GetNextEncodedFrame(&output, &timeUs, &flags, michael@0: DRAIN_THREAD_TIMEOUT_US); michael@0: if (NS_WARN_IF(NS_FAILED(rv))) { michael@0: // Fail to get encoded frame. The corresponding input frame should be michael@0: // removed. michael@0: return true; michael@0: } michael@0: michael@0: if (output.Length() == 0) { michael@0: // No encoded data yet. Try later. michael@0: CODEC_LOGD("OMX:%p (encode no output available this time)", mOMX); michael@0: return false; michael@0: } michael@0: michael@0: bool isParamSets = (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG); michael@0: bool isIFrame = (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME); michael@0: // Should not be parameter sets and I-frame at the same time. michael@0: MOZ_ASSERT(!(isParamSets && isIFrame)); michael@0: michael@0: if (mCallback) { michael@0: // Implementation here assumes encoder output to be a buffer containing michael@0: // parameter sets(SPS + PPS) followed by a series of buffers, each for michael@0: // one input frame. michael@0: // TODO: handle output violating this assumpton in bug 997110. michael@0: webrtc::EncodedImage encoded(output.Elements(), output.Length(), michael@0: output.Capacity()); michael@0: encoded._frameType = (isParamSets || isIFrame) ? michael@0: webrtc::kKeyFrame : webrtc::kDeltaFrame; michael@0: encoded._encodedWidth = aInputFrame.mWidth; michael@0: encoded._encodedHeight = aInputFrame.mHeight; michael@0: encoded._timeStamp = aInputFrame.mTimestamp; michael@0: encoded.capture_time_ms_ = aInputFrame.mRenderTimeMs; michael@0: encoded._completeFrame = true; michael@0: michael@0: ALOGE("OMX:%p encode frame type:%d size:%u", mOMX, encoded._frameType, encoded._length); michael@0: michael@0: // Prepend SPS/PPS to I-frames unless they were sent last time. 
michael@0: SendEncodedDataToCallback(encoded, isIFrame && !mIsPrevOutputParamSets); michael@0: mIsPrevOutputParamSets = isParamSets; michael@0: } michael@0: michael@0: // Tell base class not to pop input for parameter sets blob because they michael@0: // don't have corresponding input. michael@0: return !isParamSets; michael@0: } michael@0: michael@0: private: michael@0: // Send encoded data to callback.The data will be broken into individual NALUs michael@0: // if necessary and sent to callback one by one. This function can also insert michael@0: // SPS/PPS NALUs in front of input data if requested. michael@0: void SendEncodedDataToCallback(webrtc::EncodedImage& aEncodedImage, michael@0: bool aPrependParamSets) michael@0: { michael@0: // Individual NALU inherits metadata from input encoded data. michael@0: webrtc::EncodedImage nalu(aEncodedImage); michael@0: michael@0: if (aPrependParamSets) { michael@0: // Insert current parameter sets in front of the input encoded data. michael@0: nsTArray paramSets; michael@0: mOMX->GetCodecConfig(¶mSets); michael@0: MOZ_ASSERT(paramSets.Length() > 4); // Start code + ... michael@0: // Set buffer range. michael@0: nalu._buffer = paramSets.Elements(); michael@0: nalu._length = paramSets.Length(); michael@0: // Break into NALUs and send. michael@0: SendEncodedDataToCallback(nalu, false); michael@0: } michael@0: michael@0: // Break input encoded data into NALUs and send each one to callback. 
michael@0: const uint8_t* data = aEncodedImage._buffer; michael@0: size_t size = aEncodedImage._length; michael@0: const uint8_t* nalStart = nullptr; michael@0: size_t nalSize = 0; michael@0: while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) { michael@0: nalu._buffer = const_cast(nalStart); michael@0: nalu._length = nalSize; michael@0: mCallback->Encoded(nalu, nullptr, nullptr); michael@0: } michael@0: } michael@0: michael@0: OMXVideoEncoder* mOMX; michael@0: webrtc::EncodedImageCallback* mCallback; michael@0: bool mIsPrevOutputParamSets; michael@0: }; michael@0: michael@0: // Encoder. michael@0: WebrtcOMXH264VideoEncoder::WebrtcOMXH264VideoEncoder() michael@0: : mOMX(nullptr) michael@0: , mCallback(nullptr) michael@0: , mWidth(0) michael@0: , mHeight(0) michael@0: , mFrameRate(0) michael@0: , mOMXConfigured(false) michael@0: { michael@0: CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p constructed", this); michael@0: } michael@0: michael@0: int32_t michael@0: WebrtcOMXH264VideoEncoder::InitEncode(const webrtc::VideoCodec* aCodecSettings, michael@0: int32_t aNumOfCores, michael@0: uint32_t aMaxPayloadSize) michael@0: { michael@0: CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p init", this); michael@0: michael@0: if (mOMX == nullptr) { michael@0: nsAutoPtr omx(OMXCodecWrapper::CreateAVCEncoder()); michael@0: if (NS_WARN_IF(omx == nullptr)) { michael@0: return WEBRTC_VIDEO_CODEC_ERROR; michael@0: } michael@0: mOMX = omx.forget(); michael@0: } michael@0: michael@0: // Defer configuration until 1st frame is received because this function will michael@0: // be called more than once, and unfortunately with incorrect setting values michael@0: // at first. 
michael@0: mWidth = aCodecSettings->width; michael@0: mHeight = aCodecSettings->height; michael@0: mFrameRate = aCodecSettings->maxFramerate; michael@0: michael@0: return WEBRTC_VIDEO_CODEC_OK; michael@0: } michael@0: michael@0: int32_t michael@0: WebrtcOMXH264VideoEncoder::Encode(const webrtc::I420VideoFrame& aInputImage, michael@0: const webrtc::CodecSpecificInfo* aCodecSpecificInfo, michael@0: const std::vector* aFrameTypes) michael@0: { michael@0: MOZ_ASSERT(mOMX != nullptr); michael@0: if (mOMX == nullptr) { michael@0: return WEBRTC_VIDEO_CODEC_ERROR; michael@0: } michael@0: michael@0: if (!mOMXConfigured) { michael@0: mOMX->Configure(mWidth, mHeight, mFrameRate, michael@0: OMXVideoEncoder::BlobFormat::AVC_NAL); michael@0: mOMXConfigured = true; michael@0: CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p start OMX with image size:%ux%u", michael@0: this, mWidth, mHeight); michael@0: } michael@0: michael@0: // Wrap I420VideoFrame input with PlanarYCbCrImage for OMXVideoEncoder. michael@0: layers::PlanarYCbCrData yuvData; michael@0: yuvData.mYChannel = const_cast(aInputImage.buffer(webrtc::kYPlane)); michael@0: yuvData.mYSize = gfx::IntSize(aInputImage.width(), aInputImage.height()); michael@0: yuvData.mYStride = aInputImage.stride(webrtc::kYPlane); michael@0: MOZ_ASSERT(aInputImage.stride(webrtc::kUPlane) == aInputImage.stride(webrtc::kVPlane)); michael@0: yuvData.mCbCrStride = aInputImage.stride(webrtc::kUPlane); michael@0: yuvData.mCbChannel = const_cast(aInputImage.buffer(webrtc::kUPlane)); michael@0: yuvData.mCrChannel = const_cast(aInputImage.buffer(webrtc::kVPlane)); michael@0: yuvData.mCbCrSize = gfx::IntSize((yuvData.mYSize.width + 1) / 2, michael@0: (yuvData.mYSize.height + 1) / 2); michael@0: yuvData.mPicSize = yuvData.mYSize; michael@0: yuvData.mStereoMode = StereoMode::MONO; michael@0: layers::PlanarYCbCrImage img(nullptr); michael@0: img.SetDataNoCopy(yuvData); michael@0: michael@0: nsresult rv = mOMX->Encode(&img, michael@0: yuvData.mYSize.width, 
michael@0: yuvData.mYSize.height, michael@0: aInputImage.timestamp() * 1000 / 90, // 90kHz -> us. michael@0: 0); michael@0: if (rv == NS_OK) { michael@0: if (mOutputDrain == nullptr) { michael@0: mOutputDrain = new EncOutputDrain(mOMX, mCallback); michael@0: mOutputDrain->Start(); michael@0: } michael@0: EncodedFrame frame; michael@0: frame.mWidth = mWidth; michael@0: frame.mHeight = mHeight; michael@0: frame.mTimestamp = aInputImage.timestamp(); michael@0: frame.mRenderTimeMs = aInputImage.render_time_ms(); michael@0: mOutputDrain->QueueInput(frame); michael@0: } michael@0: michael@0: return (rv == NS_OK) ? WEBRTC_VIDEO_CODEC_OK : WEBRTC_VIDEO_CODEC_ERROR; michael@0: } michael@0: michael@0: int32_t michael@0: WebrtcOMXH264VideoEncoder::RegisterEncodeCompleteCallback( michael@0: webrtc::EncodedImageCallback* aCallback) michael@0: { michael@0: CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set callback:%p", this, aCallback); michael@0: MOZ_ASSERT(aCallback); michael@0: mCallback = aCallback; michael@0: michael@0: return WEBRTC_VIDEO_CODEC_OK; michael@0: } michael@0: michael@0: int32_t michael@0: WebrtcOMXH264VideoEncoder::Release() michael@0: { michael@0: CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p will be released", this); michael@0: michael@0: if (mOutputDrain != nullptr) { michael@0: mOutputDrain->Stop(); michael@0: mOutputDrain = nullptr; michael@0: } michael@0: michael@0: mOMX = nullptr; michael@0: michael@0: return WEBRTC_VIDEO_CODEC_OK; michael@0: } michael@0: michael@0: WebrtcOMXH264VideoEncoder::~WebrtcOMXH264VideoEncoder() michael@0: { michael@0: CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p will be destructed", this); michael@0: michael@0: Release(); michael@0: } michael@0: michael@0: // Inform the encoder of the new packet loss rate and the round-trip time of michael@0: // the network. aPacketLossRate is fraction lost and can be 0~255 michael@0: // (255 means 100% lost). michael@0: // Note: stagefright doesn't handle these parameters. 
michael@0: int32_t michael@0: WebrtcOMXH264VideoEncoder::SetChannelParameters(uint32_t aPacketLossRate, michael@0: int aRoundTripTimeMs) michael@0: { michael@0: CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set channel packet loss:%u, rtt:%d", michael@0: this, aPacketLossRate, aRoundTripTimeMs); michael@0: michael@0: return WEBRTC_VIDEO_CODEC_OK; michael@0: } michael@0: michael@0: // TODO: Bug 997567. Find the way to support frame rate change. michael@0: int32_t michael@0: WebrtcOMXH264VideoEncoder::SetRates(uint32_t aBitRate, uint32_t aFrameRate) michael@0: { michael@0: CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set bitrate:%u, frame rate:%u)", michael@0: this, aBitRate, aFrameRate); michael@0: MOZ_ASSERT(mOMX != nullptr); michael@0: if (mOMX == nullptr) { michael@0: return WEBRTC_VIDEO_CODEC_UNINITIALIZED; michael@0: } michael@0: michael@0: mOMX->SetBitrate(aBitRate); michael@0: michael@0: return WEBRTC_VIDEO_CODEC_OK; michael@0: } michael@0: michael@0: // Decoder. michael@0: WebrtcOMXH264VideoDecoder::WebrtcOMXH264VideoDecoder() michael@0: : mCallback(nullptr) michael@0: , mOMX(nullptr) michael@0: { michael@0: CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p will be constructed", this); michael@0: } michael@0: michael@0: int32_t michael@0: WebrtcOMXH264VideoDecoder::InitDecode(const webrtc::VideoCodec* aCodecSettings, michael@0: int32_t aNumOfCores) michael@0: { michael@0: CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p init OMX:%p", this, mOMX.get()); michael@0: michael@0: // Defer configuration until SPS/PPS NALUs (where actual decoder config michael@0: // values can be extracted) are received. 
michael@0: michael@0: return WEBRTC_VIDEO_CODEC_OK; michael@0: } michael@0: michael@0: int32_t michael@0: WebrtcOMXH264VideoDecoder::Decode(const webrtc::EncodedImage& aInputImage, michael@0: bool aMissingFrames, michael@0: const webrtc::RTPFragmentationHeader* aFragmentation, michael@0: const webrtc::CodecSpecificInfo* aCodecSpecificInfo, michael@0: int64_t aRenderTimeMs) michael@0: { michael@0: if (aInputImage._length== 0 || !aInputImage._buffer) { michael@0: return WEBRTC_VIDEO_CODEC_ERROR; michael@0: } michael@0: michael@0: ALOGE("WebrtcOMXH264VideoDecoder:%p will decode", this); michael@0: michael@0: bool configured = !!mOMX; michael@0: if (!configured) { michael@0: // Search for SPS/PPS NALUs in input to get decoder config. michael@0: sp input = new ABuffer(aInputImage._buffer, aInputImage._length); michael@0: sp paramSets = WebrtcOMXDecoder::ParseParamSets(input); michael@0: if (NS_WARN_IF(paramSets == nullptr)) { michael@0: // Cannot config decoder because SPS/PPS NALUs haven't been seen. michael@0: return WEBRTC_VIDEO_CODEC_UNINITIALIZED; michael@0: } michael@0: RefPtr omx = new WebrtcOMXDecoder(MEDIA_MIMETYPE_VIDEO_AVC); michael@0: status_t result = omx->ConfigureWithParamSets(paramSets); michael@0: if (NS_WARN_IF(result != OK)) { michael@0: return WEBRTC_VIDEO_CODEC_UNINITIALIZED; michael@0: } michael@0: CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p start OMX", this); michael@0: mOMX = omx; michael@0: } michael@0: michael@0: bool feedFrame = true; michael@0: while (feedFrame) { michael@0: int64_t timeUs; michael@0: status_t err = mOMX->FillInput(aInputImage, !configured, aRenderTimeMs, mCallback); michael@0: feedFrame = (err == -EAGAIN); // No input buffer available. Try again. 
michael@0: } michael@0: michael@0: return WEBRTC_VIDEO_CODEC_OK; michael@0: } michael@0: michael@0: int32_t michael@0: WebrtcOMXH264VideoDecoder::RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* aCallback) michael@0: { michael@0: CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p set callback:%p", this, aCallback); michael@0: MOZ_ASSERT(aCallback); michael@0: mCallback = aCallback; michael@0: michael@0: return WEBRTC_VIDEO_CODEC_OK; michael@0: } michael@0: michael@0: int32_t michael@0: WebrtcOMXH264VideoDecoder::Release() michael@0: { michael@0: CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p will be released", this); michael@0: michael@0: mOMX = nullptr; michael@0: michael@0: return WEBRTC_VIDEO_CODEC_OK; michael@0: } michael@0: michael@0: WebrtcOMXH264VideoDecoder::~WebrtcOMXH264VideoDecoder() michael@0: { michael@0: CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p will be destructed", this); michael@0: Release(); michael@0: } michael@0: michael@0: int32_t michael@0: WebrtcOMXH264VideoDecoder::Reset() michael@0: { michael@0: CODEC_LOGW("WebrtcOMXH264VideoDecoder::Reset() will NOT reset decoder"); michael@0: return WEBRTC_VIDEO_CODEC_OK; michael@0: } michael@0: michael@0: }