content/media/fmp4/BlankDecoderModule.cpp

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Fri, 16 Jan 2015 18:13:44 +0100
branch       TOR_BUG_9701
changeset    14:925c144e1f1f
permissions  -rw-r--r--

Integrate suggestion from review to improve consistency with existing code.

/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaDecoderReader.h"
#include "PlatformDecoderModule.h"
#include "nsRect.h"
#include "mozilla/RefPtr.h"
#include "mozilla/CheckedInt.h"
#include "VideoUtils.h"
#include "ImageContainer.h"
#include "mp4_demuxer/mp4_demuxer.h"
#include "MediaTaskQueue.h"

namespace mozilla {

// Decoder that uses a passed in object's Create function to create blank
// MediaData objects.
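// For each demuxed sample, Input() dispatches an OutputEvent to the supplied
// task queue; the event asks the creator for a blank MediaData with the
// sample's timing and hands it back through the MediaDataDecoderCallback.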
template<class BlankMediaDataCreator>
class BlankMediaDataDecoder : public MediaDataDecoder {
public:

  BlankMediaDataDecoder(BlankMediaDataCreator* aCreator,
                        MediaTaskQueue* aTaskQueue,
                        MediaDataDecoderCallback* aCallback)
    : mCreator(aCreator)
    , mTaskQueue(aTaskQueue)
    , mCallback(aCallback)
  {
  }

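  // Init/Shutdown/Flush/Drain are no-ops: a blank decoder keeps no codec
  // state, so there is nothing to set up, flush or tear down.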
  virtual nsresult Init() MOZ_OVERRIDE {
    return NS_OK;
  }

  virtual nsresult Shutdown() MOZ_OVERRIDE {
    return NS_OK;
  }

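  // Runnable that runs on the decoder's task queue. It takes ownership of the
  // demuxed sample, asks the creator for a blank MediaData with the sample's
  // timing, and forwards the result to the callback.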
  class OutputEvent : public nsRunnable {
  public:
    OutputEvent(mp4_demuxer::MP4Sample* aSample,
                MediaDataDecoderCallback* aCallback,
                BlankMediaDataCreator* aCreator)
      : mSample(aSample)
      , mCreator(aCreator)
      , mCallback(aCallback)
    {
    }
    NS_IMETHOD Run() MOZ_OVERRIDE
    {
      mCallback->Output(mCreator->Create(mSample->composition_timestamp,
                                         mSample->duration,
                                         mSample->byte_offset));
      return NS_OK;
    }
  private:
    nsAutoPtr<mp4_demuxer::MP4Sample> mSample;
    BlankMediaDataCreator* mCreator;
    MediaDataDecoderCallback* mCallback;
  };

  virtual nsresult Input(mp4_demuxer::MP4Sample* aSample) MOZ_OVERRIDE
  {
    // The MediaDataDecoder must delete the sample when we're finished
    // with it, so the OutputEvent stores it in an nsAutoPtr and deletes
    // it once it's run.
    RefPtr<nsIRunnable> r(new OutputEvent(aSample, mCallback, mCreator));
    mTaskQueue->Dispatch(r);
    return NS_OK;
  }

  virtual nsresult Flush() MOZ_OVERRIDE {
    return NS_OK;
  }

  virtual nsresult Drain() MOZ_OVERRIDE {
    return NS_OK;
  }

private:
  nsAutoPtr<BlankMediaDataCreator> mCreator;
  nsAutoPtr<MediaData> mOutput;
  RefPtr<MediaTaskQueue> mTaskQueue;
  MediaDataDecoderCallback* mCallback;
};

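// Creates "blank" video output: a zero-filled 4:2:0 YCbCr frame of the
// configured dimensions, wrapped in a VideoData carrying the sample's timing.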
class BlankVideoDataCreator {
public:
  BlankVideoDataCreator(uint32_t aFrameWidth,
                        uint32_t aFrameHeight,
                        layers::ImageContainer* aImageContainer)
    : mFrameWidth(aFrameWidth)
    , mFrameHeight(aFrameHeight)
    , mImageContainer(aImageContainer)
  {
    mInfo.mDisplay = nsIntSize(mFrameWidth, mFrameHeight);
    mPicture = gfx::IntRect(0, 0, mFrameWidth, mFrameHeight);
  }

  MediaData* Create(Microseconds aDTS,
                    Microseconds aDuration,
                    int64_t aOffsetInStream)
  {
    // Create a fake YUV buffer in a 420 format. That is, an 8bpp Y plane,
    // with a U and V plane that are half the size of the Y plane, i.e 8 bit,
    // 2x2 subsampled. Have the data pointers of each frame point to the
    // first plane, they'll always be zero'd memory anyway.
    // VideoData::Create() copies the planes into its own image, so let the
    // scratch buffer be released when this function returns rather than
    // leaking one allocation per frame.
    nsAutoArrayPtr<uint8_t> frame(new uint8_t[mFrameWidth * mFrameHeight]);
    memset(frame, 0, mFrameWidth * mFrameHeight);
    VideoData::YCbCrBuffer buffer;

    // Y plane.
    buffer.mPlanes[0].mData = frame;
    buffer.mPlanes[0].mStride = mFrameWidth;
    buffer.mPlanes[0].mHeight = mFrameHeight;
    buffer.mPlanes[0].mWidth = mFrameWidth;
    buffer.mPlanes[0].mOffset = 0;
    buffer.mPlanes[0].mSkip = 0;

    // Cb plane.
    buffer.mPlanes[1].mData = frame;
    buffer.mPlanes[1].mStride = mFrameWidth / 2;
    buffer.mPlanes[1].mHeight = mFrameHeight / 2;
    buffer.mPlanes[1].mWidth = mFrameWidth / 2;
    buffer.mPlanes[1].mOffset = 0;
    buffer.mPlanes[1].mSkip = 0;

    // Cr plane.
    buffer.mPlanes[2].mData = frame;
    buffer.mPlanes[2].mStride = mFrameWidth / 2;
    buffer.mPlanes[2].mHeight = mFrameHeight / 2;
    buffer.mPlanes[2].mWidth = mFrameWidth / 2;
    buffer.mPlanes[2].mOffset = 0;
    buffer.mPlanes[2].mSkip = 0;

    return VideoData::Create(mInfo,
                             mImageContainer,
                             nullptr,
                             aOffsetInStream,
                             aDTS,
                             aDuration,
                             buffer,
                             true,
                             aDTS,
                             mPicture);
  }
private:
  VideoInfo mInfo;
  gfx::IntRect mPicture;
  uint32_t mFrameWidth;
  uint32_t mFrameHeight;
  RefPtr<layers::ImageContainer> mImageContainer;
};


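// Creates "blank" audio output: buffers filled with a 440 Hz (A4) sine tone.
// mFrameSum carries the phase from one Create() call to the next so the tone
// is continuous across buffers.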
class BlankAudioDataCreator {
public:
  BlankAudioDataCreator(uint32_t aChannelCount,
                        uint32_t aSampleRate,
                        uint16_t aBitsPerSample)
    : mFrameSum(0)
    , mChannelCount(aChannelCount)
    , mSampleRate(aSampleRate)
  {
  }

  MediaData* Create(Microseconds aDTS,
                    Microseconds aDuration,
                    int64_t aOffsetInStream)
  {
    // Convert duration to frames. We add 1 to duration to account for
    // rounding errors, so we get a consistent tone.
    CheckedInt64 frames = UsecsToFrames(aDuration+1, mSampleRate);
    if (!frames.isValid() ||
        !mChannelCount ||
        !mSampleRate ||
        frames.value() > (UINT32_MAX / mChannelCount)) {
      return nullptr;
    }
    AudioDataValue* samples = new AudioDataValue[frames.value() * mChannelCount];
    // Fill the sound buffer with an A4 tone.
    static const float pi = 3.14159265f;
    static const float noteHz = 440.0f;
    for (int i = 0; i < frames.value(); i++) {
      float f = sin(2 * pi * noteHz * mFrameSum / mSampleRate);
      for (unsigned c = 0; c < mChannelCount; c++) {
        samples[i * mChannelCount + c] = AudioDataValue(f);
      }
      mFrameSum++;
    }
    return new AudioData(aOffsetInStream,
                         aDTS,
                         aDuration,
                         uint32_t(frames.value()),
                         samples,
                         mChannelCount);
  }

private:
  int64_t mFrameSum;
  uint32_t mChannelCount;
  uint32_t mSampleRate;
};

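// PlatformDecoderModule that performs no real decoding: every H.264 sample
// becomes a blank video frame and every AAC sample becomes a buffer of test
// tone, e.g. for exercising the MP4 reader pipeline without a real decoder.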
class BlankDecoderModule : public PlatformDecoderModule {
public:

  // Called when the decoders have shut down. Main thread only.
  virtual nsresult Shutdown() MOZ_OVERRIDE {
    return NS_OK;
  }

  // Decode thread.
  virtual MediaDataDecoder* CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
                                              layers::LayersBackend aLayersBackend,
                                              layers::ImageContainer* aImageContainer,
                                              MediaTaskQueue* aVideoTaskQueue,
                                              MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE {
    BlankVideoDataCreator* decoder = new BlankVideoDataCreator(aConfig.visible_rect().width(),
                                                               aConfig.visible_rect().height(),
                                                               aImageContainer);
    return new BlankMediaDataDecoder<BlankVideoDataCreator>(decoder,
                                                            aVideoTaskQueue,
                                                            aCallback);
  }

  // Decode thread.
  virtual MediaDataDecoder* CreateAACDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig,
                                             MediaTaskQueue* aAudioTaskQueue,
                                             MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE {
    BlankAudioDataCreator* decoder =
      new BlankAudioDataCreator(ChannelLayoutToChannelCount(aConfig.channel_layout()),
                                aConfig.samples_per_second(),
                                aConfig.bits_per_channel());
    return new BlankMediaDataDecoder<BlankAudioDataCreator>(decoder,
                                                            aAudioTaskQueue,
                                                            aCallback);
  }
};

PlatformDecoderModule* CreateBlankDecoderModule()
{
  return new BlankDecoderModule();
}

} // namespace mozilla
