Fri, 16 Jan 2015 04:50:19 +0100
Replace the accessor implementation with direct member state manipulation, as
requested in https://trac.torproject.org/projects/tor/ticket/9701#comment:32
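For illustration only, a minimal sketch of the general pattern that message describes, using a hypothetical Counter class; the members and accessors actually touched by this change are not identified here:

    // Hypothetical illustration -- not code from this patch.
    class Counter {
      int mCount = 0;
    public:
      // Before the change: state is mutated through an accessor.
      void SetCount(int aCount) { mCount = aCount; }
      void ResetViaAccessor() { SetCount(0); }
      // After the change: the member is written directly.
      void ResetDirectly() { mCount = 0; }
    };
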
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaDecoderStateMachine.h"
#include "AbstractMediaDecoder.h"
#include "RawReader.h"
#include "RawDecoder.h"
#include "VideoUtils.h"
#include "nsISeekableStream.h"
#include "gfx2DGlue.h"

using namespace mozilla;

RawReader::RawReader(AbstractMediaDecoder* aDecoder)
  : MediaDecoderReader(aDecoder),
    mCurrentFrame(0), mFrameSize(0)
{
  MOZ_COUNT_CTOR(RawReader);
}

RawReader::~RawReader()
{
  MOZ_COUNT_DTOR(RawReader);
}

nsresult RawReader::Init(MediaDecoderReader* aCloneDonor)
{
  return NS_OK;
}

nsresult RawReader::ResetDecode()
{
  mCurrentFrame = 0;
  return MediaDecoderReader::ResetDecode();
}

nsresult RawReader::ReadMetadata(MediaInfo* aInfo,
                                 MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(),
               "Should be on decode thread.");

  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");

  if (!ReadFromResource(resource, reinterpret_cast<uint8_t*>(&mMetadata),
                        sizeof(mMetadata)))
    return NS_ERROR_FAILURE;

  // Validate the header
  if (!(mMetadata.headerPacketID == 0 /* Packet ID of 0 for the header */ &&
        mMetadata.codecID == RAW_ID /* "YUV" */ &&
        mMetadata.majorVersion == 0 &&
        mMetadata.minorVersion == 1))
    return NS_ERROR_FAILURE;

  CheckedUint32 dummy = CheckedUint32(static_cast<uint32_t>(mMetadata.frameWidth)) *
                        static_cast<uint32_t>(mMetadata.frameHeight);
  NS_ENSURE_TRUE(dummy.isValid(), NS_ERROR_FAILURE);

  if (mMetadata.aspectDenominator == 0 ||
      mMetadata.framerateDenominator == 0)
    return NS_ERROR_FAILURE; // Invalid data

  // Determine and verify frame display size.
  float pixelAspectRatio = static_cast<float>(mMetadata.aspectNumerator) /
                           mMetadata.aspectDenominator;
  nsIntSize display(mMetadata.frameWidth, mMetadata.frameHeight);
  ScaleDisplayByAspectRatio(display, pixelAspectRatio);
  mPicture = nsIntRect(0, 0, mMetadata.frameWidth, mMetadata.frameHeight);
  nsIntSize frameSize(mMetadata.frameWidth, mMetadata.frameHeight);
  if (!IsValidVideoRegion(frameSize, mPicture, display)) {
    // Video track's frame sizes will overflow. Fail.
    return NS_ERROR_FAILURE;
  }

  mInfo.mVideo.mHasVideo = true;
  mInfo.mVideo.mDisplay = display;

  mFrameRate = static_cast<float>(mMetadata.framerateNumerator) /
               mMetadata.framerateDenominator;

  // Make some sanity checks
  if (mFrameRate > 45 ||
      mFrameRate == 0 ||
      pixelAspectRatio == 0 ||
      mMetadata.frameWidth > 2000 ||
      mMetadata.frameHeight > 2000 ||
      mMetadata.chromaChannelBpp != 4 ||
      mMetadata.lumaChannelBpp != 8 ||
      mMetadata.colorspace != 1 /* 4:2:0 */)
    return NS_ERROR_FAILURE;

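  // Each stored frame is a packed 4:2:0 image (8 bpp luma plus 4 bpp chroma,
  // i.e. 12 bits per pixel) preceded by a RawPacketHeader.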
  mFrameSize = mMetadata.frameWidth * mMetadata.frameHeight *
    (mMetadata.lumaChannelBpp + mMetadata.chromaChannelBpp) / 8.0 +
    sizeof(RawPacketHeader);

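  // If the resource length is known, estimate the duration from the number of
  // frames that follow the file header.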
  int64_t length = resource->GetLength();
  if (length != -1) {
    ReentrantMonitorAutoEnter autoMonitor(mDecoder->GetReentrantMonitor());
    mDecoder->SetMediaDuration(USECS_PER_S *
                               (length - sizeof(RawVideoHeader)) /
                               (mFrameSize * mFrameRate));
  }

  *aInfo = mInfo;

  *aTags = nullptr;

  return NS_OK;
}

bool RawReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnStateMachineThread() || mDecoder->OnDecodeThread(),
               "Should be on state machine thread or decode thread.");
  // Raw streams carry no audio track, so there is never audio data to decode.
  return false;
}

// Helper method that either reads until it gets aLength bytes
// or returns false
bool RawReader::ReadFromResource(MediaResource *aResource, uint8_t* aBuf,
                                 uint32_t aLength)
{
  while (aLength > 0) {
    uint32_t bytesRead = 0;
    nsresult rv;

    rv = aResource->Read(reinterpret_cast<char*>(aBuf), aLength, &bytesRead);
    NS_ENSURE_SUCCESS(rv, false);

    if (bytesRead == 0) {
      return false;
    }

    aLength -= bytesRead;
    aBuf += bytesRead;
  }

  return true;
}

bool RawReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                 int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(),
               "Should be on decode thread.");

  // Record number of frames decoded and parsed. Automatically update the
  // stats counters using the AutoNotifyDecoded stack-based class.
  uint32_t parsed = 0, decoded = 0;
  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);

  if (!mFrameSize)
    return false; // Metadata read failed. We should refuse to play.

  int64_t currentFrameTime = USECS_PER_S * mCurrentFrame / mFrameRate;
  uint32_t length = mFrameSize - sizeof(RawPacketHeader);

  nsAutoArrayPtr<uint8_t> buffer(new uint8_t[length]);
  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");

  // We're always decoding one frame when called
  while(true) {
    RawPacketHeader header;

    // Read in a packet header and validate
    if (!(ReadFromResource(resource, reinterpret_cast<uint8_t*>(&header),
                           sizeof(header))) ||
        !(header.packetID == 0xFF && header.codecID == RAW_ID /* "YUV" */)) {
      return false;
    }

    if (!ReadFromResource(resource, buffer, length)) {
      return false;
    }
    parsed++;

    if (currentFrameTime >= aTimeThreshold)
      break;

    mCurrentFrame++;
    currentFrameTime += static_cast<double>(USECS_PER_S) / mFrameRate;
  }

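  // Wrap the decoded bytes as a planar YCbCr image: a full-resolution Y plane
  // followed by half-width, half-height Cb and Cr planes (4:2:0).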
  VideoData::YCbCrBuffer b;
  b.mPlanes[0].mData = buffer;
  b.mPlanes[0].mStride = mMetadata.frameWidth * mMetadata.lumaChannelBpp / 8.0;
  b.mPlanes[0].mHeight = mMetadata.frameHeight;
  b.mPlanes[0].mWidth = mMetadata.frameWidth;
  b.mPlanes[0].mOffset = b.mPlanes[0].mSkip = 0;

  uint32_t cbcrStride = mMetadata.frameWidth * mMetadata.chromaChannelBpp / 8.0;

  b.mPlanes[1].mData = buffer + mMetadata.frameHeight * b.mPlanes[0].mStride;
  b.mPlanes[1].mStride = cbcrStride;
  b.mPlanes[1].mHeight = mMetadata.frameHeight / 2;
  b.mPlanes[1].mWidth = mMetadata.frameWidth / 2;
  b.mPlanes[1].mOffset = b.mPlanes[1].mSkip = 0;

  b.mPlanes[2].mData = b.mPlanes[1].mData + mMetadata.frameHeight * cbcrStride / 2;
  b.mPlanes[2].mStride = cbcrStride;
  b.mPlanes[2].mHeight = mMetadata.frameHeight / 2;
  b.mPlanes[2].mWidth = mMetadata.frameWidth / 2;
  b.mPlanes[2].mOffset = b.mPlanes[2].mSkip = 0;

  VideoData *v = VideoData::Create(mInfo.mVideo,
                                   mDecoder->GetImageContainer(),
                                   -1,
                                   currentFrameTime,
                                   (USECS_PER_S / mFrameRate),
                                   b,
                                   1, // In raw video every frame is a keyframe
                                   -1,
                                   ToIntRect(mPicture));
  if (!v)
    return false;

  mVideoQueue.Push(v);
  mCurrentFrame++;
  decoded++;
  currentFrameTime += USECS_PER_S / mFrameRate;

  return true;
}

nsresult RawReader::Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(),
               "Should be on decode thread.");

  MediaResource *resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");

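  // Remember the current frame so it can be restored if the seek fails, then
  // convert the target time into a frame index and a byte offset.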
  uint32_t frame = mCurrentFrame;
  if (aTime >= UINT_MAX)
    return NS_ERROR_FAILURE;
  mCurrentFrame = aTime * mFrameRate / USECS_PER_S;

  CheckedUint32 offset = CheckedUint32(mCurrentFrame) * mFrameSize;
  offset += sizeof(RawVideoHeader);
  NS_ENSURE_TRUE(offset.isValid(), NS_ERROR_FAILURE);

  nsresult rv = resource->Seek(nsISeekableStream::NS_SEEK_SET, offset.value());
  NS_ENSURE_SUCCESS(rv, rv);

  mVideoQueue.Reset();

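  // Decode forward from the seek offset, dropping frames that end before the
  // target time, until a frame suitable for display has been queued.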
  while(mVideoQueue.GetSize() == 0) {
    bool keyframeSkip = false;
    if (!DecodeVideoFrame(keyframeSkip, 0)) {
      mCurrentFrame = frame;
      return NS_ERROR_FAILURE;
    }

    {
      ReentrantMonitorAutoEnter autoMonitor(mDecoder->GetReentrantMonitor());
      if (mDecoder->IsShutdown()) {
        mCurrentFrame = frame;
        return NS_ERROR_FAILURE;
      }
    }

    nsAutoPtr<VideoData> video(mVideoQueue.PeekFront());
    if (video && video->GetEndTime() < aTime) {
      mVideoQueue.PopFront();
      video = nullptr;
    } else {
      video.forget();
    }
  }

  return NS_OK;
}

nsresult RawReader::GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime)
{
  // Buffered-range reporting is not implemented for raw streams; aBuffered is
  // left untouched.
  return NS_OK;
}