/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaDecoderStateMachine.h"
#include "AbstractMediaDecoder.h"
#include "RawReader.h"
#include "RawDecoder.h"
#include "VideoUtils.h"
#include "nsISeekableStream.h"
#include "gfx2DGlue.h"

using namespace mozilla;

RawReader::RawReader(AbstractMediaDecoder* aDecoder)
  : MediaDecoderReader(aDecoder),
    mCurrentFrame(0), mFrameSize(0)
{
  MOZ_COUNT_CTOR(RawReader);
}

RawReader::~RawReader()
{
  MOZ_COUNT_DTOR(RawReader);
}

nsresult RawReader::Init(MediaDecoderReader* aCloneDonor)
{
  return NS_OK;
}

nsresult RawReader::ResetDecode()
{
  mCurrentFrame = 0;
  return MediaDecoderReader::ResetDecode();
}

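// Reads and validates the RawVideoHeader at the start of the resource,
// fills in mInfo and mPicture from the frame geometry, and derives the
// frame rate, the per-packet size, and (when the resource length is
// known) an estimated media duration.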
nsresult RawReader::ReadMetadata(MediaInfo* aInfo,
                                 MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(),
               "Should be on decode thread.");

  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");

  if (!ReadFromResource(resource, reinterpret_cast<uint8_t*>(&mMetadata),
                        sizeof(mMetadata)))
    return NS_ERROR_FAILURE;

  // Validate the header
  if (!(mMetadata.headerPacketID == 0 /* Packet ID of 0 for the header */ &&
        mMetadata.codecID == RAW_ID /* "YUV" */ &&
        mMetadata.majorVersion == 0 &&
        mMetadata.minorVersion == 1))
    return NS_ERROR_FAILURE;

  CheckedUint32 dummy = CheckedUint32(static_cast<uint32_t>(mMetadata.frameWidth)) *
                          static_cast<uint32_t>(mMetadata.frameHeight);
  NS_ENSURE_TRUE(dummy.isValid(), NS_ERROR_FAILURE);

  if (mMetadata.aspectDenominator == 0 ||
      mMetadata.framerateDenominator == 0)
    return NS_ERROR_FAILURE; // Invalid data

  // Determine and verify frame display size.
  float pixelAspectRatio = static_cast<float>(mMetadata.aspectNumerator) /
                             mMetadata.aspectDenominator;
  nsIntSize display(mMetadata.frameWidth, mMetadata.frameHeight);
  ScaleDisplayByAspectRatio(display, pixelAspectRatio);
  mPicture = nsIntRect(0, 0, mMetadata.frameWidth, mMetadata.frameHeight);
  nsIntSize frameSize(mMetadata.frameWidth, mMetadata.frameHeight);
  if (!IsValidVideoRegion(frameSize, mPicture, display)) {
    // Video track's frame sizes will overflow. Fail.
    return NS_ERROR_FAILURE;
  }

  mInfo.mVideo.mHasVideo = true;
  mInfo.mVideo.mDisplay = display;

  mFrameRate = static_cast<float>(mMetadata.framerateNumerator) /
               mMetadata.framerateDenominator;

  // Make some sanity checks
  if (mFrameRate > 45 ||
      mFrameRate == 0 ||
      pixelAspectRatio == 0 ||
      mMetadata.frameWidth > 2000 ||
      mMetadata.frameHeight > 2000 ||
      mMetadata.chromaChannelBpp != 4 ||
      mMetadata.lumaChannelBpp != 8 ||
      mMetadata.colorspace != 1 /* 4:2:0 */)
    return NS_ERROR_FAILURE;

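  // Each data packet carries one complete 4:2:0 image plus a packet header.
  // Illustrative example (not taken from any particular file): a 320x240
  // stream with 8 bpp luma and 4 bpp average chroma needs
  // 320 * 240 * (8 + 4) / 8 = 115200 bytes of image data per packet.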
  mFrameSize = mMetadata.frameWidth * mMetadata.frameHeight *
               (mMetadata.lumaChannelBpp + mMetadata.chromaChannelBpp) / 8.0 +
               sizeof(RawPacketHeader);

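  // If the resource length is known, estimate the duration as the number of
  // whole packets after the RawVideoHeader divided by the frame rate,
  // converted to microseconds.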
  int64_t length = resource->GetLength();
  if (length != -1) {
    ReentrantMonitorAutoEnter autoMonitor(mDecoder->GetReentrantMonitor());
    mDecoder->SetMediaDuration(USECS_PER_S *
                               (length - sizeof(RawVideoHeader)) /
                               (mFrameSize * mFrameRate));
  }

  *aInfo = mInfo;

  *aTags = nullptr;

  return NS_OK;
}

bool RawReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnStateMachineThread() || mDecoder->OnDecodeThread(),
               "Should be on state machine thread or decode thread.");
  return false;
}

// Helper method that reads exactly aLength bytes from aResource into aBuf,
// returning false on a read error or end of stream.
bool RawReader::ReadFromResource(MediaResource *aResource, uint8_t* aBuf,
                                 uint32_t aLength)
{
  while (aLength > 0) {
    uint32_t bytesRead = 0;
    nsresult rv;

    rv = aResource->Read(reinterpret_cast<char*>(aBuf), aLength, &bytesRead);
    NS_ENSURE_SUCCESS(rv, false);

    if (bytesRead == 0) {
      return false;
    }

    aLength -= bytesRead;
    aBuf += bytesRead;
  }

  return true;
}

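// Reads raw packets from the resource, skipping frames whose presentation
// time is below aTimeThreshold, then wraps the last frame read into a
// VideoData and pushes it onto mVideoQueue. aKeyframeSkip is unused here
// since every raw frame is a keyframe.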
bool RawReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                 int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(),
               "Should be on decode thread.");

  // Record number of frames decoded and parsed. Automatically update the
  // stats counters using the AutoNotifyDecoded stack-based class.
  uint32_t parsed = 0, decoded = 0;
  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);

  if (!mFrameSize)
    return false; // Metadata read failed. We should refuse to play.

  int64_t currentFrameTime = USECS_PER_S * mCurrentFrame / mFrameRate;
  uint32_t length = mFrameSize - sizeof(RawPacketHeader);

  nsAutoArrayPtr<uint8_t> buffer(new uint8_t[length]);
  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");

  // We're always decoding one frame when called
  while(true) {
    RawPacketHeader header;

    // Read in a packet header and validate
    if (!(ReadFromResource(resource, reinterpret_cast<uint8_t*>(&header),
                           sizeof(header))) ||
        !(header.packetID == 0xFF && header.codecID == RAW_ID /* "YUV" */)) {
      return false;
    }

    if (!ReadFromResource(resource, buffer, length)) {
      return false;
    }

    parsed++;

    if (currentFrameTime >= aTimeThreshold)
      break;

    mCurrentFrame++;
    currentFrameTime += static_cast<double>(USECS_PER_S) / mFrameRate;
  }

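  // Wrap the packet payload as planar 4:2:0 YCbCr: the full-resolution Y
  // plane comes first, followed by the quarter-resolution Cb and Cr planes.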
  VideoData::YCbCrBuffer b;
  b.mPlanes[0].mData = buffer;
  b.mPlanes[0].mStride = mMetadata.frameWidth * mMetadata.lumaChannelBpp / 8.0;
  b.mPlanes[0].mHeight = mMetadata.frameHeight;
  b.mPlanes[0].mWidth = mMetadata.frameWidth;
  b.mPlanes[0].mOffset = b.mPlanes[0].mSkip = 0;

  uint32_t cbcrStride = mMetadata.frameWidth * mMetadata.chromaChannelBpp / 8.0;

  b.mPlanes[1].mData = buffer + mMetadata.frameHeight * b.mPlanes[0].mStride;
  b.mPlanes[1].mStride = cbcrStride;
  b.mPlanes[1].mHeight = mMetadata.frameHeight / 2;
  b.mPlanes[1].mWidth = mMetadata.frameWidth / 2;
  b.mPlanes[1].mOffset = b.mPlanes[1].mSkip = 0;

  b.mPlanes[2].mData = b.mPlanes[1].mData + mMetadata.frameHeight * cbcrStride / 2;
  b.mPlanes[2].mStride = cbcrStride;
  b.mPlanes[2].mHeight = mMetadata.frameHeight / 2;
  b.mPlanes[2].mWidth = mMetadata.frameWidth / 2;
  b.mPlanes[2].mOffset = b.mPlanes[2].mSkip = 0;

  VideoData *v = VideoData::Create(mInfo.mVideo,
                                   mDecoder->GetImageContainer(),
                                   -1,
                                   currentFrameTime,
                                   (USECS_PER_S / mFrameRate),
                                   b,
                                   1, // In raw video every frame is a keyframe
                                   -1,
                                   ToIntRect(mPicture));
  if (!v)
    return false;

  mVideoQueue.Push(v);
  mCurrentFrame++;
  decoded++;
  currentFrameTime += USECS_PER_S / mFrameRate;

  return true;
}

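// Seeks by computing the target frame index from aTime and the frame rate,
// then byte-offsetting directly into the resource (header size plus frame
// index times packet size). Frames that end before aTime are then decoded
// and discarded until the queue holds a frame ending at or after aTime.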
nsresult RawReader::Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(),
               "Should be on decode thread.");

  MediaResource *resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");

  uint32_t frame = mCurrentFrame;
  if (aTime >= UINT_MAX)
    return NS_ERROR_FAILURE;
  mCurrentFrame = aTime * mFrameRate / USECS_PER_S;

  CheckedUint32 offset = CheckedUint32(mCurrentFrame) * mFrameSize;
  offset += sizeof(RawVideoHeader);
  NS_ENSURE_TRUE(offset.isValid(), NS_ERROR_FAILURE);

  nsresult rv = resource->Seek(nsISeekableStream::NS_SEEK_SET, offset.value());
  NS_ENSURE_SUCCESS(rv, rv);

  mVideoQueue.Reset();

  while(mVideoQueue.GetSize() == 0) {
    bool keyframeSkip = false;
    if (!DecodeVideoFrame(keyframeSkip, 0)) {
      mCurrentFrame = frame;
      return NS_ERROR_FAILURE;
    }

    {
      ReentrantMonitorAutoEnter autoMonitor(mDecoder->GetReentrantMonitor());
      if (mDecoder->IsShutdown()) {
        mCurrentFrame = frame;
        return NS_ERROR_FAILURE;
      }
    }

    nsAutoPtr<VideoData> video(mVideoQueue.PeekFront());
    if (video && video->GetEndTime() < aTime) {
      mVideoQueue.PopFront();
      video = nullptr;
    } else {
      video.forget();
    }
  }

  return NS_OK;
}

nsresult RawReader::GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime)
{
  return NS_OK;
}