--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/content/media/raw/RawReader.cpp Wed Dec 31 06:09:35 2014 +0100
@@ -0,0 +1,283 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaDecoderStateMachine.h"
+#include "AbstractMediaDecoder.h"
+#include "RawReader.h"
+#include "RawDecoder.h"
+#include "VideoUtils.h"
+#include "nsISeekableStream.h"
+#include "gfx2DGlue.h"
+
+using namespace mozilla;
+
+RawReader::RawReader(AbstractMediaDecoder* aDecoder)
+  : MediaDecoderReader(aDecoder),
+    mCurrentFrame(0), mFrameSize(0)
+{
+  MOZ_COUNT_CTOR(RawReader);
+}
+
+RawReader::~RawReader()
+{
+  MOZ_COUNT_DTOR(RawReader);
+}
+
+nsresult RawReader::Init(MediaDecoderReader* aCloneDonor)
+{
+  return NS_OK;
+}
+
+nsresult RawReader::ResetDecode()
+{
+  mCurrentFrame = 0;
+  return MediaDecoderReader::ResetDecode();
+}
+
+nsresult RawReader::ReadMetadata(MediaInfo* aInfo,
+                                 MetadataTags** aTags)
+{
+  NS_ASSERTION(mDecoder->OnDecodeThread(),
+               "Should be on decode thread.");
+
+  MediaResource* resource = mDecoder->GetResource();
+  NS_ASSERTION(resource, "Decoder has no media resource");
+
+  if (!ReadFromResource(resource, reinterpret_cast<uint8_t*>(&mMetadata),
+                        sizeof(mMetadata)))
+    return NS_ERROR_FAILURE;
+
+  // Validate the header
+  if (!(mMetadata.headerPacketID == 0 /* Packet ID of 0 for the header */ &&
+        mMetadata.codecID == RAW_ID /* "YUV" */ &&
+        mMetadata.majorVersion == 0 &&
+        mMetadata.minorVersion == 1))
+    return NS_ERROR_FAILURE;
+
+  CheckedUint32 dummy = CheckedUint32(static_cast<uint32_t>(mMetadata.frameWidth)) *
+                        static_cast<uint32_t>(mMetadata.frameHeight);
+  NS_ENSURE_TRUE(dummy.isValid(), NS_ERROR_FAILURE);
+
+  if (mMetadata.aspectDenominator == 0 ||
+      mMetadata.framerateDenominator == 0)
+    return NS_ERROR_FAILURE; // Invalid data
+
+  // Determine and verify frame display size.
+  float pixelAspectRatio = static_cast<float>(mMetadata.aspectNumerator) /
+                           mMetadata.aspectDenominator;
+  nsIntSize display(mMetadata.frameWidth, mMetadata.frameHeight);
+  ScaleDisplayByAspectRatio(display, pixelAspectRatio);
+  mPicture = nsIntRect(0, 0, mMetadata.frameWidth, mMetadata.frameHeight);
+  nsIntSize frameSize(mMetadata.frameWidth, mMetadata.frameHeight);
+  if (!IsValidVideoRegion(frameSize, mPicture, display)) {
+    // Video track's frame sizes will overflow. Fail.
+    return NS_ERROR_FAILURE;
+  }
+
+  mInfo.mVideo.mHasVideo = true;
+  mInfo.mVideo.mDisplay = display;
+
+  mFrameRate = static_cast<float>(mMetadata.framerateNumerator) /
+               mMetadata.framerateDenominator;
+
+  // Make some sanity checks
+  if (mFrameRate > 45 ||
+      mFrameRate == 0 ||
+      pixelAspectRatio == 0 ||
+      mMetadata.frameWidth > 2000 ||
+      mMetadata.frameHeight > 2000 ||
+      mMetadata.chromaChannelBpp != 4 ||
+      mMetadata.lumaChannelBpp != 8 ||
+      mMetadata.colorspace != 1 /* 4:2:0 */)
+    return NS_ERROR_FAILURE;
+
+  mFrameSize = mMetadata.frameWidth * mMetadata.frameHeight *
+    (mMetadata.lumaChannelBpp + mMetadata.chromaChannelBpp) / 8.0 +
+    sizeof(RawPacketHeader);
+
+  int64_t length = resource->GetLength();
+  if (length != -1) {
+    ReentrantMonitorAutoEnter autoMonitor(mDecoder->GetReentrantMonitor());
+    mDecoder->SetMediaDuration(USECS_PER_S *
+                               (length - sizeof(RawVideoHeader)) /
+                               (mFrameSize * mFrameRate));
+  }
+
+  *aInfo = mInfo;
+
+  *aTags = nullptr;
+
+  return NS_OK;
+}
+
+bool RawReader::DecodeAudioData()
+{
+  NS_ASSERTION(mDecoder->OnStateMachineThread() || mDecoder->OnDecodeThread(),
+               "Should be on state machine thread or decode thread.");
+  return false;
+}
+
+// Helper method that either reads until it gets aLength bytes
+// or returns false
+bool RawReader::ReadFromResource(MediaResource *aResource, uint8_t* aBuf,
+                                 uint32_t aLength)
+{
+  while (aLength > 0) {
+    uint32_t bytesRead = 0;
+    nsresult rv;
+
+    rv = aResource->Read(reinterpret_cast<char*>(aBuf), aLength, &bytesRead);
+    NS_ENSURE_SUCCESS(rv, false);
+
+    if (bytesRead == 0) {
+      return false;
+    }
+
+    aLength -= bytesRead;
+    aBuf += bytesRead;
+  }
+
+  return true;
+}
+
+bool RawReader::DecodeVideoFrame(bool &aKeyframeSkip,
+                                 int64_t aTimeThreshold)
+{
+  NS_ASSERTION(mDecoder->OnDecodeThread(),
+               "Should be on decode thread.");
+
+  // Record number of frames decoded and parsed. Automatically update the
+  // stats counters using the AutoNotifyDecoded stack-based class.
+  uint32_t parsed = 0, decoded = 0;
+  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
+
+  if (!mFrameSize)
+    return false; // Metadata read failed. We should refuse to play.
+
+  int64_t currentFrameTime = USECS_PER_S * mCurrentFrame / mFrameRate;
+  uint32_t length = mFrameSize - sizeof(RawPacketHeader);
+
+  nsAutoArrayPtr<uint8_t> buffer(new uint8_t[length]);
+  MediaResource* resource = mDecoder->GetResource();
+  NS_ASSERTION(resource, "Decoder has no media resource");
+
+  // We're always decoding one frame when called
+  while(true) {
+    RawPacketHeader header;
+
+    // Read in a packet header and validate
+    if (!(ReadFromResource(resource, reinterpret_cast<uint8_t*>(&header),
+                           sizeof(header))) ||
+        !(header.packetID == 0xFF && header.codecID == RAW_ID /* "YUV" */)) {
+      return false;
+    }
+
+    if (!ReadFromResource(resource, buffer, length)) {
+      return false;
+    }
+
+    parsed++;
+
+    if (currentFrameTime >= aTimeThreshold)
+      break;
+
+    mCurrentFrame++;
+    currentFrameTime += static_cast<double>(USECS_PER_S) / mFrameRate;
+  }
+
+  VideoData::YCbCrBuffer b;
+  b.mPlanes[0].mData = buffer;
+  b.mPlanes[0].mStride = mMetadata.frameWidth * mMetadata.lumaChannelBpp / 8.0;
+  b.mPlanes[0].mHeight = mMetadata.frameHeight;
+  b.mPlanes[0].mWidth = mMetadata.frameWidth;
+  b.mPlanes[0].mOffset = b.mPlanes[0].mSkip = 0;
+
+  uint32_t cbcrStride = mMetadata.frameWidth * mMetadata.chromaChannelBpp / 8.0;
+
+  b.mPlanes[1].mData = buffer + mMetadata.frameHeight * b.mPlanes[0].mStride;
+  b.mPlanes[1].mStride = cbcrStride;
+  b.mPlanes[1].mHeight = mMetadata.frameHeight / 2;
+  b.mPlanes[1].mWidth = mMetadata.frameWidth / 2;
+  b.mPlanes[1].mOffset = b.mPlanes[1].mSkip = 0;
+
+  b.mPlanes[2].mData = b.mPlanes[1].mData + mMetadata.frameHeight * cbcrStride / 2;
+  b.mPlanes[2].mStride = cbcrStride;
+  b.mPlanes[2].mHeight = mMetadata.frameHeight / 2;
+  b.mPlanes[2].mWidth = mMetadata.frameWidth / 2;
+  b.mPlanes[2].mOffset = b.mPlanes[2].mSkip = 0;
+
+  VideoData *v = VideoData::Create(mInfo.mVideo,
+                                   mDecoder->GetImageContainer(),
+                                   -1,
+                                   currentFrameTime,
+                                   (USECS_PER_S / mFrameRate),
+                                   b,
+                                   1, // In raw video every frame is a keyframe
+                                   -1,
+                                   ToIntRect(mPicture));
+  if (!v)
+    return false;
+
+  mVideoQueue.Push(v);
+  mCurrentFrame++;
+  decoded++;
+  currentFrameTime += USECS_PER_S / mFrameRate;
+
+  return true;
+}
+
+nsresult RawReader::Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime)
+{
+  NS_ASSERTION(mDecoder->OnDecodeThread(),
+               "Should be on decode thread.");
+
+  MediaResource *resource = mDecoder->GetResource();
+  NS_ASSERTION(resource, "Decoder has no media resource");
+
+  uint32_t frame = mCurrentFrame;
+  if (aTime >= UINT_MAX)
+    return NS_ERROR_FAILURE;
+  mCurrentFrame = aTime * mFrameRate / USECS_PER_S;
+
+  CheckedUint32 offset = CheckedUint32(mCurrentFrame) * mFrameSize;
+  offset += sizeof(RawVideoHeader);
+  NS_ENSURE_TRUE(offset.isValid(), NS_ERROR_FAILURE);
+
+  nsresult rv = resource->Seek(nsISeekableStream::NS_SEEK_SET, offset.value());
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  mVideoQueue.Reset();
+
+  while(mVideoQueue.GetSize() == 0) {
+    bool keyframeSkip = false;
+    if (!DecodeVideoFrame(keyframeSkip, 0)) {
+      mCurrentFrame = frame;
+      return NS_ERROR_FAILURE;
+    }
+
+    {
+      ReentrantMonitorAutoEnter autoMonitor(mDecoder->GetReentrantMonitor());
+      if (mDecoder->IsShutdown()) {
+        mCurrentFrame = frame;
+        return NS_ERROR_FAILURE;
+      }
+    }
+
+    nsAutoPtr<VideoData> video(mVideoQueue.PeekFront());
+    if (video && video->GetEndTime() < aTime) {
+      mVideoQueue.PopFront();
+      video = nullptr;
+    } else {
+      video.forget();
+    }
+  }
+
+  return NS_OK;
+}
+
+nsresult RawReader::GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime)
+{
+  return NS_OK;
+}
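
Note on the packet and plane arithmetic: ReadMetadata and DecodeVideoFrame above rely on the 4:2:0 layout the sanity checks enforce (lumaChannelBpp == 8, chromaChannelBpp == 4, i.e. 1.5 bytes per pixel plus a packet header per frame). The standalone sketch below, which is not part of the patch, spells that arithmetic out; the dimensions and the header size are illustrative assumptions, not values taken from RawVideoHeader or RawPacketHeader.

    // Standalone sketch of RawReader's frame-size and plane-offset math for
    // 4:2:0 data. Compile with any C++ compiler; values are examples only.
    #include <cstdint>
    #include <cstdio>

    int main()
    {
      const uint32_t width      = 320; // example dimensions, not from the patch
      const uint32_t height     = 240;
      const uint32_t lumaBpp    = 8;   // bits/pixel for the Y plane
      const uint32_t chromaBpp  = 4;   // Cb + Cr combined, 2 bits/pixel each
      const uint32_t headerSize = 4;   // placeholder for sizeof(RawPacketHeader)

      // Bytes per frame packet: header + 1.5 bytes per pixel for 4:2:0.
      uint32_t frameSize = width * height * (lumaBpp + chromaBpp) / 8 + headerSize;

      // Plane layout inside the payload that follows the packet header.
      uint32_t lumaStride = width * lumaBpp / 8;   // == width
      uint32_t cbcrStride = width * chromaBpp / 8; // == width / 2
      uint32_t cbOffset   = height * lumaStride;                 // Y plane ends here
      uint32_t crOffset   = cbOffset + height * cbcrStride / 2;  // Cb spans h/2 rows

      printf("frameSize=%u Y@0 Cb@%u Cr@%u\n", frameSize, cbOffset, crOffset);
      return 0;
    }

The same frameSize value is what the duration estimate in ReadMetadata and the byte offset computed in Seek divide or multiply by, so any change to the layout assumptions has to be applied in all three places.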