1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 1.2 +++ b/content/media/omx/OmxDecoder.cpp Wed Dec 31 06:09:35 2014 +0100 1.3 @@ -0,0 +1,1128 @@ 1.4 +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ 1.5 +/* vim:set ts=2 sw=2 sts=2 et cindent: */ 1.6 +/* This Source Code Form is subject to the terms of the Mozilla Public 1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this file, 1.8 + * You can obtain one at http://mozilla.org/MPL/2.0/. */ 1.9 +#include <unistd.h> 1.10 +#include <fcntl.h> 1.11 + 1.12 +#include "base/basictypes.h" 1.13 +#include <cutils/properties.h> 1.14 +#include <stagefright/foundation/ADebug.h> 1.15 +#include <stagefright/foundation/AMessage.h> 1.16 +#include <stagefright/MediaExtractor.h> 1.17 +#include <stagefright/MetaData.h> 1.18 +#include <stagefright/OMXClient.h> 1.19 +#include <stagefright/OMXCodec.h> 1.20 +#include <OMX.h> 1.21 +#if MOZ_WIDGET_GONK && ANDROID_VERSION >= 17 1.22 +#include <ui/Fence.h> 1.23 +#endif 1.24 + 1.25 +#include "mozilla/layers/GrallocTextureClient.h" 1.26 +#include "mozilla/layers/TextureClient.h" 1.27 +#include "mozilla/Preferences.h" 1.28 +#include "mozilla/Types.h" 1.29 +#include "mozilla/Monitor.h" 1.30 +#include "nsMimeTypes.h" 1.31 +#include "MPAPI.h" 1.32 +#include "prlog.h" 1.33 + 1.34 +#include "GonkNativeWindow.h" 1.35 +#include "GonkNativeWindowClient.h" 1.36 +#include "OMXCodecProxy.h" 1.37 +#include "OmxDecoder.h" 1.38 +#include "nsISeekableStream.h" 1.39 + 1.40 +#ifdef PR_LOGGING 1.41 +PRLogModuleInfo *gOmxDecoderLog; 1.42 +#define LOG(type, msg...) PR_LOG(gOmxDecoderLog, type, (msg)) 1.43 +#else 1.44 +#define LOG(x...) 1.45 +#endif 1.46 + 1.47 +using namespace MPAPI; 1.48 +using namespace mozilla; 1.49 +using namespace mozilla::gfx; 1.50 +using namespace mozilla::layers; 1.51 + 1.52 +namespace mozilla { 1.53 + 1.54 +class ReleaseOmxDecoderRunnable : public nsRunnable 1.55 +{ 1.56 +public: 1.57 + ReleaseOmxDecoderRunnable(const android::sp<android::OmxDecoder>& aOmxDecoder) 1.58 + : mOmxDecoder(aOmxDecoder) 1.59 + { 1.60 + } 1.61 + 1.62 + NS_METHOD Run() MOZ_OVERRIDE 1.63 + { 1.64 + MOZ_ASSERT(NS_IsMainThread()); 1.65 + mOmxDecoder = nullptr; // release OmxDecoder 1.66 + return NS_OK; 1.67 + } 1.68 + 1.69 +private: 1.70 + android::sp<android::OmxDecoder> mOmxDecoder; 1.71 +}; 1.72 + 1.73 +class OmxDecoderProcessCachedDataTask : public Task 1.74 +{ 1.75 +public: 1.76 + OmxDecoderProcessCachedDataTask(android::OmxDecoder* aOmxDecoder, int64_t aOffset) 1.77 + : mOmxDecoder(aOmxDecoder), 1.78 + mOffset(aOffset) 1.79 + { } 1.80 + 1.81 + void Run() 1.82 + { 1.83 + MOZ_ASSERT(!NS_IsMainThread()); 1.84 + MOZ_ASSERT(mOmxDecoder.get()); 1.85 + int64_t rem = mOmxDecoder->ProcessCachedData(mOffset, false); 1.86 + 1.87 + if (rem <= 0) { 1.88 + ReleaseOmxDecoderRunnable* r = new ReleaseOmxDecoderRunnable(mOmxDecoder); 1.89 + mOmxDecoder.clear(); 1.90 + NS_DispatchToMainThread(r); 1.91 + } 1.92 + } 1.93 + 1.94 +private: 1.95 + android::sp<android::OmxDecoder> mOmxDecoder; 1.96 + int64_t mOffset; 1.97 +}; 1.98 + 1.99 +// When loading an MP3 stream from a file, we need to parse the file's 1.100 +// content to find its duration. Reading files of 100 MiB or more can 1.101 +// delay the player app noticably, so the file is read and decoded in 1.102 +// smaller chunks. 1.103 +// 1.104 +// We first read on the decode thread, but parsing must be done on the 1.105 +// main thread. 
After we read the file's initial MiBs in the decode
1.106 +// thread, an instance of this class is scheduled to the main thread for
1.107 +// parsing the MP3 stream. The decode thread waits until it has finished.
1.108 +//
1.109 +// If there is more data available from the file, the runnable dispatches
1.110 +// a task to the IO thread for retrieving the next chunk of data, and
1.111 +// the IO task dispatches a runnable to the main thread for parsing the
1.112 +// data. This goes on until all of the MP3 file has been parsed.
1.113 +
1.114 +class OmxDecoderNotifyDataArrivedRunnable : public nsRunnable
1.115 +{
1.116 +public:
1.117 +  OmxDecoderNotifyDataArrivedRunnable(android::OmxDecoder* aOmxDecoder,
1.118 +                                      const char* aBuffer, uint64_t aLength,
1.119 +                                      int64_t aOffset, uint64_t aFullLength)
1.120 +    : mOmxDecoder(aOmxDecoder),
1.121 +      mBuffer(aBuffer),
1.122 +      mLength(aLength),
1.123 +      mOffset(aOffset),
1.124 +      mFullLength(aFullLength),
1.125 +      mCompletedMonitor("OmxDecoderNotifyDataArrived.mCompleted"),
1.126 +      mCompleted(false)
1.127 +  {
1.128 +    MOZ_ASSERT(mOmxDecoder.get());
1.129 +    MOZ_ASSERT(mBuffer.get() || !mLength);
1.130 +  }
1.131 +
1.132 +  NS_IMETHOD Run()
1.133 +  {
1.134 +    NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
1.135 +
1.136 +    NotifyDataArrived();
1.137 +    Completed();
1.138 +
1.139 +    return NS_OK;
1.140 +  }
1.141 +
1.142 +  void WaitForCompletion()
1.143 +  {
1.144 +    MOZ_ASSERT(!NS_IsMainThread());
1.145 +
1.146 +    MonitorAutoLock mon(mCompletedMonitor);
1.147 +    if (!mCompleted) {
1.148 +      mCompletedMonitor.Wait();
1.149 +    }
1.150 +  }
1.151 +
1.152 +private:
1.153 +  void NotifyDataArrived()
1.154 +  {
1.155 +    const char* buffer = mBuffer.get();
1.156 +
1.157 +    while (mLength) {
1.158 +      uint32_t length = std::min<uint64_t>(mLength, UINT32_MAX);
1.159 +      bool success = mOmxDecoder->NotifyDataArrived(buffer, length,
1.160 +                                                    mOffset);
1.161 +      if (!success) {
1.162 +        return;
1.163 +      }
1.164 +
1.165 +      buffer += length;
1.166 +      mLength -= length;
1.167 +      mOffset += length;
1.168 +    }
1.169 +
1.170 +    if (mOffset < mFullLength) {
1.171 +      // We cannot read data in the main thread because it
1.172 +      // might block for too long. Instead we post an IO task
1.173 +      // to the IO thread if there is more data available.
1.174 +      XRE_GetIOMessageLoop()->PostTask(FROM_HERE,
1.175 +          new OmxDecoderProcessCachedDataTask(mOmxDecoder.get(), mOffset));
1.176 +    }
1.177 +  }
1.178 +
1.179 +  // Call this function at the end of Run() to notify waiting
1.180 +  // threads.
1.181 + void Completed() 1.182 + { 1.183 + MonitorAutoLock mon(mCompletedMonitor); 1.184 + MOZ_ASSERT(!mCompleted); 1.185 + mCompleted = true; 1.186 + mCompletedMonitor.Notify(); 1.187 + } 1.188 + 1.189 + android::sp<android::OmxDecoder> mOmxDecoder; 1.190 + nsAutoArrayPtr<const char> mBuffer; 1.191 + uint64_t mLength; 1.192 + int64_t mOffset; 1.193 + uint64_t mFullLength; 1.194 + 1.195 + Monitor mCompletedMonitor; 1.196 + bool mCompleted; 1.197 +}; 1.198 + 1.199 +} 1.200 + 1.201 +namespace android { 1.202 + 1.203 +MediaStreamSource::MediaStreamSource(MediaResource *aResource, 1.204 + AbstractMediaDecoder *aDecoder) : 1.205 + mResource(aResource), mDecoder(aDecoder) 1.206 +{ 1.207 +} 1.208 + 1.209 +MediaStreamSource::~MediaStreamSource() 1.210 +{ 1.211 +} 1.212 + 1.213 +status_t MediaStreamSource::initCheck() const 1.214 +{ 1.215 + return OK; 1.216 +} 1.217 + 1.218 +ssize_t MediaStreamSource::readAt(off64_t offset, void *data, size_t size) 1.219 +{ 1.220 + char *ptr = static_cast<char *>(data); 1.221 + size_t todo = size; 1.222 + while (todo > 0) { 1.223 + Mutex::Autolock autoLock(mLock); 1.224 + uint32_t bytesRead; 1.225 + if ((offset != mResource->Tell() && 1.226 + NS_FAILED(mResource->Seek(nsISeekableStream::NS_SEEK_SET, offset))) || 1.227 + NS_FAILED(mResource->Read(ptr, todo, &bytesRead))) { 1.228 + return ERROR_IO; 1.229 + } 1.230 + 1.231 + if (bytesRead == 0) { 1.232 + return size - todo; 1.233 + } 1.234 + 1.235 + offset += bytesRead; 1.236 + todo -= bytesRead; 1.237 + ptr += bytesRead; 1.238 + } 1.239 + return size; 1.240 +} 1.241 + 1.242 +status_t MediaStreamSource::getSize(off64_t *size) 1.243 +{ 1.244 + uint64_t length = mResource->GetLength(); 1.245 + if (length == static_cast<uint64_t>(-1)) 1.246 + return ERROR_UNSUPPORTED; 1.247 + 1.248 + *size = length; 1.249 + 1.250 + return OK; 1.251 +} 1.252 + 1.253 +} // namespace android 1.254 + 1.255 +using namespace android; 1.256 + 1.257 +OmxDecoder::OmxDecoder(MediaResource *aResource, 1.258 + AbstractMediaDecoder *aDecoder) : 1.259 + mDecoder(aDecoder), 1.260 + mResource(aResource), 1.261 + mDisplayWidth(0), 1.262 + mDisplayHeight(0), 1.263 + mVideoWidth(0), 1.264 + mVideoHeight(0), 1.265 + mVideoColorFormat(0), 1.266 + mVideoStride(0), 1.267 + mVideoSliceHeight(0), 1.268 + mVideoRotation(0), 1.269 + mAudioChannels(-1), 1.270 + mAudioSampleRate(-1), 1.271 + mDurationUs(-1), 1.272 + mMP3FrameParser(aResource->GetLength()), 1.273 + mIsMp3(false), 1.274 + mVideoBuffer(nullptr), 1.275 + mAudioBuffer(nullptr), 1.276 + mIsVideoSeeking(false), 1.277 + mAudioMetadataRead(false), 1.278 + mAudioPaused(false), 1.279 + mVideoPaused(false) 1.280 +{ 1.281 + mLooper = new ALooper; 1.282 + mLooper->setName("OmxDecoder"); 1.283 + 1.284 + mReflector = new AHandlerReflector<OmxDecoder>(this); 1.285 + // Register AMessage handler to ALooper. 1.286 + mLooper->registerHandler(mReflector); 1.287 + // Start ALooper thread. 1.288 + mLooper->start(); 1.289 +} 1.290 + 1.291 +OmxDecoder::~OmxDecoder() 1.292 +{ 1.293 + MOZ_ASSERT(NS_IsMainThread()); 1.294 + 1.295 + ReleaseMediaResources(); 1.296 + 1.297 + // unregister AMessage handler from ALooper. 1.298 + mLooper->unregisterHandler(mReflector->id()); 1.299 + // Stop ALooper thread. 1.300 + mLooper->stop(); 1.301 +} 1.302 + 1.303 +void OmxDecoder::statusChanged() 1.304 +{ 1.305 + sp<AMessage> notify = 1.306 + new AMessage(kNotifyStatusChanged, mReflector->id()); 1.307 + // post AMessage to OmxDecoder via ALooper. 
1.308 +  notify->post();
1.309 +}
1.310 +
1.311 +static sp<IOMX> sOMX = nullptr;
1.312 +static sp<IOMX> GetOMX()
1.313 +{
1.314 +  if(sOMX.get() == nullptr) {
1.315 +    sOMX = new OMX;
1.316 +  }
1.317 +  return sOMX;
1.318 +}
1.319 +
1.320 +bool OmxDecoder::Init(sp<MediaExtractor>& extractor) {
1.321 +#ifdef PR_LOGGING
1.322 +  if (!gOmxDecoderLog) {
1.323 +    gOmxDecoderLog = PR_NewLogModule("OmxDecoder");
1.324 +  }
1.325 +#endif
1.326 +
1.327 +  const char* extractorMime;
1.328 +  sp<MetaData> meta = extractor->getMetaData();
1.329 +  if (meta->findCString(kKeyMIMEType, &extractorMime) && !strcasecmp(extractorMime, AUDIO_MP3)) {
1.330 +    mIsMp3 = true;
1.331 +  }
1.332 +
1.333 +  ssize_t audioTrackIndex = -1;
1.334 +  ssize_t videoTrackIndex = -1;
1.335 +
1.336 +  for (size_t i = 0; i < extractor->countTracks(); ++i) {
1.337 +    sp<MetaData> meta = extractor->getTrackMetaData(i);
1.338 +
1.339 +    int32_t bitRate;
1.340 +    if (!meta->findInt32(kKeyBitRate, &bitRate))
1.341 +      bitRate = 0;
1.342 +
1.343 +    const char *mime;
1.344 +    if (!meta->findCString(kKeyMIMEType, &mime)) {
1.345 +      continue;
1.346 +    }
1.347 +
1.348 +    if (videoTrackIndex == -1 && !strncasecmp(mime, "video/", 6)) {
1.349 +      videoTrackIndex = i;
1.350 +    } else if (audioTrackIndex == -1 && !strncasecmp(mime, "audio/", 6)) {
1.351 +      audioTrackIndex = i;
1.352 +    }
1.353 +  }
1.354 +
1.355 +  if (videoTrackIndex == -1 && audioTrackIndex == -1) {
1.356 +    NS_WARNING("OMX decoder could not find video or audio tracks");
1.357 +    return false;
1.358 +  }
1.359 +
1.360 +  mResource->SetReadMode(MediaCacheStream::MODE_PLAYBACK);
1.361 +
1.362 +  if (videoTrackIndex != -1) {
1.363 +    mVideoTrack = extractor->getTrack(videoTrackIndex);
1.364 +  }
1.365 +
1.366 +  if (audioTrackIndex != -1) {
1.367 +    mAudioTrack = extractor->getTrack(audioTrackIndex);
1.368 +
1.369 +#ifdef MOZ_AUDIO_OFFLOAD
1.370 +    // mAudioTrack is used by OMXCodec. For an offloaded audio track, using the
1.371 +    // same object gives undetermined behavior, so get a new track.
1.372 +    mAudioOffloadTrack = extractor->getTrack(audioTrackIndex);
1.373 +#endif
1.374 +  }
1.375 +  return true;
1.376 +}
1.377 +
1.378 +bool OmxDecoder::TryLoad() {
1.379 +
1.380 +  if (!AllocateMediaResources()) {
1.381 +    return false;
1.382 +  }
1.383 +
1.384 +  // check if video is waiting for resources
1.385 +  if (mVideoSource.get()) {
1.386 +    if (mVideoSource->IsWaitingResources()) {
1.387 +      return true;
1.388 +    }
1.389 +  }
1.390 +
1.391 +  // calculate duration
1.392 +  int64_t totalDurationUs = 0;
1.393 +  int64_t durationUs = 0;
1.394 +  if (mVideoTrack.get() && mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
1.395 +    if (durationUs > totalDurationUs)
1.396 +      totalDurationUs = durationUs;
1.397 +  }
1.398 +  if (mAudioTrack.get()) {
1.399 +    durationUs = -1;
1.400 +    const char* audioMime;
1.401 +    sp<MetaData> meta = mAudioTrack->getFormat();
1.402 +
1.403 +    if (mIsMp3) {
1.404 +      // Feed MP3 parser with cached data. Local files will be fully
1.405 +      // cached already, network streams will update with successive
1.406 +      // calls to NotifyDataArrived.
1.407 +      if (ProcessCachedData(0, true) >= 0) {
1.408 +        durationUs = mMP3FrameParser.GetDuration();
1.409 +        if (durationUs > totalDurationUs) {
1.410 +          totalDurationUs = durationUs;
1.411 +        }
1.412 +      }
1.413 +    }
1.414 +    if ((durationUs == -1) && meta->findInt64(kKeyDuration, &durationUs)) {
1.415 +      if (durationUs > totalDurationUs) {
1.416 +        totalDurationUs = durationUs;
1.417 +      }
1.418 +    }
1.419 +  }
1.420 +  mDurationUs = totalDurationUs;
1.421 +
1.422 +  // read video metadata
1.423 +  if (mVideoSource.get() && !SetVideoFormat()) {
1.424 +    NS_WARNING("Couldn't set OMX video format");
1.425 +    return false;
1.426 +  }
1.427 +
1.428 +  // read audio metadata
1.429 +  if (mAudioSource.get()) {
1.430 +    // To reliably get the channel and sample rate data we need to read from the
1.431 +    // audio source until we get an INFO_FORMAT_CHANGED status.
1.432 +    status_t err = mAudioSource->read(&mAudioBuffer);
1.433 +    if (err != INFO_FORMAT_CHANGED) {
1.434 +      if (err != OK) {
1.435 +        NS_WARNING("Couldn't read audio buffer from OMX decoder");
1.436 +        return false;
1.437 +      }
1.438 +      sp<MetaData> meta = mAudioSource->getFormat();
1.439 +      if (!meta->findInt32(kKeyChannelCount, &mAudioChannels) ||
1.440 +          !meta->findInt32(kKeySampleRate, &mAudioSampleRate)) {
1.441 +        NS_WARNING("Couldn't get audio metadata from OMX decoder");
1.442 +        return false;
1.443 +      }
1.444 +      mAudioMetadataRead = true;
1.445 +    }
1.446 +    else if (!SetAudioFormat()) {
1.447 +      NS_WARNING("Couldn't set audio format");
1.448 +      return false;
1.449 +    }
1.450 +  }
1.451 +
1.452 +  return true;
1.453 +}
1.454 +
1.455 +bool OmxDecoder::IsDormantNeeded()
1.456 +{
1.457 +  if (mVideoTrack.get()) {
1.458 +    return true;
1.459 +  }
1.460 +  return false;
1.461 +}
1.462 +
1.463 +bool OmxDecoder::IsWaitingMediaResources()
1.464 +{
1.465 +  if (mVideoSource.get()) {
1.466 +    return mVideoSource->IsWaitingResources();
1.467 +  }
1.468 +  return false;
1.469 +}
1.470 +
1.471 +static bool isInEmulator()
1.472 +{
1.473 +  char propQemu[PROPERTY_VALUE_MAX];
1.474 +  property_get("ro.kernel.qemu", propQemu, "");
1.475 +  return !strncmp(propQemu, "1", 1);
1.476 +}
1.477 +
1.478 +bool OmxDecoder::AllocateMediaResources()
1.479 +{
1.480 +  // OMXClient::connect() always returns OK and aborts fatally if
1.481 +  // it can't connect.
1.482 +  OMXClient client;
1.483 +  DebugOnly<status_t> err = client.connect();
1.484 +  NS_ASSERTION(err == OK, "Failed to connect to OMX in mediaserver.");
1.485 +  sp<IOMX> omx = client.interface();
1.486 +
1.487 +  if ((mVideoTrack != nullptr) && (mVideoSource == nullptr)) {
1.488 +    mNativeWindow = new GonkNativeWindow();
1.489 +#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION >= 17
1.490 +    mNativeWindowClient = new GonkNativeWindowClient(mNativeWindow->getBufferQueue());
1.491 +#else
1.492 +    mNativeWindowClient = new GonkNativeWindowClient(mNativeWindow);
1.493 +#endif
1.494 +
1.495 +    // Experience with OMX codecs is that only the HW decoders are
1.496 +    // worth bothering with, at least on the platforms where this code
1.497 +    // is currently used, and for formats this code is currently used
1.498 +    // for (h.264). So if we don't get a hardware decoder, just give
1.499 +    // up.
1.500 +    int flags = kHardwareCodecsOnly;
1.501 +
1.502 +    if (isInEmulator()) {
1.503 +      // If we are in the emulator, allow falling back to software.
1.504 + flags = 0; 1.505 + } 1.506 + mVideoSource = 1.507 + OMXCodecProxy::Create(omx, 1.508 + mVideoTrack->getFormat(), 1.509 + false, // decoder 1.510 + mVideoTrack, 1.511 + nullptr, 1.512 + flags, 1.513 + mNativeWindowClient); 1.514 + if (mVideoSource == nullptr) { 1.515 + NS_WARNING("Couldn't create OMX video source"); 1.516 + return false; 1.517 + } else { 1.518 + sp<OMXCodecProxy::EventListener> listener = this; 1.519 + mVideoSource->setEventListener(listener); 1.520 + mVideoSource->requestResource(); 1.521 + } 1.522 + } 1.523 + 1.524 + if ((mAudioTrack != nullptr) && (mAudioSource == nullptr)) { 1.525 + const char *audioMime = nullptr; 1.526 + sp<MetaData> meta = mAudioTrack->getFormat(); 1.527 + if (!meta->findCString(kKeyMIMEType, &audioMime)) { 1.528 + return false; 1.529 + } 1.530 + if (!strcasecmp(audioMime, "audio/raw")) { 1.531 + mAudioSource = mAudioTrack; 1.532 + } else { 1.533 + // try to load hardware codec in mediaserver process. 1.534 + int flags = kHardwareCodecsOnly; 1.535 + mAudioSource = OMXCodec::Create(omx, 1.536 + mAudioTrack->getFormat(), 1.537 + false, // decoder 1.538 + mAudioTrack, 1.539 + nullptr, 1.540 + flags); 1.541 + } 1.542 + 1.543 + if (mAudioSource == nullptr) { 1.544 + // try to load software codec in this process. 1.545 + int flags = kSoftwareCodecsOnly; 1.546 + mAudioSource = OMXCodec::Create(GetOMX(), 1.547 + mAudioTrack->getFormat(), 1.548 + false, // decoder 1.549 + mAudioTrack, 1.550 + nullptr, 1.551 + flags); 1.552 + if (mAudioSource == nullptr) { 1.553 + NS_WARNING("Couldn't create OMX audio source"); 1.554 + return false; 1.555 + } 1.556 + } 1.557 + if (mAudioSource->start() != OK) { 1.558 + NS_WARNING("Couldn't start OMX audio source"); 1.559 + mAudioSource.clear(); 1.560 + return false; 1.561 + } 1.562 + } 1.563 + return true; 1.564 +} 1.565 + 1.566 + 1.567 +void OmxDecoder::ReleaseMediaResources() { 1.568 + { 1.569 + // Free all pending video buffers. 
1.570 +    Mutex::Autolock autoLock(mSeekLock);
1.571 +    ReleaseAllPendingVideoBuffersLocked();
1.572 +  }
1.573 +
1.574 +  ReleaseVideoBuffer();
1.575 +  ReleaseAudioBuffer();
1.576 +
1.577 +  if (mVideoSource.get()) {
1.578 +    mVideoSource->stop();
1.579 +    mVideoSource.clear();
1.580 +  }
1.581 +
1.582 +  if (mAudioSource.get()) {
1.583 +    mAudioSource->stop();
1.584 +    mAudioSource.clear();
1.585 +  }
1.586 +
1.587 +  mNativeWindowClient.clear();
1.588 +  mNativeWindow.clear();
1.589 +}
1.590 +
1.591 +bool OmxDecoder::SetVideoFormat() {
1.592 +  const char *componentName;
1.593 +
1.594 +  if (!mVideoSource->getFormat()->findInt32(kKeyWidth, &mVideoWidth) ||
1.595 +      !mVideoSource->getFormat()->findInt32(kKeyHeight, &mVideoHeight) ||
1.596 +      !mVideoSource->getFormat()->findCString(kKeyDecoderComponent, &componentName) ||
1.597 +      !mVideoSource->getFormat()->findInt32(kKeyColorFormat, &mVideoColorFormat) ) {
1.598 +    return false;
1.599 +  }
1.600 +
1.601 +  if (!mVideoTrack.get() || !mVideoTrack->getFormat()->findInt32(kKeyDisplayWidth, &mDisplayWidth)) {
1.602 +    mDisplayWidth = mVideoWidth;
1.603 +    NS_WARNING("display width not available, assuming width");
1.604 +  }
1.605 +
1.606 +  if (!mVideoTrack.get() || !mVideoTrack->getFormat()->findInt32(kKeyDisplayHeight, &mDisplayHeight)) {
1.607 +    mDisplayHeight = mVideoHeight;
1.608 +    NS_WARNING("display height not available, assuming height");
1.609 +  }
1.610 +
1.611 +  if (!mVideoSource->getFormat()->findInt32(kKeyStride, &mVideoStride)) {
1.612 +    mVideoStride = mVideoWidth;
1.613 +    NS_WARNING("stride not available, assuming width");
1.614 +  }
1.615 +
1.616 +  if (!mVideoSource->getFormat()->findInt32(kKeySliceHeight, &mVideoSliceHeight)) {
1.617 +    mVideoSliceHeight = mVideoHeight;
1.618 +    NS_WARNING("slice height not available, assuming height");
1.619 +  }
1.620 +
1.621 +  // Since ICS, the valid video size is calculated from kKeyCropRect.
1.622 +  // kKeyWidth means decoded video buffer width.
1.623 +  // kKeyHeight means decoded video buffer height.
1.624 +  // On some hardware, the decoded video buffer size and the valid video size differ.
1.625 +  int32_t crop_left, crop_top, crop_right, crop_bottom;
1.626 +  if (mVideoSource->getFormat()->findRect(kKeyCropRect,
1.627 +                                          &crop_left,
1.628 +                                          &crop_top,
1.629 +                                          &crop_right,
1.630 +                                          &crop_bottom)) {
1.631 +    mVideoWidth = crop_right - crop_left + 1;
1.632 +    mVideoHeight = crop_bottom - crop_top + 1;
1.633 +  }
1.634 +
1.635 +  if (!mVideoSource->getFormat()->findInt32(kKeyRotation, &mVideoRotation)) {
1.636 +    mVideoRotation = 0;
1.637 +    NS_WARNING("rotation not available, assuming 0");
1.638 +  }
1.639 +
1.640 +  LOG(PR_LOG_DEBUG, "display width: %d display height %d width: %d height: %d component: %s format: %d stride: %d sliceHeight: %d rotation: %d",
1.641 +      mDisplayWidth, mDisplayHeight, mVideoWidth, mVideoHeight, componentName,
1.642 +      mVideoColorFormat, mVideoStride, mVideoSliceHeight, mVideoRotation);
1.643 +
1.644 +  return true;
1.645 +}
1.646 +
1.647 +bool OmxDecoder::SetAudioFormat() {
1.648 +  // If the format changed, update our cached info.
1.649 + if (!mAudioSource->getFormat()->findInt32(kKeyChannelCount, &mAudioChannels) || 1.650 + !mAudioSource->getFormat()->findInt32(kKeySampleRate, &mAudioSampleRate)) { 1.651 + return false; 1.652 + } 1.653 + 1.654 + LOG(PR_LOG_DEBUG, "channelCount: %d sampleRate: %d", 1.655 + mAudioChannels, mAudioSampleRate); 1.656 + 1.657 + return true; 1.658 +} 1.659 + 1.660 +void OmxDecoder::ReleaseDecoder() 1.661 +{ 1.662 + mDecoder = nullptr; 1.663 +} 1.664 + 1.665 +bool OmxDecoder::NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset) 1.666 +{ 1.667 + if (!mAudioTrack.get() || !mIsMp3 || !mMP3FrameParser.IsMP3() || !mDecoder) { 1.668 + return false; 1.669 + } 1.670 + 1.671 + mMP3FrameParser.Parse(aBuffer, aLength, aOffset); 1.672 + 1.673 + int64_t durationUs = mMP3FrameParser.GetDuration(); 1.674 + 1.675 + if (durationUs != mDurationUs) { 1.676 + mDurationUs = durationUs; 1.677 + 1.678 + MOZ_ASSERT(mDecoder); 1.679 + ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor()); 1.680 + mDecoder->UpdateEstimatedMediaDuration(mDurationUs); 1.681 + } 1.682 + 1.683 + return true; 1.684 +} 1.685 + 1.686 +void OmxDecoder::ReleaseVideoBuffer() { 1.687 + if (mVideoBuffer) { 1.688 + mVideoBuffer->release(); 1.689 + mVideoBuffer = nullptr; 1.690 + } 1.691 +} 1.692 + 1.693 +void OmxDecoder::ReleaseAudioBuffer() { 1.694 + if (mAudioBuffer) { 1.695 + mAudioBuffer->release(); 1.696 + mAudioBuffer = nullptr; 1.697 + } 1.698 +} 1.699 + 1.700 +void OmxDecoder::PlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) { 1.701 + void *y = aData; 1.702 + void *u = static_cast<uint8_t *>(y) + mVideoStride * mVideoSliceHeight; 1.703 + void *v = static_cast<uint8_t *>(u) + mVideoStride/2 * mVideoSliceHeight/2; 1.704 + 1.705 + aFrame->Set(aTimeUs, aKeyFrame, 1.706 + aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation, 1.707 + y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0, 1.708 + u, mVideoStride/2, mVideoWidth/2, mVideoHeight/2, 0, 0, 1.709 + v, mVideoStride/2, mVideoWidth/2, mVideoHeight/2, 0, 0); 1.710 +} 1.711 + 1.712 +void OmxDecoder::CbYCrYFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) { 1.713 + aFrame->Set(aTimeUs, aKeyFrame, 1.714 + aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation, 1.715 + aData, mVideoStride, mVideoWidth, mVideoHeight, 1, 1, 1.716 + aData, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 3, 1.717 + aData, mVideoStride, mVideoWidth/2, mVideoHeight/2, 2, 3); 1.718 +} 1.719 + 1.720 +void OmxDecoder::SemiPlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) { 1.721 + void *y = aData; 1.722 + void *uv = static_cast<uint8_t *>(y) + (mVideoStride * mVideoSliceHeight); 1.723 + 1.724 + aFrame->Set(aTimeUs, aKeyFrame, 1.725 + aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation, 1.726 + y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0, 1.727 + uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 1, 1.728 + uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 1, 1); 1.729 +} 1.730 + 1.731 +void OmxDecoder::SemiPlanarYVU420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) { 1.732 + SemiPlanarYUV420Frame(aFrame, aTimeUs, aData, aSize, aKeyFrame); 1.733 + aFrame->Cb.mOffset = 1; 1.734 + aFrame->Cr.mOffset = 0; 1.735 +} 1.736 + 1.737 +bool OmxDecoder::ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) { 1.738 + const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 
0x7FA30C00; 1.739 + 1.740 + aFrame->mGraphicBuffer = nullptr; 1.741 + 1.742 + switch (mVideoColorFormat) { 1.743 + case OMX_COLOR_FormatYUV420Planar: 1.744 + PlanarYUV420Frame(aFrame, aTimeUs, aData, aSize, aKeyFrame); 1.745 + break; 1.746 + case OMX_COLOR_FormatCbYCrY: 1.747 + CbYCrYFrame(aFrame, aTimeUs, aData, aSize, aKeyFrame); 1.748 + break; 1.749 + case OMX_COLOR_FormatYUV420SemiPlanar: 1.750 + SemiPlanarYUV420Frame(aFrame, aTimeUs, aData, aSize, aKeyFrame); 1.751 + break; 1.752 + case OMX_QCOM_COLOR_FormatYVU420SemiPlanar: 1.753 + SemiPlanarYVU420Frame(aFrame, aTimeUs, aData, aSize, aKeyFrame); 1.754 + break; 1.755 + default: 1.756 + LOG(PR_LOG_DEBUG, "Unknown video color format %08x", mVideoColorFormat); 1.757 + return false; 1.758 + } 1.759 + return true; 1.760 +} 1.761 + 1.762 +bool OmxDecoder::ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize, int32_t aAudioChannels, int32_t aAudioSampleRate) 1.763 +{ 1.764 + aFrame->Set(aTimeUs, static_cast<char *>(aData) + aDataOffset, aSize, aAudioChannels, aAudioSampleRate); 1.765 + return true; 1.766 +} 1.767 + 1.768 +bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aTimeUs, 1.769 + bool aKeyframeSkip, bool aDoSeek) 1.770 +{ 1.771 + if (!mVideoSource.get()) 1.772 + return false; 1.773 + 1.774 + ReleaseVideoBuffer(); 1.775 + 1.776 + status_t err; 1.777 + 1.778 + if (aDoSeek) { 1.779 + { 1.780 + Mutex::Autolock autoLock(mSeekLock); 1.781 + mIsVideoSeeking = true; 1.782 + } 1.783 + MediaSource::ReadOptions options; 1.784 + options.setSeekTo(aTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC); 1.785 + err = mVideoSource->read(&mVideoBuffer, &options); 1.786 + { 1.787 + Mutex::Autolock autoLock(mSeekLock); 1.788 + mIsVideoSeeking = false; 1.789 + PostReleaseVideoBuffer(nullptr, FenceHandle()); 1.790 + } 1.791 + 1.792 + aDoSeek = false; 1.793 + } else { 1.794 + err = mVideoSource->read(&mVideoBuffer); 1.795 + } 1.796 + 1.797 + aFrame->mSize = 0; 1.798 + 1.799 + if (err == OK) { 1.800 + int64_t timeUs; 1.801 + int32_t unreadable; 1.802 + int32_t keyFrame; 1.803 + 1.804 + if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs) ) { 1.805 + NS_WARNING("OMX decoder did not return frame time"); 1.806 + return false; 1.807 + } 1.808 + 1.809 + if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) { 1.810 + keyFrame = 0; 1.811 + } 1.812 + 1.813 + if (!mVideoBuffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)) { 1.814 + unreadable = 0; 1.815 + } 1.816 + 1.817 + RefPtr<mozilla::layers::TextureClient> textureClient; 1.818 + if ((mVideoBuffer->graphicBuffer().get())) { 1.819 + textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get()); 1.820 + } 1.821 + 1.822 + if (textureClient) { 1.823 + // Manually increment reference count to keep MediaBuffer alive 1.824 + // during TextureClient is in use. 1.825 + mVideoBuffer->add_ref(); 1.826 + GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get()); 1.827 + grallocClient->SetMediaBuffer(mVideoBuffer); 1.828 + // Set recycle callback for TextureClient 1.829 + textureClient->SetRecycleCallback(OmxDecoder::RecycleCallback, this); 1.830 + 1.831 + aFrame->mGraphicBuffer = textureClient; 1.832 + aFrame->mRotation = mVideoRotation; 1.833 + aFrame->mTimeUs = timeUs; 1.834 + aFrame->mKeyFrame = keyFrame; 1.835 + aFrame->Y.mWidth = mVideoWidth; 1.836 + aFrame->Y.mHeight = mVideoHeight; 1.837 + // Release to hold video buffer in OmxDecoder more. 
1.838 + // MediaBuffer's ref count is changed from 2 to 1. 1.839 + ReleaseVideoBuffer(); 1.840 + } else if (mVideoBuffer->range_length() > 0) { 1.841 + char *data = static_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset(); 1.842 + size_t length = mVideoBuffer->range_length(); 1.843 + 1.844 + if (unreadable) { 1.845 + LOG(PR_LOG_DEBUG, "video frame is unreadable"); 1.846 + } 1.847 + 1.848 + if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame)) { 1.849 + return false; 1.850 + } 1.851 + } 1.852 + 1.853 + if (aKeyframeSkip && timeUs < aTimeUs) { 1.854 + aFrame->mShouldSkip = true; 1.855 + } 1.856 + } 1.857 + else if (err == INFO_FORMAT_CHANGED) { 1.858 + // If the format changed, update our cached info. 1.859 + if (!SetVideoFormat()) { 1.860 + return false; 1.861 + } else { 1.862 + return ReadVideo(aFrame, aTimeUs, aKeyframeSkip, aDoSeek); 1.863 + } 1.864 + } 1.865 + else if (err == ERROR_END_OF_STREAM) { 1.866 + return false; 1.867 + } 1.868 + else if (err == -ETIMEDOUT) { 1.869 + LOG(PR_LOG_DEBUG, "OmxDecoder::ReadVideo timed out, will retry"); 1.870 + return true; 1.871 + } 1.872 + else { 1.873 + // UNKNOWN_ERROR is sometimes is used to mean "out of memory", but 1.874 + // regardless, don't keep trying to decode if the decoder doesn't want to. 1.875 + LOG(PR_LOG_DEBUG, "OmxDecoder::ReadVideo failed, err=%d", err); 1.876 + return false; 1.877 + } 1.878 + 1.879 + return true; 1.880 +} 1.881 + 1.882 +bool OmxDecoder::ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs) 1.883 +{ 1.884 + status_t err; 1.885 + 1.886 + if (mAudioMetadataRead && aSeekTimeUs == -1) { 1.887 + // Use the data read into the buffer during metadata time 1.888 + err = OK; 1.889 + } 1.890 + else { 1.891 + ReleaseAudioBuffer(); 1.892 + if (aSeekTimeUs != -1) { 1.893 + MediaSource::ReadOptions options; 1.894 + options.setSeekTo(aSeekTimeUs); 1.895 + err = mAudioSource->read(&mAudioBuffer, &options); 1.896 + } else { 1.897 + err = mAudioSource->read(&mAudioBuffer); 1.898 + } 1.899 + } 1.900 + mAudioMetadataRead = false; 1.901 + 1.902 + aSeekTimeUs = -1; 1.903 + aFrame->mSize = 0; 1.904 + 1.905 + if (err == OK && mAudioBuffer && mAudioBuffer->range_length() != 0) { 1.906 + int64_t timeUs; 1.907 + if (!mAudioBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) 1.908 + return false; 1.909 + 1.910 + return ToAudioFrame(aFrame, timeUs, 1.911 + mAudioBuffer->data(), 1.912 + mAudioBuffer->range_offset(), 1.913 + mAudioBuffer->range_length(), 1.914 + mAudioChannels, mAudioSampleRate); 1.915 + } 1.916 + else if (err == INFO_FORMAT_CHANGED) { 1.917 + // If the format changed, update our cached info. 
1.918 +    if (!SetAudioFormat()) {
1.919 +      return false;
1.920 +    } else {
1.921 +      return ReadAudio(aFrame, aSeekTimeUs);
1.922 +    }
1.923 +  }
1.924 +  else if (err == ERROR_END_OF_STREAM) {
1.925 +    if (aFrame->mSize == 0) {
1.926 +      return false;
1.927 +    }
1.928 +  }
1.929 +  else if (err == -ETIMEDOUT) {
1.930 +    LOG(PR_LOG_DEBUG, "OmxDecoder::ReadAudio timed out, will retry");
1.931 +    return true;
1.932 +  }
1.933 +  else if (err != OK) {
1.934 +    LOG(PR_LOG_DEBUG, "OmxDecoder::ReadAudio failed, err=%d", err);
1.935 +    return false;
1.936 +  }
1.937 +
1.938 +  return true;
1.939 +}
1.940 +
1.941 +nsresult OmxDecoder::Play()
1.942 +{
1.943 +  if (!mVideoPaused && !mAudioPaused) {
1.944 +    return NS_OK;
1.945 +  }
1.946 +
1.947 +  if (mVideoPaused && mVideoSource.get() && mVideoSource->start() != OK) {
1.948 +    return NS_ERROR_UNEXPECTED;
1.949 +  }
1.950 +  mVideoPaused = false;
1.951 +
1.952 +  if (mAudioPaused && mAudioSource.get() && mAudioSource->start() != OK) {
1.953 +    return NS_ERROR_UNEXPECTED;
1.954 +  }
1.955 +  mAudioPaused = false;
1.956 +
1.957 +  return NS_OK;
1.958 +}
1.959 +
1.960 +// AOSP does not provide an implementation of OMXCodec::Pause() and does not
1.961 +// define that OMXCodec::Start() should be called to resume decoding. Currently
1.962 +// this is customized only by a specific open source repository.
1.963 +// TODO: An OMXCodec that does not support Pause() should return an error code
1.964 +// here, so that OMXCodec::Start() is not called again to resume. But if someone
1.965 +// implements OMXCodec::Pause() and then needs a following OMXCodec::Read() with
1.966 +// a seek option (defined in MediaSource.h), that is still not supported here.
1.967 +// We need to fix that if it ever happens.
1.968 +void OmxDecoder::Pause()
1.969 +{
1.970 +  /* The implementation of OMXCodec::pause is flawed.
1.971 +   * OMXCodec::start will not restore from the paused state, resulting in
1.972 +   * buffer timeouts which cause timeouts in mochitests.
1.973 +   * Since there is no power consumption problem in the emulator, we just
1.974 +   * return when running in the emulator to avoid those timeouts.
1.975 +   */
1.976 +  if (isInEmulator()) {
1.977 +    return;
1.978 +  }
1.979 +
1.980 +  if (mVideoPaused || mAudioPaused) {
1.981 +    return;
1.982 +  }
1.983 +
1.984 +  if (mVideoSource.get() && mVideoSource->pause() == OK) {
1.985 +    mVideoPaused = true;
1.986 +  }
1.987 +
1.988 +  if (mAudioSource.get() && mAudioSource->pause() == OK) {
1.989 +    mAudioPaused = true;
1.990 +  }
1.991 +}
1.992 +
1.993 +// Called on ALooper thread.
1.994 +void OmxDecoder::onMessageReceived(const sp<AMessage> &msg)
1.995 +{
1.996 +  switch (msg->what()) {
1.997 +    case kNotifyPostReleaseVideoBuffer:
1.998 +    {
1.999 +      Mutex::Autolock autoLock(mSeekLock);
1.1000 +      // Free pending video buffers when OmxDecoder is not seeking video.
1.1001 +      // If OmxDecoder is seeking video, the buffers are freed on seek exit.
1.1002 +      if (!mIsVideoSeeking) {
1.1003 +        ReleaseAllPendingVideoBuffersLocked();
1.1004 +      }
1.1005 +      break;
1.1006 +    }
1.1007 +
1.1008 +    case kNotifyStatusChanged:
1.1009 +    {
1.1010 +      // Our decoder may have acquired the hardware resources it needs
1.1011 +      // to start. Notify the state machine to resume loading metadata.
1.1012 +      mDecoder->NotifyWaitingForResourcesStatusChanged();
1.1013 +      break;
1.1014 +    }
1.1015 +
1.1016 +    default:
1.1017 +      TRESPASS();
1.1018 +      break;
1.1019 +  }
1.1020 +}
1.1021 +
1.1022 +void OmxDecoder::PostReleaseVideoBuffer(MediaBuffer *aBuffer, const FenceHandle& aReleaseFenceHandle)
1.1023 +{
1.1024 +  {
1.1025 +    Mutex::Autolock autoLock(mPendingVideoBuffersLock);
1.1026 +    if (aBuffer) {
1.1027 +      mPendingVideoBuffers.push(BufferItem(aBuffer, aReleaseFenceHandle));
1.1028 +    }
1.1029 +  }
1.1030 +
1.1031 +  sp<AMessage> notify =
1.1032 +    new AMessage(kNotifyPostReleaseVideoBuffer, mReflector->id());
1.1033 +  // post AMessage to OmxDecoder via ALooper.
1.1034 +  notify->post();
1.1035 +}
1.1036 +
1.1037 +void OmxDecoder::ReleaseAllPendingVideoBuffersLocked()
1.1038 +{
1.1039 +  Vector<BufferItem> releasingVideoBuffers;
1.1040 +  {
1.1041 +    Mutex::Autolock autoLock(mPendingVideoBuffersLock);
1.1042 +
1.1043 +    int size = mPendingVideoBuffers.size();
1.1044 +    for (int i = 0; i < size; i++) {
1.1045 +      releasingVideoBuffers.push(mPendingVideoBuffers[i]);
1.1046 +    }
1.1047 +    mPendingVideoBuffers.clear();
1.1048 +  }
1.1049 +  // Free all pending video buffers without holding mPendingVideoBuffersLock.
1.1050 +  int size = releasingVideoBuffers.size();
1.1051 +  for (int i = 0; i < size; i++) {
1.1052 +    MediaBuffer *buffer;
1.1053 +    buffer = releasingVideoBuffers[i].mMediaBuffer;
1.1054 +#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION >= 17
1.1055 +    android::sp<Fence> fence;
1.1056 +    int fenceFd = -1;
1.1057 +    fence = releasingVideoBuffers[i].mReleaseFenceHandle.mFence;
1.1058 +    if (fence.get() && fence->isValid()) {
1.1059 +      fenceFd = fence->dup();
1.1060 +    }
1.1061 +    MOZ_ASSERT(buffer->refcount() == 1);
1.1062 +    // This code expects the MediaBuffer's ref count to be 1.
1.1063 +    // Return the gralloc buffer to ANativeWindow.
1.1064 +    ANativeWindow* window = static_cast<ANativeWindow*>(mNativeWindowClient.get());
1.1065 +    window->cancelBuffer(window,
1.1066 +                         buffer->graphicBuffer().get(),
1.1067 +                         fenceFd);
1.1068 +    // Mark the MediaBuffer as rendered.
1.1069 +    // When the gralloc buffer is directly returned to ANativeWindow,
1.1070 +    // this mark is necessary.
1.1071 +    sp<MetaData> metaData = buffer->meta_data();
1.1072 +    metaData->setInt32(kKeyRendered, 1);
1.1073 +#endif
1.1074 +    // Return the MediaBuffer to OMXCodec.
1.1075 +    buffer->release();
1.1076 +  }
1.1077 +  releasingVideoBuffers.clear();
1.1078 +}
1.1079 +
1.1080 +/* static */ void
1.1081 +OmxDecoder::RecycleCallback(TextureClient* aClient, void* aClosure)
1.1082 +{
1.1083 +  OmxDecoder* decoder = static_cast<OmxDecoder*>(aClosure);
1.1084 +  GrallocTextureClientOGL* client = static_cast<GrallocTextureClientOGL*>(aClient);
1.1085 +
1.1086 +  aClient->ClearRecycleCallback();
1.1087 +  decoder->PostReleaseVideoBuffer(client->GetMediaBuffer(), client->GetReleaseFenceHandle());
1.1088 +}
1.1089 +
1.1090 +int64_t OmxDecoder::ProcessCachedData(int64_t aOffset, bool aWaitForCompletion)
1.1091 +{
1.1092 +  // We read data in chunks of 32 KiB. We can reduce this value if
1.1093 +  // media, such as SD cards, are too slow. Because of SD cards'
1.1094 +  // slowness, sReadSize needs to be kept small.
1.1095 +  // See Bug 914870.
1.1096 + static const int64_t sReadSize = 32 * 1024; 1.1097 + 1.1098 + NS_ASSERTION(!NS_IsMainThread(), "Should not be on main thread."); 1.1099 + 1.1100 + MOZ_ASSERT(mResource); 1.1101 + 1.1102 + int64_t resourceLength = mResource->GetCachedDataEnd(0); 1.1103 + NS_ENSURE_TRUE(resourceLength >= 0, -1); 1.1104 + 1.1105 + if (aOffset >= resourceLength) { 1.1106 + return 0; // Cache is empty, nothing to do 1.1107 + } 1.1108 + 1.1109 + int64_t bufferLength = std::min<int64_t>(resourceLength-aOffset, sReadSize); 1.1110 + 1.1111 + nsAutoArrayPtr<char> buffer(new char[bufferLength]); 1.1112 + 1.1113 + nsresult rv = mResource->ReadFromCache(buffer.get(), aOffset, bufferLength); 1.1114 + NS_ENSURE_SUCCESS(rv, -1); 1.1115 + 1.1116 + nsRefPtr<OmxDecoderNotifyDataArrivedRunnable> runnable( 1.1117 + new OmxDecoderNotifyDataArrivedRunnable(this, 1.1118 + buffer.forget(), 1.1119 + bufferLength, 1.1120 + aOffset, 1.1121 + resourceLength)); 1.1122 + 1.1123 + rv = NS_DispatchToMainThread(runnable.get()); 1.1124 + NS_ENSURE_SUCCESS(rv, -1); 1.1125 + 1.1126 + if (aWaitForCompletion) { 1.1127 + runnable->WaitForCompletion(); 1.1128 + } 1.1129 + 1.1130 + return resourceLength - aOffset - bufferLength; 1.1131 +}