Fri, 16 Jan 2015 04:50:19 +0100
Replace the accessor implementation with direct member-state manipulation, as
requested in https://trac.torproject.org/projects/tor/ticket/9701#comment:32
michael@0 | 1 | /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ |
michael@0 | 2 | /* vim:set ts=2 sw=2 sts=2 et cindent: */ |
michael@0 | 3 | /* This Source Code Form is subject to the terms of the Mozilla Public |
michael@0 | 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this file, |
michael@0 | 5 | * You can obtain one at http://mozilla.org/MPL/2.0/. */ |
michael@0 | 6 | |
michael@0 | 7 | #include "MediaOmxReader.h" |
michael@0 | 8 | |
michael@0 | 9 | #include "MediaDecoderStateMachine.h" |
michael@0 | 10 | #include "mozilla/TimeStamp.h" |
michael@0 | 11 | #include "mozilla/dom/TimeRanges.h" |
michael@0 | 12 | #include "MediaResource.h" |
michael@0 | 13 | #include "VideoUtils.h" |
michael@0 | 14 | #include "MediaOmxDecoder.h" |
michael@0 | 15 | #include "AbstractMediaDecoder.h" |
michael@0 | 16 | #include "AudioChannelService.h" |
michael@0 | 17 | #include "OmxDecoder.h" |
michael@0 | 18 | #include "MPAPI.h" |
michael@0 | 19 | #include "gfx2DGlue.h" |
michael@0 | 20 | |
michael@0 | 21 | #ifdef MOZ_AUDIO_OFFLOAD |
michael@0 | 22 | #include <stagefright/Utils.h> |
michael@0 | 23 | #include <cutils/properties.h> |
michael@0 | 24 | #include <stagefright/MetaData.h> |
michael@0 | 25 | #endif |
michael@0 | 26 | |
michael@0 | 27 | #define MAX_DROPPED_FRAMES 25 |
michael@0 | 28 | // Try not to spend more than this much time in a single call to DecodeVideoFrame. |
michael@0 | 29 | #define MAX_VIDEO_DECODE_SECONDS 0.1 |
michael@0 | 30 | |
michael@0 | 31 | using namespace mozilla::gfx; |
michael@0 | 32 | using namespace android; |
michael@0 | 33 | |
michael@0 | 34 | namespace mozilla { |
michael@0 | 35 | |
michael@0 | 36 | #ifdef PR_LOGGING |
michael@0 | 37 | extern PRLogModuleInfo* gMediaDecoderLog; |
michael@0 | 38 | #define DECODER_LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg) |
michael@0 | 39 | #else |
michael@0 | 40 | #define DECODER_LOG(type, msg) |
michael@0 | 41 | #endif |
michael@0 | 42 | |
// Constructs a reader bound to aDecoder. No media probing happens here; the
// underlying android::OmxDecoder is created lazily in InitOmxDecoder().
// NOTE: the member-initializer order below must match the member declaration
// order in MediaOmxReader.h.
MediaOmxReader::MediaOmxReader(AbstractMediaDecoder *aDecoder)
  : MediaDecoderReader(aDecoder)
  , mHasVideo(false)
  , mHasAudio(false)
  , mVideoSeekTimeUs(-1)  // -1 means "no pending video seek"
  , mAudioSeekTimeUs(-1)  // -1 means "no pending audio seek"
  , mSkipCount(0)
#ifdef DEBUG
  , mIsActive(true)       // debug-only flag asserted by the decode entry points
#endif
{
#ifdef PR_LOGGING
  // Lazily create the shared "MediaDecoder" log module on first construction.
  if (!gMediaDecoderLog) {
    gMediaDecoderLog = PR_NewLogModule("MediaDecoder");
  }
#endif

  mAudioChannel = dom::AudioChannelService::GetDefaultAudioChannel();
}
michael@0 | 62 | |
// Tears down in a fixed order: first release media resources (which also
// resets decode state and clears the current video frame), then release the
// codec itself, and finally drop our reference to the OMX decoder.
MediaOmxReader::~MediaOmxReader()
{
  ReleaseMediaResources();
  ReleaseDecoder();
  mOmxDecoder.clear();
}
michael@0 | 69 | |
// Nothing to clone from another reader for the OMX backend; real
// initialization happens lazily in InitOmxDecoder(). aCloneDonor is unused.
nsresult MediaOmxReader::Init(MediaDecoderReader* aCloneDonor)
{
  return NS_OK;
}
michael@0 | 74 | |
michael@0 | 75 | bool MediaOmxReader::IsWaitingMediaResources() |
michael@0 | 76 | { |
michael@0 | 77 | if (!mOmxDecoder.get()) { |
michael@0 | 78 | return false; |
michael@0 | 79 | } |
michael@0 | 80 | return mOmxDecoder->IsWaitingMediaResources(); |
michael@0 | 81 | } |
michael@0 | 82 | |
michael@0 | 83 | bool MediaOmxReader::IsDormantNeeded() |
michael@0 | 84 | { |
michael@0 | 85 | if (!mOmxDecoder.get()) { |
michael@0 | 86 | return false; |
michael@0 | 87 | } |
michael@0 | 88 | return mOmxDecoder->IsDormantNeeded(); |
michael@0 | 89 | } |
michael@0 | 90 | |
michael@0 | 91 | void MediaOmxReader::ReleaseMediaResources() |
michael@0 | 92 | { |
michael@0 | 93 | ResetDecode(); |
michael@0 | 94 | // Before freeing a video codec, all video buffers needed to be released |
michael@0 | 95 | // even from graphics pipeline. |
michael@0 | 96 | VideoFrameContainer* container = mDecoder->GetVideoFrameContainer(); |
michael@0 | 97 | if (container) { |
michael@0 | 98 | container->ClearCurrentFrame(); |
michael@0 | 99 | } |
michael@0 | 100 | if (mOmxDecoder.get()) { |
michael@0 | 101 | mOmxDecoder->ReleaseMediaResources(); |
michael@0 | 102 | } |
michael@0 | 103 | } |
michael@0 | 104 | |
michael@0 | 105 | void MediaOmxReader::ReleaseDecoder() |
michael@0 | 106 | { |
michael@0 | 107 | if (mOmxDecoder.get()) { |
michael@0 | 108 | mOmxDecoder->ReleaseDecoder(); |
michael@0 | 109 | } |
michael@0 | 110 | } |
michael@0 | 111 | |
michael@0 | 112 | nsresult MediaOmxReader::InitOmxDecoder() |
michael@0 | 113 | { |
michael@0 | 114 | if (!mOmxDecoder.get()) { |
michael@0 | 115 | //register sniffers, if they are not registered in this process. |
michael@0 | 116 | DataSource::RegisterDefaultSniffers(); |
michael@0 | 117 | mDecoder->GetResource()->SetReadMode(MediaCacheStream::MODE_METADATA); |
michael@0 | 118 | |
michael@0 | 119 | sp<DataSource> dataSource = new MediaStreamSource(mDecoder->GetResource(), mDecoder); |
michael@0 | 120 | dataSource->initCheck(); |
michael@0 | 121 | |
michael@0 | 122 | mExtractor = MediaExtractor::Create(dataSource); |
michael@0 | 123 | if (!mExtractor.get()) { |
michael@0 | 124 | return NS_ERROR_FAILURE; |
michael@0 | 125 | } |
michael@0 | 126 | mOmxDecoder = new OmxDecoder(mDecoder->GetResource(), mDecoder); |
michael@0 | 127 | if (!mOmxDecoder->Init(mExtractor)) { |
michael@0 | 128 | return NS_ERROR_FAILURE; |
michael@0 | 129 | } |
michael@0 | 130 | } |
michael@0 | 131 | return NS_OK; |
michael@0 | 132 | } |
michael@0 | 133 | |
// Probes the container and reports stream metadata.
// On success fills *aInfo with the detected audio/video configuration and
// sets *aTags to nullptr (this backend produces no metadata tags). As side
// effects it publishes the media duration and seekability to the decoder.
// May return NS_OK *before* aInfo is populated when the decoder is still
// waiting for hardware resources (see IsWaitingMediaResources); the caller
// is expected to retry later.
nsresult MediaOmxReader::ReadMetadata(MediaInfo* aInfo,
                                      MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  MOZ_ASSERT(mIsActive);

  *aTags = nullptr;

  // Initialize the internal OMX Decoder.
  nsresult rv = InitOmxDecoder();
  if (NS_FAILED(rv)) {
    return rv;
  }

  if (!mOmxDecoder->TryLoad()) {
    return NS_ERROR_FAILURE;
  }

#ifdef MOZ_AUDIO_OFFLOAD
  CheckAudioOffload();
#endif

  if (IsWaitingMediaResources()) {
    // Hardware codec not granted yet; report success without metadata and
    // let the caller come back once resources are available.
    return NS_OK;
  }

  // Set the total duration (the max of the audio and video track).
  int64_t durationUs;
  mOmxDecoder->GetDuration(&durationUs);
  if (durationUs) {
    // SetMediaDuration is called with the decoder monitor held.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mDecoder->SetMediaDuration(durationUs);
  }

  // Check the MediaExtract flag if the source is seekable.
  mDecoder->SetMediaSeekable(mExtractor->flags() & MediaExtractor::CAN_SEEK);

  if (mOmxDecoder->HasVideo()) {
    int32_t displayWidth, displayHeight, width, height;
    mOmxDecoder->GetVideoParameters(&displayWidth, &displayHeight,
                                    &width, &height);
    nsIntRect pictureRect(0, 0, width, height);

    // Validate the container-reported frame and pictureRect sizes. This ensures
    // that our video frame creation code doesn't overflow.
    nsIntSize displaySize(displayWidth, displayHeight);
    nsIntSize frameSize(width, height);
    if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
      return NS_ERROR_FAILURE;
    }

    // Video track's frame sizes will not overflow. Activate the video track.
    mHasVideo = mInfo.mVideo.mHasVideo = true;
    mInfo.mVideo.mDisplay = displaySize;
    mPicture = pictureRect;
    mInitialFrame = frameSize;
    VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
    if (container) {
      // Publish an empty frame of the right size so the video element can be
      // laid out before the first real frame is decoded.
      container->SetCurrentFrame(gfxIntSize(displaySize.width, displaySize.height),
                                 nullptr,
                                 mozilla::TimeStamp::Now());
    }
  }

  if (mOmxDecoder->HasAudio()) {
    int32_t numChannels, sampleRate;
    mOmxDecoder->GetAudioParameters(&numChannels, &sampleRate);
    mHasAudio = mInfo.mAudio.mHasAudio = true;
    mInfo.mAudio.mChannels = numChannels;
    mInfo.mAudio.mRate = sampleRate;
  }

  *aInfo = mInfo;

  return NS_OK;
}
michael@0 | 210 | |
// Decodes video until one frame is queued or the per-call time budget
// (MAX_VIDEO_DECODE_SECONDS) is exhausted. Returns false on end of stream /
// read failure, or when a VideoData cannot be created; returning true with
// nothing queued just means the budget ran out and the caller should call
// again. aKeyframeSkip asks the decoder to skip forward to a keyframe and is
// cleared once a frame is accepted. A pending seek (mVideoSeekTimeUs != -1)
// replaces aTimeThreshold for the first read of this call.
bool MediaOmxReader::DecodeVideoFrame(bool &aKeyframeSkip,
                                      int64_t aTimeThreshold)
{
  MOZ_ASSERT(mIsActive);

  // Record number of frames decoded and parsed. Automatically update the
  // stats counters using the AutoNotifyDecoded stack-based class.
  uint32_t parsed = 0, decoded = 0;
  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);

  bool doSeek = mVideoSeekTimeUs != -1;
  if (doSeek) {
    aTimeThreshold = mVideoSeekTimeUs;
  }

  TimeStamp start = TimeStamp::Now();

  // Read next frame. Don't let this loop run for too long.
  while ((TimeStamp::Now() - start) < TimeDuration::FromSeconds(MAX_VIDEO_DECODE_SECONDS)) {
    MPAPI::VideoFrame frame;
    frame.mGraphicBuffer = nullptr;
    frame.mShouldSkip = false;
    if (!mOmxDecoder->ReadVideo(&frame, aTimeThreshold, aKeyframeSkip, doSeek)) {
      return false;
    }
    // Only the first ReadVideo of this call performs the seek.
    doSeek = false;

    // Ignore empty buffer which stagefright media read will sporadically return
    if (frame.mSize == 0 && !frame.mGraphicBuffer) {
      continue;
    }

    parsed++;
    // Honor the decoder's skip hint, but never drop more than
    // MAX_DROPPED_FRAMES in a row so playback keeps making visible progress.
    if (frame.mShouldSkip && mSkipCount < MAX_DROPPED_FRAMES) {
      mSkipCount++;
      continue;
    }

    mSkipCount = 0;

    // This frame will be consumed: clear pending seek and keyframe-skip state.
    mVideoSeekTimeUs = -1;
    aKeyframeSkip = false;

    IntRect picture = ToIntRect(mPicture);
    if (frame.Y.mWidth != mInitialFrame.width ||
        frame.Y.mHeight != mInitialFrame.height) {

      // Frame size is different from what the container reports. This is legal,
      // and we will preserve the ratio of the crop rectangle as it
      // was reported relative to the picture size reported by the container.
      picture.x = (mPicture.x * frame.Y.mWidth) / mInitialFrame.width;
      picture.y = (mPicture.y * frame.Y.mHeight) / mInitialFrame.height;
      picture.width = (frame.Y.mWidth * mPicture.width) / mInitialFrame.width;
      picture.height = (frame.Y.mHeight * mPicture.height) / mInitialFrame.height;
    }

    // This is the approximate byte position in the stream.
    int64_t pos = mDecoder->GetResource()->Tell();

    VideoData *v;
    if (!frame.mGraphicBuffer) {
      // Software path: wrap the decoder's Y/Cb/Cr planes in a YCbCrBuffer.
      VideoData::YCbCrBuffer b;
      b.mPlanes[0].mData = static_cast<uint8_t *>(frame.Y.mData);
      b.mPlanes[0].mStride = frame.Y.mStride;
      b.mPlanes[0].mHeight = frame.Y.mHeight;
      b.mPlanes[0].mWidth = frame.Y.mWidth;
      b.mPlanes[0].mOffset = frame.Y.mOffset;
      b.mPlanes[0].mSkip = frame.Y.mSkip;

      b.mPlanes[1].mData = static_cast<uint8_t *>(frame.Cb.mData);
      b.mPlanes[1].mStride = frame.Cb.mStride;
      b.mPlanes[1].mHeight = frame.Cb.mHeight;
      b.mPlanes[1].mWidth = frame.Cb.mWidth;
      b.mPlanes[1].mOffset = frame.Cb.mOffset;
      b.mPlanes[1].mSkip = frame.Cb.mSkip;

      b.mPlanes[2].mData = static_cast<uint8_t *>(frame.Cr.mData);
      b.mPlanes[2].mStride = frame.Cr.mStride;
      b.mPlanes[2].mHeight = frame.Cr.mHeight;
      b.mPlanes[2].mWidth = frame.Cr.mWidth;
      b.mPlanes[2].mOffset = frame.Cr.mOffset;
      b.mPlanes[2].mSkip = frame.Cr.mSkip;

      v = VideoData::Create(mInfo.mVideo,
                            mDecoder->GetImageContainer(),
                            pos,
                            frame.mTimeUs,
                            1, // We don't know the duration.
                            b,
                            frame.mKeyFrame,
                            -1,
                            picture);
    } else {
      // Hardware path: hand the graphic buffer straight to the compositor.
      v = VideoData::Create(mInfo.mVideo,
                            mDecoder->GetImageContainer(),
                            pos,
                            frame.mTimeUs,
                            1, // We don't know the duration.
                            frame.mGraphicBuffer,
                            frame.mKeyFrame,
                            -1,
                            picture);
    }

    if (!v) {
      NS_WARNING("Unable to create VideoData");
      return false;
    }

    decoded++;
    NS_ASSERTION(decoded <= parsed, "Expect to decode fewer frames than parsed in MediaPlugin...");

    mVideoQueue.Push(v);

    // One frame queued; stop until the state machine asks for more.
    break;
  }

  return true;
}
michael@0 | 331 | |
michael@0 | 332 | void MediaOmxReader::NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset) |
michael@0 | 333 | { |
michael@0 | 334 | android::OmxDecoder *omxDecoder = mOmxDecoder.get(); |
michael@0 | 335 | |
michael@0 | 336 | if (omxDecoder) { |
michael@0 | 337 | omxDecoder->NotifyDataArrived(aBuffer, aLength, aOffset); |
michael@0 | 338 | } |
michael@0 | 339 | } |
michael@0 | 340 | |
// Decodes the next audio frame and pushes it through mAudioCompactor into
// the audio queue. Returns false on end of stream or read failure. A
// sporadic empty buffer from stagefright yields true so the caller retries.
bool MediaOmxReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  MOZ_ASSERT(mIsActive);

  // This is the approximate byte position in the stream.
  int64_t pos = mDecoder->GetResource()->Tell();

  // Read next frame
  MPAPI::AudioFrame source;
  if (!mOmxDecoder->ReadAudio(&source, mAudioSeekTimeUs)) {
    return false;
  }
  // Any pending seek has now been consumed by ReadAudio.
  mAudioSeekTimeUs = -1;

  // Ignore empty buffer which stagefright media read will sporadically return
  if (source.mSize == 0) {
    return true;
  }

  // Interleaved samples: bytes / (channels * bytes-per-sample) = frame count.
  // NOTE(review): assumes mAudioChannels is non-zero for a non-empty buffer
  // — confirm against OmxDecoder::ReadAudio.
  uint32_t frames = source.mSize / (source.mAudioChannels *
      sizeof(AudioDataValue));

  typedef AudioCompactor::NativeCopy OmxCopy;
  return mAudioCompactor.Push(pos,
                              source.mTimeUs,
                              source.mAudioSampleRate,
                              frames,
                              source.mAudioChannels,
                              OmxCopy(static_cast<uint8_t *>(source.mData),
                                      source.mSize,
                                      source.mAudioChannels));
}
michael@0 | 374 | |
// Prepares a seek to aTarget (microseconds). The demuxer seek itself happens
// lazily: this resets decode state and records pending seek times that
// DecodeVideoFrame/DecodeAudioData consume on their next read.
// aStartTime, aEndTime and aCurrentTime are not used by this backend.
nsresult MediaOmxReader::Seek(int64_t aTarget, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  MOZ_ASSERT(mIsActive);

  ResetDecode();
  // Drop queued images so stale frames are not composited after the seek.
  VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
  if (container && container->GetImageContainer()) {
    container->GetImageContainer()->ClearAllImagesExceptFront();
  }

  if (mHasAudio && mHasVideo) {
    // The OMXDecoder seeks/demuxes audio and video streams separately. So if
    // we seek both audio and video to aTarget, the audio stream can typically
    // seek closer to the seek target, since typically every audio block is
    // a sync point, whereas for video there are only keyframes once every few
    // seconds. So if we have both audio and video, we must seek the video
    // stream to the preceeding keyframe first, get the stream time, and then
    // seek the audio stream to match the video stream's time. Otherwise, the
    // audio and video streams won't be in sync after the seek.
    mVideoSeekTimeUs = aTarget;
    const VideoData* v = DecodeToFirstVideoData();
    mAudioSeekTimeUs = v ? v->mTime : aTarget;
  } else {
    mAudioSeekTimeUs = mVideoSeekTimeUs = aTarget;
  }

  return NS_OK;
}
michael@0 | 404 | |
michael@0 | 405 | static uint64_t BytesToTime(int64_t offset, uint64_t length, uint64_t durationUs) { |
michael@0 | 406 | double perc = double(offset) / double(length); |
michael@0 | 407 | if (perc > 1.0) |
michael@0 | 408 | perc = 1.0; |
michael@0 | 409 | return uint64_t(double(durationUs) * perc); |
michael@0 | 410 | } |
michael@0 | 411 | |
michael@0 | 412 | void MediaOmxReader::SetIdle() { |
michael@0 | 413 | #ifdef DEBUG |
michael@0 | 414 | mIsActive = false; |
michael@0 | 415 | #endif |
michael@0 | 416 | if (!mOmxDecoder.get()) { |
michael@0 | 417 | return; |
michael@0 | 418 | } |
michael@0 | 419 | mOmxDecoder->Pause(); |
michael@0 | 420 | } |
michael@0 | 421 | |
michael@0 | 422 | void MediaOmxReader::SetActive() { |
michael@0 | 423 | #ifdef DEBUG |
michael@0 | 424 | mIsActive = true; |
michael@0 | 425 | #endif |
michael@0 | 426 | if (!mOmxDecoder.get()) { |
michael@0 | 427 | return; |
michael@0 | 428 | } |
michael@0 | 429 | DebugOnly<nsresult> result = mOmxDecoder->Play(); |
michael@0 | 430 | NS_ASSERTION(result == NS_OK, "OmxDecoder should be in play state to continue decoding"); |
michael@0 | 431 | } |
michael@0 | 432 | |
michael@0 | 433 | #ifdef MOZ_AUDIO_OFFLOAD |
// Decides whether this stream qualifies for the audio-offload path and, when
// every condition holds, tells the decoder via SetCanOffloadAudio(true).
// Conditions checked: offload not disabled by system property, an offload
// track with format metadata exists, no video track, the whole resource is
// already cached (not streaming), the channel is Content ("music"), and the
// platform's canOffloadStream() accepts the format.
void MediaOmxReader::CheckAudioOffload()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  // A device property can globally disable audio offloading.
  char offloadProp[128];
  property_get("audio.offload.disable", offloadProp, "0");
  bool offloadDisable = atoi(offloadProp) != 0;
  if (offloadDisable) {
    return;
  }

  mAudioOffloadTrack = mOmxDecoder->GetAudioOffloadTrack();
  sp<MetaData> meta = (mAudioOffloadTrack.get()) ?
      mAudioOffloadTrack->getFormat() : nullptr;

  // Supporting audio offload only when there is no video, no streaming
  bool hasNoVideo = !mOmxDecoder->HasVideo();
  bool isNotStreaming
      = mDecoder->GetResource()->IsDataCachedToEndOfResource(0);

  // Not much benefit in trying to offload other channel types. Most of them
  // aren't supported and also duration would be less than a minute
  bool isTypeMusic = mAudioChannel == dom::AudioChannel::Content;

  DECODER_LOG(PR_LOG_DEBUG, ("%s meta %p, no video %d, no streaming %d,"
      " channel type %d", __FUNCTION__, meta.get(), hasNoVideo,
      isNotStreaming, mAudioChannel));

  if ((meta.get()) && hasNoVideo && isNotStreaming && isTypeMusic &&
      canOffloadStream(meta, false, false, AUDIO_STREAM_MUSIC)) {
    DECODER_LOG(PR_LOG_DEBUG, ("Can offload this audio stream"));
    mDecoder->SetCanOffloadAudio(true);
  }
}
michael@0 | 468 | #endif |
michael@0 | 469 | |
michael@0 | 470 | } // namespace mozilla |