/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaStreamGraphImpl.h"
#include "mozilla/LinkedList.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/unused.h"

#include "AudioSegment.h"
#include "VideoSegment.h"
#include "nsContentUtils.h"
#include "nsIAppShell.h"
#include "nsIObserver.h"
#include "nsPrintfCString.h"
#include "nsServiceManagerUtils.h"
#include "nsWidgetsCID.h"
#include "prerror.h"
#include "prlog.h"
#include "mozilla/Attributes.h"
#include "TrackUnionStream.h"
#include "ImageContainer.h"
#include "AudioChannelService.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "AudioNodeExternalInputStream.h"
#include <algorithm>
#include "DOMMediaStream.h"
#include "GeckoProfiler.h"
#include "mozilla/unused.h"
#include "speex/speex_resampler.h"
#ifdef MOZ_WEBRTC
#include "AudioOutputObserver.h"
#endif

using namespace mozilla::layers;
using namespace mozilla::dom;
using namespace mozilla::gfx;

namespace mozilla {

#ifdef PR_LOGGING
PRLogModuleInfo* gMediaStreamGraphLog;
#define STREAM_LOG(type, msg) PR_LOG(gMediaStreamGraphLog, type, msg)
#else
#define STREAM_LOG(type, msg)
#endif

/**
 * The singleton graph instance.
 */
static MediaStreamGraphImpl* gGraph;

MediaStreamGraphImpl::~MediaStreamGraphImpl()
{
  NS_ASSERTION(IsEmpty(),
               "All streams should have been destroyed by messages from the main thread");
  STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p destroyed", this));
}


StreamTime
MediaStreamGraphImpl::GetDesiredBufferEnd(MediaStream* aStream)
{
  StreamTime current = mCurrentTime - aStream->mBufferStartTime;
  // When waking up media decoders, we need a longer safety margin, as it can
  // take more time to get new samples. A factor of two seems to work.
  return current +
         2 * MillisecondsToMediaTime(std::max(AUDIO_TARGET_MS, VIDEO_TARGET_MS));
}
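
// Illustrative note (added commentary, not part of the original file):
// With hypothetical targets of AUDIO_TARGET_MS = 100 and VIDEO_TARGET_MS = 100,
// GetDesiredBufferEnd would return the stream's current playback position plus
// 2 * 100 ms = 200 ms of margin; the factor of two is the wake-up safety margin
// described in the comment above.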

void
MediaStreamGraphImpl::FinishStream(MediaStream* aStream)
{
  if (aStream->mFinished)
    return;
  STREAM_LOG(PR_LOG_DEBUG, ("MediaStream %p will finish", aStream));
  aStream->mFinished = true;
  aStream->mBuffer.AdvanceKnownTracksTime(STREAM_TIME_MAX);
  // Force at least one more iteration of the control loop, since we rely
  // on UpdateCurrentTime to notify our listeners once the stream end
  // has been reached.
  EnsureNextIteration();

  SetStreamOrderDirty();
}

void
MediaStreamGraphImpl::AddStream(MediaStream* aStream)
{
  aStream->mBufferStartTime = mCurrentTime;
  *mStreams.AppendElement() = already_AddRefed<MediaStream>(aStream);
  STREAM_LOG(PR_LOG_DEBUG, ("Adding media stream %p to the graph", aStream));

  SetStreamOrderDirty();
}

void
MediaStreamGraphImpl::RemoveStream(MediaStream* aStream)
{
  // Remove references in mStreamUpdates before we allow aStream to die.
  // Pending updates are not needed (since the main thread has already given
  // up the stream) so we will just drop them.
  {
    MonitorAutoLock lock(mMonitor);
    for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) {
      if (mStreamUpdates[i].mStream == aStream) {
        mStreamUpdates[i].mStream = nullptr;
      }
    }
  }

  // Ensure that mMixer is updated when necessary.
  SetStreamOrderDirty();

  // This unrefs the stream, probably destroying it
  mStreams.RemoveElement(aStream);

  STREAM_LOG(PR_LOG_DEBUG, ("Removing media stream %p from the graph", aStream));
}

void
MediaStreamGraphImpl::UpdateConsumptionState(SourceMediaStream* aStream)
{
  MediaStreamListener::Consumption state =
    aStream->mIsConsumed ? MediaStreamListener::CONSUMED
                         : MediaStreamListener::NOT_CONSUMED;
  if (state != aStream->mLastConsumptionState) {
    aStream->mLastConsumptionState = state;
    for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
      MediaStreamListener* l = aStream->mListeners[j];
      l->NotifyConsumptionChanged(this, state);
    }
  }
}

void
MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream,
                                          GraphTime aDesiredUpToTime,
                                          bool* aEnsureNextIteration)
{
  bool finished;
  {
    MutexAutoLock lock(aStream->mMutex);
    if (aStream->mPullEnabled && !aStream->mFinished &&
        !aStream->mListeners.IsEmpty()) {
      // Compute how much stream time we'll need assuming we don't block
      // the stream at all between mStateComputedTime and
      // aDesiredUpToTime.
      StreamTime t =
        GraphTimeToStreamTime(aStream, mStateComputedTime) +
        (aDesiredUpToTime - mStateComputedTime);
      STREAM_LOG(PR_LOG_DEBUG+1, ("Calling NotifyPull aStream=%p t=%f current end=%f", aStream,
                                  MediaTimeToSeconds(t),
                                  MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
      if (t > aStream->mBuffer.GetEnd()) {
        *aEnsureNextIteration = true;
#ifdef DEBUG
        if (aStream->mListeners.Length() == 0) {
          STREAM_LOG(PR_LOG_ERROR, ("No listeners in NotifyPull aStream=%p desired=%f current end=%f",
                                    aStream, MediaTimeToSeconds(t),
                                    MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
          aStream->DumpTrackInfo();
        }
#endif
        for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
          MediaStreamListener* l = aStream->mListeners[j];
          {
            MutexAutoUnlock unlock(aStream->mMutex);
            l->NotifyPull(this, t);
          }
        }
      }
    }
    finished = aStream->mUpdateFinished;
    for (int32_t i = aStream->mUpdateTracks.Length() - 1; i >= 0; --i) {
      SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
      aStream->ApplyTrackDisabling(data->mID, data->mData);
      for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
        MediaStreamListener* l = aStream->mListeners[j];
        TrackTicks offset = (data->mCommands & SourceMediaStream::TRACK_CREATE)
            ? data->mStart : aStream->mBuffer.FindTrack(data->mID)->GetSegment()->GetDuration();
        l->NotifyQueuedTrackChanges(this, data->mID, data->mOutputRate,
                                    offset, data->mCommands, *data->mData);
      }
      if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
        MediaSegment* segment = data->mData.forget();
        STREAM_LOG(PR_LOG_DEBUG, ("SourceMediaStream %p creating track %d, rate %d, start %lld, initial end %lld",
                                  aStream, data->mID, data->mOutputRate, int64_t(data->mStart),
                                  int64_t(segment->GetDuration())));

        aStream->mBuffer.AddTrack(data->mID, data->mOutputRate, data->mStart, segment);
        // The track has taken ownership of data->mData, so let's replace
        // data->mData with an empty clone.
        data->mData = segment->CreateEmptyClone();
        data->mCommands &= ~SourceMediaStream::TRACK_CREATE;
      } else if (data->mData->GetDuration() > 0) {
        MediaSegment* dest = aStream->mBuffer.FindTrack(data->mID)->GetSegment();
        STREAM_LOG(PR_LOG_DEBUG+1, ("SourceMediaStream %p track %d, advancing end from %lld to %lld",
                                    aStream, data->mID,
                                    int64_t(dest->GetDuration()),
                                    int64_t(dest->GetDuration() + data->mData->GetDuration())));
        dest->AppendFrom(data->mData);
      }
      if (data->mCommands & SourceMediaStream::TRACK_END) {
        aStream->mBuffer.FindTrack(data->mID)->SetEnded();
        aStream->mUpdateTracks.RemoveElementAt(i);
      }
    }
    if (!aStream->mFinished) {
      aStream->mBuffer.AdvanceKnownTracksTime(aStream->mUpdateKnownTracksTime);
    }
  }
  if (aStream->mBuffer.GetEnd() > 0) {
    aStream->mHasCurrentData = true;
  }
  if (finished) {
    FinishStream(aStream);
  }
}

void
MediaStreamGraphImpl::UpdateBufferSufficiencyState(SourceMediaStream* aStream)
{
  StreamTime desiredEnd = GetDesiredBufferEnd(aStream);
  nsTArray<SourceMediaStream::ThreadAndRunnable> runnables;

  {
    MutexAutoLock lock(aStream->mMutex);
    for (uint32_t i = 0; i < aStream->mUpdateTracks.Length(); ++i) {
      SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
      if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
        // This track hasn't been created yet, so we have no sufficiency
        // data. The track will be created in the next iteration of the
        // control loop and then we'll fire insufficiency notifications
        // if necessary.
        continue;
      }
      if (data->mCommands & SourceMediaStream::TRACK_END) {
        // This track will end, so no point in firing not-enough-data
        // callbacks.
        continue;
      }
      StreamBuffer::Track* track = aStream->mBuffer.FindTrack(data->mID);
      // Note that track->IsEnded() must be false, otherwise we would have
      // removed the track from mUpdateTracks already.
      NS_ASSERTION(!track->IsEnded(), "What is this track doing here?");
      data->mHaveEnough = track->GetEndTimeRoundDown() >= desiredEnd;
      if (!data->mHaveEnough) {
        runnables.MoveElementsFrom(data->mDispatchWhenNotEnough);
      }
    }
  }

  for (uint32_t i = 0; i < runnables.Length(); ++i) {
    runnables[i].mTarget->Dispatch(runnables[i].mRunnable, 0);
  }
}

StreamTime
MediaStreamGraphImpl::GraphTimeToStreamTime(MediaStream* aStream,
                                            GraphTime aTime)
{
  NS_ASSERTION(aTime <= mStateComputedTime,
               "Don't ask about times where we haven't made blocking decisions yet");
  if (aTime <= mCurrentTime) {
    return std::max<StreamTime>(0, aTime - aStream->mBufferStartTime);
  }
  GraphTime t = mCurrentTime;
  StreamTime s = t - aStream->mBufferStartTime;
  while (t < aTime) {
    GraphTime end;
    if (!aStream->mBlocked.GetAt(t, &end)) {
      s += std::min(aTime, end) - t;
    }
    t = end;
  }
  return std::max<StreamTime>(0, s);
}
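
// Illustrative note (added commentary, not part of the original file):
// GraphTimeToStreamTime only accumulates stream time over intervals where the
// stream is not blocked. Hypothetical example: with mBufferStartTime == 0 and
// mCurrentTime == 5s, stream time starts at 5s; if the stream is blocked over
// the graph interval [6s, 7s), then graph time 8s maps to stream time
// 5s + (6s - 5s) + (8s - 7s) = 7s, i.e. the blocked second contributes nothing.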

StreamTime
MediaStreamGraphImpl::GraphTimeToStreamTimeOptimistic(MediaStream* aStream,
                                                      GraphTime aTime)
{
  GraphTime computedUpToTime = std::min(mStateComputedTime, aTime);
  StreamTime s = GraphTimeToStreamTime(aStream, computedUpToTime);
  return s + (aTime - computedUpToTime);
}

GraphTime
MediaStreamGraphImpl::StreamTimeToGraphTime(MediaStream* aStream,
                                            StreamTime aTime, uint32_t aFlags)
{
  if (aTime >= STREAM_TIME_MAX) {
    return GRAPH_TIME_MAX;
  }
  MediaTime bufferElapsedToCurrentTime = mCurrentTime - aStream->mBufferStartTime;
  if (aTime < bufferElapsedToCurrentTime ||
      (aTime == bufferElapsedToCurrentTime && !(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL))) {
    return aTime + aStream->mBufferStartTime;
  }

  MediaTime streamAmount = aTime - bufferElapsedToCurrentTime;
  NS_ASSERTION(streamAmount >= 0, "Can't answer queries before current time");

  GraphTime t = mCurrentTime;
  while (t < GRAPH_TIME_MAX) {
    if (!(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL) && streamAmount == 0) {
      return t;
    }
    bool blocked;
    GraphTime end;
    if (t < mStateComputedTime) {
      blocked = aStream->mBlocked.GetAt(t, &end);
      end = std::min(end, mStateComputedTime);
    } else {
      blocked = false;
      end = GRAPH_TIME_MAX;
    }
    if (blocked) {
      t = end;
    } else {
      if (streamAmount == 0) {
        // No more stream time to consume at time t, so we're done.
        break;
      }
      MediaTime consume = std::min(end - t, streamAmount);
      streamAmount -= consume;
      t += consume;
    }
  }
  return t;
}
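
// Illustrative note (added commentary, not part of the original file):
// StreamTimeToGraphTime is the inverse mapping: starting at mCurrentTime it
// walks forward, skipping blocked graph intervals, until aTime of stream time
// has been consumed. Passing INCLUDE_TRAILING_BLOCKED_INTERVAL extends the
// result across any blocked interval that immediately follows the consumed
// time, which is how WillUnderrun below computes a stream's bufferEnd.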

GraphTime
MediaStreamGraphImpl::GetAudioPosition(MediaStream* aStream)
{
  if (aStream->mAudioOutputStreams.IsEmpty()) {
    return mCurrentTime;
  }
  int64_t positionInFrames = aStream->mAudioOutputStreams[0].mStream->GetPositionInFrames();
  if (positionInFrames < 0) {
    return mCurrentTime;
  }
  return aStream->mAudioOutputStreams[0].mAudioPlaybackStartTime +
         TicksToTimeRoundDown(mSampleRate,
                              positionInFrames);
}

void
MediaStreamGraphImpl::UpdateCurrentTime()
{
  GraphTime prevCurrentTime, nextCurrentTime;
  if (mRealtime) {
    TimeStamp now = TimeStamp::Now();
    prevCurrentTime = mCurrentTime;
    nextCurrentTime =
      SecondsToMediaTime((now - mCurrentTimeStamp).ToSeconds()) + mCurrentTime;

    mCurrentTimeStamp = now;
    STREAM_LOG(PR_LOG_DEBUG+1, ("Updating current time to %f (real %f, mStateComputedTime %f)",
                                MediaTimeToSeconds(nextCurrentTime),
                                (now - mInitialTimeStamp).ToSeconds(),
                                MediaTimeToSeconds(mStateComputedTime)));
  } else {
    prevCurrentTime = mCurrentTime;
    nextCurrentTime = mCurrentTime + MillisecondsToMediaTime(MEDIA_GRAPH_TARGET_PERIOD_MS);
    STREAM_LOG(PR_LOG_DEBUG+1, ("Updating offline current time to %f (mStateComputedTime %f)",
                                MediaTimeToSeconds(nextCurrentTime),
                                MediaTimeToSeconds(mStateComputedTime)));
  }

  if (mStateComputedTime < nextCurrentTime) {
    STREAM_LOG(PR_LOG_WARNING, ("Media graph global underrun detected"));
    nextCurrentTime = mStateComputedTime;
  }

  if (prevCurrentTime >= nextCurrentTime) {
    NS_ASSERTION(prevCurrentTime == nextCurrentTime, "Time can't go backwards!");
    // This could happen due to low clock resolution, maybe?
    STREAM_LOG(PR_LOG_DEBUG, ("Time did not advance"));
    // There's not much left to do here, but the code below that notifies
    // listeners that streams have ended still needs to run.
  }

  nsTArray<MediaStream*> streamsReadyToFinish;
  nsAutoTArray<bool,800> streamHasOutput;
  streamHasOutput.SetLength(mStreams.Length());
  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
    MediaStream* stream = mStreams[i];

    // Calculate blocked time and fire Blocked/Unblocked events
    GraphTime blockedTime = 0;
    GraphTime t = prevCurrentTime;
    // include |nextCurrentTime| to ensure NotifyBlockingChanged() is called
    // before NotifyFinished() when |nextCurrentTime == stream end time|
    while (t <= nextCurrentTime) {
      GraphTime end;
      bool blocked = stream->mBlocked.GetAt(t, &end);
      if (blocked) {
        blockedTime += std::min(end, nextCurrentTime) - t;
      }
      if (blocked != stream->mNotifiedBlocked) {
        for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
          MediaStreamListener* l = stream->mListeners[j];
          l->NotifyBlockingChanged(this,
              blocked ? MediaStreamListener::BLOCKED : MediaStreamListener::UNBLOCKED);
        }
        stream->mNotifiedBlocked = blocked;
      }
      t = end;
    }

    stream->AdvanceTimeVaryingValuesToCurrentTime(nextCurrentTime, blockedTime);
    // Advance mBlocked last so that implementations of
    // AdvanceTimeVaryingValuesToCurrentTime can rely on the value of mBlocked.
    stream->mBlocked.AdvanceCurrentTime(nextCurrentTime);

    streamHasOutput[i] = blockedTime < nextCurrentTime - prevCurrentTime;
    // Make this an assertion when bug 957832 is fixed.
    NS_WARN_IF_FALSE(!streamHasOutput[i] || !stream->mNotifiedFinished,
                     "Shouldn't have already notified of finish *and* have output!");

    if (stream->mFinished && !stream->mNotifiedFinished) {
      streamsReadyToFinish.AppendElement(stream);
    }
    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p bufferStartTime=%f blockedTime=%f",
                                stream, MediaTimeToSeconds(stream->mBufferStartTime),
                                MediaTimeToSeconds(blockedTime)));
  }

  mCurrentTime = nextCurrentTime;

  // Do these after setting mCurrentTime so that StreamTimeToGraphTime works properly.
  for (uint32_t i = 0; i < streamHasOutput.Length(); ++i) {
    if (!streamHasOutput[i]) {
      continue;
    }
    MediaStream* stream = mStreams[i];
    for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
      MediaStreamListener* l = stream->mListeners[j];
      l->NotifyOutput(this, mCurrentTime);
    }
  }

  for (uint32_t i = 0; i < streamsReadyToFinish.Length(); ++i) {
    MediaStream* stream = streamsReadyToFinish[i];
    // The stream is fully finished when all of its track data has been played
    // out.
    if (mCurrentTime >=
        stream->StreamTimeToGraphTime(stream->GetStreamBuffer().GetAllTracksEnd())) {
      NS_WARN_IF_FALSE(stream->mNotifiedBlocked,
                       "Should've notified blocked=true for a fully finished stream");
      stream->mNotifiedFinished = true;
      stream->mLastPlayedVideoFrame.SetNull();
      SetStreamOrderDirty();
      for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
        MediaStreamListener* l = stream->mListeners[j];
        l->NotifyFinished(this);
      }
    }
  }
}

bool
MediaStreamGraphImpl::WillUnderrun(MediaStream* aStream, GraphTime aTime,
                                   GraphTime aEndBlockingDecisions, GraphTime* aEnd)
{
  // Finished streams can't underrun. ProcessedMediaStreams also can't cause
  // underrun currently, since we'll always be able to produce data for them
  // unless they block on some other stream.
  if (aStream->mFinished || aStream->AsProcessedStream()) {
    return false;
  }
  GraphTime bufferEnd =
    StreamTimeToGraphTime(aStream, aStream->GetBufferEnd(),
                          INCLUDE_TRAILING_BLOCKED_INTERVAL);
#ifdef DEBUG
  if (bufferEnd < mCurrentTime) {
    STREAM_LOG(PR_LOG_ERROR, ("MediaStream %p underrun, "
                              "bufferEnd %f < mCurrentTime %f (%lld < %lld), Streamtime %lld",
                              aStream, MediaTimeToSeconds(bufferEnd), MediaTimeToSeconds(mCurrentTime),
                              bufferEnd, mCurrentTime, aStream->GetBufferEnd()));
    aStream->DumpTrackInfo();
    NS_ASSERTION(bufferEnd >= mCurrentTime, "Buffer underran");
  }
#endif
  // We should block after bufferEnd.
  if (bufferEnd <= aTime) {
    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to data underrun, "
                                "bufferEnd %f",
                                aStream, MediaTimeToSeconds(bufferEnd)));
    return true;
  }
  // We should keep blocking if we're currently blocked and we don't have
  // data all the way through to aEndBlockingDecisions. If we don't have
  // data all the way through to aEndBlockingDecisions, we'll block soon,
  // but we might as well remain unblocked and play the data we've got while
  // we can.
  if (bufferEnd <= aEndBlockingDecisions && aStream->mBlocked.GetBefore(aTime)) {
    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to speculative data underrun, "
                                "bufferEnd %f",
                                aStream, MediaTimeToSeconds(bufferEnd)));
    return true;
  }
  // Reconsider decisions at bufferEnd
  *aEnd = std::min(*aEnd, bufferEnd);
  return false;
}

void
MediaStreamGraphImpl::MarkConsumed(MediaStream* aStream)
{
  if (aStream->mIsConsumed) {
    return;
  }
  aStream->mIsConsumed = true;

  ProcessedMediaStream* ps = aStream->AsProcessedStream();
  if (!ps) {
    return;
  }
  // Mark all the inputs to this stream as consumed
  for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
    MarkConsumed(ps->mInputs[i]->mSource);
  }
}

void
MediaStreamGraphImpl::UpdateStreamOrderForStream(mozilla::LinkedList<MediaStream>* aStack,
                                                 already_AddRefed<MediaStream> aStream)
{
  nsRefPtr<MediaStream> stream = aStream;
  NS_ASSERTION(!stream->mHasBeenOrdered, "stream should not have already been ordered");
  if (stream->mIsOnOrderingStack) {
    MediaStream* iter = aStack->getLast();
    AudioNodeStream* ns = stream->AsAudioNodeStream();
    bool delayNodePresent = ns ? ns->Engine()->AsDelayNodeEngine() != nullptr : false;
    bool cycleFound = false;
    if (iter) {
      do {
        cycleFound = true;
        iter->AsProcessedStream()->mInCycle = true;
        AudioNodeStream* ns = iter->AsAudioNodeStream();
        if (ns && ns->Engine()->AsDelayNodeEngine()) {
          delayNodePresent = true;
        }
        iter = iter->getPrevious();
      } while (iter && iter != stream);
    }
    if (cycleFound && !delayNodePresent) {
      // If we have detected a cycle, the previous loop should exit with stream
      // == iter, or the node is connected to itself. Go back in the cycle and
      // mute all nodes we find, or just mute the node itself.
      if (!iter) {
        // The node is connected to itself.
        // There can't be a non-AudioNodeStream here, because only AudioNodes
        // can be self-connected.
        iter = aStack->getLast();
        MOZ_ASSERT(iter->AsAudioNodeStream());
        iter->AsAudioNodeStream()->Mute();
      } else {
        MOZ_ASSERT(iter);
        do {
          AudioNodeStream* nodeStream = iter->AsAudioNodeStream();
          if (nodeStream) {
            nodeStream->Mute();
          }
        } while ((iter = iter->getNext()));
      }
    }
    return;
  }
  ProcessedMediaStream* ps = stream->AsProcessedStream();
  if (ps) {
    aStack->insertBack(stream);
    stream->mIsOnOrderingStack = true;
    for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
      MediaStream* source = ps->mInputs[i]->mSource;
      if (!source->mHasBeenOrdered) {
        nsRefPtr<MediaStream> s = source;
        UpdateStreamOrderForStream(aStack, s.forget());
      }
    }
    aStack->popLast();
    stream->mIsOnOrderingStack = false;
  }

  stream->mHasBeenOrdered = true;
  *mStreams.AppendElement() = stream.forget();
}

static void AudioMixerCallback(AudioDataValue* aMixedBuffer,
                               AudioSampleFormat aFormat,
                               uint32_t aChannels,
                               uint32_t aFrames,
                               uint32_t aSampleRate)
{
  // Need an api to register mixer callbacks, bug 989921
#ifdef MOZ_WEBRTC
  if (aFrames > 0 && aChannels > 0) {
    // XXX need Observer base class and registration API
    if (gFarendObserver) {
      gFarendObserver->InsertFarEnd(aMixedBuffer, aFrames, false,
                                    aSampleRate, aChannels, aFormat);
    }
  }
#endif
}

void
MediaStreamGraphImpl::UpdateStreamOrder()
{
  mOldStreams.SwapElements(mStreams);
  mStreams.ClearAndRetainStorage();
  bool shouldMix = false;
  for (uint32_t i = 0; i < mOldStreams.Length(); ++i) {
    MediaStream* stream = mOldStreams[i];
    stream->mHasBeenOrdered = false;
    stream->mIsConsumed = false;
    stream->mIsOnOrderingStack = false;
    stream->mInBlockingSet = false;
    if (stream->AsSourceStream() &&
        stream->AsSourceStream()->NeedsMixing()) {
      shouldMix = true;
    }
    ProcessedMediaStream* ps = stream->AsProcessedStream();
    if (ps) {
      ps->mInCycle = false;
      AudioNodeStream* ns = ps->AsAudioNodeStream();
      if (ns) {
        ns->Unmute();
      }
    }
  }

  if (!mMixer && shouldMix) {
    mMixer = new AudioMixer(AudioMixerCallback);
  } else if (mMixer && !shouldMix) {
    mMixer = nullptr;
  }

  mozilla::LinkedList<MediaStream> stack;
  for (uint32_t i = 0; i < mOldStreams.Length(); ++i) {
    nsRefPtr<MediaStream>& s = mOldStreams[i];
    if (s->IsIntrinsicallyConsumed()) {
      MarkConsumed(s);
    }
    if (!s->mHasBeenOrdered) {
      UpdateStreamOrderForStream(&stack, s.forget());
    }
  }
}

void
MediaStreamGraphImpl::RecomputeBlocking(GraphTime aEndBlockingDecisions)
{
  bool blockingDecisionsWillChange = false;

  STREAM_LOG(PR_LOG_DEBUG+1, ("Media graph %p computing blocking for time %f",
                              this, MediaTimeToSeconds(mStateComputedTime)));
  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
    MediaStream* stream = mStreams[i];
    if (!stream->mInBlockingSet) {
      // Compute a partition of the streams containing 'stream' such that we can
      // compute the blocking status of each subset independently.
      nsAutoTArray<MediaStream*,10> streamSet;
      AddBlockingRelatedStreamsToSet(&streamSet, stream);

      GraphTime end;
      for (GraphTime t = mStateComputedTime;
           t < aEndBlockingDecisions; t = end) {
        end = GRAPH_TIME_MAX;
        RecomputeBlockingAt(streamSet, t, aEndBlockingDecisions, &end);
        if (end < GRAPH_TIME_MAX) {
          blockingDecisionsWillChange = true;
        }
      }
    }

    GraphTime end;
    stream->mBlocked.GetAt(mCurrentTime, &end);
    if (end < GRAPH_TIME_MAX) {
      blockingDecisionsWillChange = true;
    }
  }
  STREAM_LOG(PR_LOG_DEBUG+1, ("Media graph %p computed blocking for interval %f to %f",
                              this, MediaTimeToSeconds(mStateComputedTime),
                              MediaTimeToSeconds(aEndBlockingDecisions)));
  mStateComputedTime = aEndBlockingDecisions;

  if (blockingDecisionsWillChange) {
    // Make sure we wake up to notify listeners about these changes.
    EnsureNextIteration();
  }
}

void
MediaStreamGraphImpl::AddBlockingRelatedStreamsToSet(nsTArray<MediaStream*>* aStreams,
                                                     MediaStream* aStream)
{
  if (aStream->mInBlockingSet)
    return;
  aStream->mInBlockingSet = true;
  aStreams->AppendElement(aStream);
  for (uint32_t i = 0; i < aStream->mConsumers.Length(); ++i) {
    MediaInputPort* port = aStream->mConsumers[i];
    if (port->mFlags & (MediaInputPort::FLAG_BLOCK_INPUT | MediaInputPort::FLAG_BLOCK_OUTPUT)) {
      AddBlockingRelatedStreamsToSet(aStreams, port->mDest);
    }
  }
  ProcessedMediaStream* ps = aStream->AsProcessedStream();
  if (ps) {
    for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
      MediaInputPort* port = ps->mInputs[i];
      if (port->mFlags & (MediaInputPort::FLAG_BLOCK_INPUT | MediaInputPort::FLAG_BLOCK_OUTPUT)) {
        AddBlockingRelatedStreamsToSet(aStreams, port->mSource);
      }
    }
  }
}

void
MediaStreamGraphImpl::MarkStreamBlocking(MediaStream* aStream)
{
  if (aStream->mBlockInThisPhase)
    return;
  aStream->mBlockInThisPhase = true;
  for (uint32_t i = 0; i < aStream->mConsumers.Length(); ++i) {
    MediaInputPort* port = aStream->mConsumers[i];
    if (port->mFlags & MediaInputPort::FLAG_BLOCK_OUTPUT) {
      MarkStreamBlocking(port->mDest);
    }
  }
  ProcessedMediaStream* ps = aStream->AsProcessedStream();
  if (ps) {
    for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
      MediaInputPort* port = ps->mInputs[i];
      if (port->mFlags & MediaInputPort::FLAG_BLOCK_INPUT) {
        MarkStreamBlocking(port->mSource);
      }
    }
  }
}

void
MediaStreamGraphImpl::RecomputeBlockingAt(const nsTArray<MediaStream*>& aStreams,
                                          GraphTime aTime,
                                          GraphTime aEndBlockingDecisions,
                                          GraphTime* aEnd)
{
  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
    MediaStream* stream = aStreams[i];
    stream->mBlockInThisPhase = false;
  }

  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
    MediaStream* stream = aStreams[i];

    if (stream->mFinished) {
      GraphTime endTime = StreamTimeToGraphTime(stream,
          stream->GetStreamBuffer().GetAllTracksEnd());
      if (endTime <= aTime) {
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is blocked due to being finished", stream));
        // We'll block indefinitely
        MarkStreamBlocking(stream);
        *aEnd = std::min(*aEnd, aEndBlockingDecisions);
        continue;
      } else {
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is finished, but not blocked yet (end at %f, with blocking at %f)",
                                    stream, MediaTimeToSeconds(stream->GetBufferEnd()),
                                    MediaTimeToSeconds(endTime)));
        *aEnd = std::min(*aEnd, endTime);
      }
    }

    GraphTime end;
    bool explicitBlock = stream->mExplicitBlockerCount.GetAt(aTime, &end) > 0;
    *aEnd = std::min(*aEnd, end);
    if (explicitBlock) {
      STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is blocked due to explicit blocker", stream));
      MarkStreamBlocking(stream);
      continue;
    }

    bool underrun = WillUnderrun(stream, aTime, aEndBlockingDecisions, aEnd);
    if (underrun) {
      // We'll block indefinitely
      MarkStreamBlocking(stream);
      *aEnd = std::min(*aEnd, aEndBlockingDecisions);
      continue;
    }
  }
  NS_ASSERTION(*aEnd > aTime, "Failed to advance!");

  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
    MediaStream* stream = aStreams[i];
    stream->mBlocked.SetAtAndAfter(aTime, stream->mBlockInThisPhase);
  }
}

void
MediaStreamGraphImpl::NotifyHasCurrentData(MediaStream* aStream)
{
  if (!aStream->mNotifiedHasCurrentData && aStream->mHasCurrentData) {
    for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
      MediaStreamListener* l = aStream->mListeners[j];
      l->NotifyHasCurrentData(this);
    }
    aStream->mNotifiedHasCurrentData = true;
  }
}

void
MediaStreamGraphImpl::CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTime,
                                                  MediaStream* aStream)
{
  MOZ_ASSERT(mRealtime, "Should only attempt to create audio streams in real-time mode");

  nsAutoTArray<bool,2> audioOutputStreamsFound;
  for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
    audioOutputStreamsFound.AppendElement(false);
  }

  if (!aStream->mAudioOutputs.IsEmpty()) {
    for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::AUDIO);
         !tracks.IsEnded(); tracks.Next()) {
      uint32_t i;
      for (i = 0; i < audioOutputStreamsFound.Length(); ++i) {
        if (aStream->mAudioOutputStreams[i].mTrackID == tracks->GetID()) {
          break;
        }
      }
      if (i < audioOutputStreamsFound.Length()) {
        audioOutputStreamsFound[i] = true;
      } else {
        // No output stream created for this track yet. Check if it's time to
        // create one.
        GraphTime startTime =
          StreamTimeToGraphTime(aStream, tracks->GetStartTimeRoundDown(),
                                INCLUDE_TRAILING_BLOCKED_INTERVAL);
        if (startTime >= mStateComputedTime) {
          // The stream wants to play audio, but nothing will play for the foreseeable
          // future, so don't create the stream.
          continue;
        }

        // Allocating an AudioStream would be slow, so we finish the Init async
        MediaStream::AudioOutputStream* audioOutputStream =
          aStream->mAudioOutputStreams.AppendElement();
        audioOutputStream->mAudioPlaybackStartTime = aAudioOutputStartTime;
        audioOutputStream->mBlockedAudioTime = 0;
        audioOutputStream->mLastTickWritten = 0;
        audioOutputStream->mStream = new AudioStream();
        // XXX for now, allocate stereo output. But we need to fix this to
        // match the system's ideal channel configuration.
        // NOTE: we presume this is either fast or async-under-the-covers
        audioOutputStream->mStream->Init(2, mSampleRate,
                                         aStream->mAudioChannelType,
                                         AudioStream::LowLatency);
        audioOutputStream->mTrackID = tracks->GetID();

        LogLatency(AsyncLatencyLogger::AudioStreamCreate,
                   reinterpret_cast<uint64_t>(aStream),
                   reinterpret_cast<int64_t>(audioOutputStream->mStream.get()));
      }
    }
  }

  for (int32_t i = audioOutputStreamsFound.Length() - 1; i >= 0; --i) {
    if (!audioOutputStreamsFound[i]) {
      aStream->mAudioOutputStreams[i].mStream->Shutdown();
      aStream->mAudioOutputStreams.RemoveElementAt(i);
    }
  }
}

TrackTicks
MediaStreamGraphImpl::PlayAudio(MediaStream* aStream,
                                GraphTime aFrom, GraphTime aTo)
{
  MOZ_ASSERT(mRealtime, "Should only attempt to play audio in realtime mode");

  TrackTicks ticksWritten = 0;
  // We compute the number of needed ticks by converting a difference of graph
  // time rather than by subtracting two converted stream times, to ensure that
  // the rounding between {Graph,Stream}Time and track ticks is not dependent
  // on the absolute value of the {Graph,Stream}Time, and so that the number of
  // ticks to play is the same for each cycle.
  TrackTicks ticksNeeded = TimeToTicksRoundDown(mSampleRate, aTo) - TimeToTicksRoundDown(mSampleRate, aFrom);

  if (aStream->mAudioOutputStreams.IsEmpty()) {
    return 0;
  }

  // When we're playing multiple copies of this stream at the same time, they're
  // perfectly correlated so adding volumes is the right thing to do.
  float volume = 0.0f;
  for (uint32_t i = 0; i < aStream->mAudioOutputs.Length(); ++i) {
    volume += aStream->mAudioOutputs[i].mVolume;
  }

  for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
    MediaStream::AudioOutputStream& audioOutput = aStream->mAudioOutputStreams[i];
    StreamBuffer::Track* track = aStream->mBuffer.FindTrack(audioOutput.mTrackID);
    AudioSegment* audio = track->Get<AudioSegment>();
    AudioSegment output;
    MOZ_ASSERT(track->GetRate() == mSampleRate);

    // offset and audioOutput.mLastTickWritten can differ by at most one sample,
    // because of the rounding issue. We track that to ensure we don't skip a
    // sample. One sample may be played twice, but this should not happen
    // again during an unblocked sequence of track samples.
    TrackTicks offset = track->TimeToTicksRoundDown(GraphTimeToStreamTime(aStream, aFrom));
    if (audioOutput.mLastTickWritten &&
        audioOutput.mLastTickWritten != offset) {
      // If there is a global underrun of the MSG, this property won't hold, and
      // we reset the sample count tracking.
      if (offset - audioOutput.mLastTickWritten == 1) {
        offset = audioOutput.mLastTickWritten;
      }
    }

    // We don't update aStream->mBufferStartTime here to account for
    // time spent blocked. Instead, we'll update it in UpdateCurrentTime after the
    // blocked period has completed. But we do need to make sure we play from the
    // right offsets in the stream buffer, even if we've already written silence for
    // some amount of blocked time after the current time.
    GraphTime t = aFrom;
    while (ticksNeeded) {
      GraphTime end;
      bool blocked = aStream->mBlocked.GetAt(t, &end);
      end = std::min(end, aTo);

      // Check how many ticks of sound we can provide if we are blocked some
      // time in the middle of this cycle.
      TrackTicks toWrite = 0;
      if (end >= aTo) {
        toWrite = ticksNeeded;
      } else {
        toWrite = TimeToTicksRoundDown(mSampleRate, end - aFrom);
      }
      ticksNeeded -= toWrite;

      if (blocked) {
        output.InsertNullDataAtStart(toWrite);
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld blocking-silence samples for %f to %f (%ld to %ld)\n",
                                    aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
                                    offset, offset + toWrite));
      } else {
        TrackTicks endTicksNeeded = offset + toWrite;
        TrackTicks endTicksAvailable = audio->GetDuration();
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld samples for %f to %f (samples %ld to %ld)\n",
                                    aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
                                    offset, endTicksNeeded));

        if (endTicksNeeded <= endTicksAvailable) {
          output.AppendSlice(*audio, offset, endTicksNeeded);
          offset = endTicksNeeded;
        } else {
          MOZ_ASSERT(track->IsEnded(), "Not enough data, and track not ended.");
          // If we are at the end of the track, maybe write the remaining
          // samples, and pad the output with silence.
          if (endTicksNeeded > endTicksAvailable &&
              offset < endTicksAvailable) {
            output.AppendSlice(*audio, offset, endTicksAvailable);
            toWrite -= endTicksAvailable - offset;
            offset = endTicksAvailable;
          }
          output.AppendNullData(toWrite);
        }
        output.ApplyVolume(volume);
      }
      t = end;
    }
    audioOutput.mLastTickWritten = offset;

    // Need unique id for stream & track - and we want it to match the inserter
    output.WriteTo(LATENCY_STREAM_ID(aStream, track->GetID()),
                   audioOutput.mStream, mMixer);
  }
  return ticksWritten;
}
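
// Illustrative note (added commentary, not part of the original file):
// ticksNeeded above is derived from the graph-time interval [aFrom, aTo)
// rather than from two separately converted stream times, so the rounding
// does not depend on the stream's absolute position (which shifts as blocked
// time accumulates). Any residual one-tick difference between the converted
// start offset and the previously written position is absorbed by the
// mLastTickWritten check, so at most one sample is replayed and none are
// skipped across iterations.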
michael@0 | 985 | |
michael@0 | 986 | static void |
michael@0 | 987 | SetImageToBlackPixel(PlanarYCbCrImage* aImage) |
michael@0 | 988 | { |
michael@0 | 989 | uint8_t blackPixel[] = { 0x10, 0x80, 0x80 }; |
michael@0 | 990 | |
michael@0 | 991 | PlanarYCbCrData data; |
michael@0 | 992 | data.mYChannel = blackPixel; |
michael@0 | 993 | data.mCbChannel = blackPixel + 1; |
michael@0 | 994 | data.mCrChannel = blackPixel + 2; |
michael@0 | 995 | data.mYStride = data.mCbCrStride = 1; |
michael@0 | 996 | data.mPicSize = data.mYSize = data.mCbCrSize = IntSize(1, 1); |
michael@0 | 997 | aImage->SetData(data); |
michael@0 | 998 | } |
michael@0 | 999 | |
michael@0 | 1000 | void |
michael@0 | 1001 | MediaStreamGraphImpl::PlayVideo(MediaStream* aStream) |
michael@0 | 1002 | { |
michael@0 | 1003 | MOZ_ASSERT(mRealtime, "Should only attempt to play video in realtime mode"); |
michael@0 | 1004 | |
michael@0 | 1005 | if (aStream->mVideoOutputs.IsEmpty()) |
michael@0 | 1006 | return; |
michael@0 | 1007 | |
michael@0 | 1008 | // Display the next frame a bit early. This is better than letting the current |
michael@0 | 1009 | // frame be displayed for too long. |
michael@0 | 1010 | GraphTime framePosition = mCurrentTime + MEDIA_GRAPH_TARGET_PERIOD_MS; |
michael@0 | 1011 | NS_ASSERTION(framePosition >= aStream->mBufferStartTime, "frame position before buffer?"); |
michael@0 | 1012 | StreamTime frameBufferTime = GraphTimeToStreamTime(aStream, framePosition); |
michael@0 | 1013 | |
michael@0 | 1014 | TrackTicks start; |
michael@0 | 1015 | const VideoFrame* frame = nullptr; |
michael@0 | 1016 | StreamBuffer::Track* track; |
michael@0 | 1017 | for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::VIDEO); |
michael@0 | 1018 | !tracks.IsEnded(); tracks.Next()) { |
michael@0 | 1019 | VideoSegment* segment = tracks->Get<VideoSegment>(); |
michael@0 | 1020 | TrackTicks thisStart; |
michael@0 | 1021 | const VideoFrame* thisFrame = |
michael@0 | 1022 | segment->GetFrameAt(tracks->TimeToTicksRoundDown(frameBufferTime), &thisStart); |
michael@0 | 1023 | if (thisFrame && thisFrame->GetImage()) { |
michael@0 | 1024 | start = thisStart; |
michael@0 | 1025 | frame = thisFrame; |
michael@0 | 1026 | track = tracks.get(); |
michael@0 | 1027 | } |
michael@0 | 1028 | } |
michael@0 | 1029 | if (!frame || *frame == aStream->mLastPlayedVideoFrame) |
michael@0 | 1030 | return; |
michael@0 | 1031 | |
michael@0 | 1032 | STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing video frame %p (%dx%d)", |
michael@0 | 1033 | aStream, frame->GetImage(), frame->GetIntrinsicSize().width, |
michael@0 | 1034 | frame->GetIntrinsicSize().height)); |
michael@0 | 1035 | GraphTime startTime = StreamTimeToGraphTime(aStream, |
michael@0 | 1036 | track->TicksToTimeRoundDown(start), INCLUDE_TRAILING_BLOCKED_INTERVAL); |
michael@0 | 1037 | TimeStamp targetTime = mCurrentTimeStamp + |
michael@0 | 1038 | TimeDuration::FromMilliseconds(double(startTime - mCurrentTime)); |
michael@0 | 1039 | for (uint32_t i = 0; i < aStream->mVideoOutputs.Length(); ++i) { |
michael@0 | 1040 | VideoFrameContainer* output = aStream->mVideoOutputs[i]; |
michael@0 | 1041 | |
michael@0 | 1042 | if (frame->GetForceBlack()) { |
michael@0 | 1043 | nsRefPtr<Image> image = |
michael@0 | 1044 | output->GetImageContainer()->CreateImage(ImageFormat::PLANAR_YCBCR); |
michael@0 | 1045 | if (image) { |
michael@0 | 1046 | // Sets the image to a single black pixel, which will be scaled to fill |
michael@0 | 1047 | // the rendered size. |
michael@0 | 1048 | SetImageToBlackPixel(static_cast<PlanarYCbCrImage*>(image.get())); |
michael@0 | 1049 | } |
michael@0 | 1050 | output->SetCurrentFrame(frame->GetIntrinsicSize(), image, |
michael@0 | 1051 | targetTime); |
michael@0 | 1052 | } else { |
michael@0 | 1053 | output->SetCurrentFrame(frame->GetIntrinsicSize(), frame->GetImage(), |
michael@0 | 1054 | targetTime); |
michael@0 | 1055 | } |
michael@0 | 1056 | |
michael@0 | 1057 | nsCOMPtr<nsIRunnable> event = |
michael@0 | 1058 | NS_NewRunnableMethod(output, &VideoFrameContainer::Invalidate); |
michael@0 | 1059 | NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL); |
michael@0 | 1060 | } |
michael@0 | 1061 | if (!aStream->mNotifiedFinished) { |
michael@0 | 1062 | aStream->mLastPlayedVideoFrame = *frame; |
michael@0 | 1063 | } |
michael@0 | 1064 | } |
michael@0 | 1065 | |
michael@0 | 1066 | bool |
michael@0 | 1067 | MediaStreamGraphImpl::ShouldUpdateMainThread() |
michael@0 | 1068 | { |
michael@0 | 1069 | if (mRealtime) { |
michael@0 | 1070 | return true; |
michael@0 | 1071 | } |
michael@0 | 1072 | |
michael@0 | 1073 | TimeStamp now = TimeStamp::Now(); |
michael@0 | 1074 | if ((now - mLastMainThreadUpdate).ToMilliseconds() > MEDIA_GRAPH_TARGET_PERIOD_MS) { |
michael@0 | 1075 | mLastMainThreadUpdate = now; |
michael@0 | 1076 | return true; |
michael@0 | 1077 | } |
michael@0 | 1078 | return false; |
michael@0 | 1079 | } |
michael@0 | 1080 | |
michael@0 | 1081 | void |
michael@0 | 1082 | MediaStreamGraphImpl::PrepareUpdatesToMainThreadState(bool aFinalUpdate) |
michael@0 | 1083 | { |
michael@0 | 1084 | mMonitor.AssertCurrentThreadOwns(); |
michael@0 | 1085 | |
michael@0 | 1086 | // We don't want to frequently update the main thread about timing update |
michael@0 | 1087 | // when we are not running in realtime. |
michael@0 | 1088 | if (aFinalUpdate || ShouldUpdateMainThread()) { |
michael@0 | 1089 | mStreamUpdates.SetCapacity(mStreamUpdates.Length() + mStreams.Length()); |
michael@0 | 1090 | for (uint32_t i = 0; i < mStreams.Length(); ++i) { |
michael@0 | 1091 | MediaStream* stream = mStreams[i]; |
michael@0 | 1092 | if (!stream->MainThreadNeedsUpdates()) { |
michael@0 | 1093 | continue; |
michael@0 | 1094 | } |
michael@0 | 1095 | StreamUpdate* update = mStreamUpdates.AppendElement(); |
michael@0 | 1096 | update->mGraphUpdateIndex = stream->mGraphUpdateIndices.GetAt(mCurrentTime); |
michael@0 | 1097 | update->mStream = stream; |
michael@0 | 1098 | update->mNextMainThreadCurrentTime = |
michael@0 | 1099 | GraphTimeToStreamTime(stream, mCurrentTime); |
michael@0 | 1100 | update->mNextMainThreadFinished = stream->mNotifiedFinished; |
michael@0 | 1101 | } |
michael@0 | 1102 | if (!mPendingUpdateRunnables.IsEmpty()) { |
michael@0 | 1103 | mUpdateRunnables.MoveElementsFrom(mPendingUpdateRunnables); |
michael@0 | 1104 | } |
michael@0 | 1105 | } |
michael@0 | 1106 | |
michael@0 | 1107 | // Don't send the message to the main thread if it's not going to have |
michael@0 | 1108 | // any work to do. |
michael@0 | 1109 | if (aFinalUpdate || |
michael@0 | 1110 | !mUpdateRunnables.IsEmpty() || |
michael@0 | 1111 | !mStreamUpdates.IsEmpty()) { |
michael@0 | 1112 | EnsureStableStateEventPosted(); |
michael@0 | 1113 | } |
michael@0 | 1114 | } |
michael@0 | 1115 | |
michael@0 | 1116 | void |
michael@0 | 1117 | MediaStreamGraphImpl::EnsureImmediateWakeUpLocked(MonitorAutoLock& aLock) |
michael@0 | 1118 | { |
michael@0 | 1119 | if (mWaitState == WAITSTATE_WAITING_FOR_NEXT_ITERATION || |
michael@0 | 1120 | mWaitState == WAITSTATE_WAITING_INDEFINITELY) { |
michael@0 | 1121 | mWaitState = WAITSTATE_WAKING_UP; |
michael@0 | 1122 | aLock.Notify(); |
michael@0 | 1123 | } |
michael@0 | 1124 | } |
michael@0 | 1125 | |
michael@0 | 1126 | void |
michael@0 | 1127 | MediaStreamGraphImpl::EnsureNextIteration() |
michael@0 | 1128 | { |
michael@0 | 1129 | MonitorAutoLock lock(mMonitor); |
michael@0 | 1130 | EnsureNextIterationLocked(lock); |
michael@0 | 1131 | } |
michael@0 | 1132 | |
michael@0 | 1133 | void |
michael@0 | 1134 | MediaStreamGraphImpl::EnsureNextIterationLocked(MonitorAutoLock& aLock) |
michael@0 | 1135 | { |
michael@0 | 1136 | if (mNeedAnotherIteration) |
michael@0 | 1137 | return; |
michael@0 | 1138 | mNeedAnotherIteration = true; |
michael@0 | 1139 | if (mWaitState == WAITSTATE_WAITING_INDEFINITELY) { |
michael@0 | 1140 | mWaitState = WAITSTATE_WAKING_UP; |
michael@0 | 1141 | aLock.Notify(); |
michael@0 | 1142 | } |
michael@0 | 1143 | } |
michael@0 | 1144 | |
michael@0 | 1145 | /** |
michael@0 | 1146 |  * Returns the smallest value of t such that |
michael@0 | 1147 | * TimeToTicksRoundUp(aSampleRate, t) is a multiple of WEBAUDIO_BLOCK_SIZE |
michael@0 | 1148 | * and floor(TimeToTicksRoundUp(aSampleRate, t)/WEBAUDIO_BLOCK_SIZE) > |
michael@0 | 1149 | * floor(TimeToTicksRoundUp(aSampleRate, aTime)/WEBAUDIO_BLOCK_SIZE). |
michael@0 | 1150 | */ |
michael@0 | 1151 | static GraphTime |
michael@0 | 1152 | RoundUpToNextAudioBlock(TrackRate aSampleRate, GraphTime aTime) |
michael@0 | 1153 | { |
michael@0 | 1154 | TrackTicks ticks = TimeToTicksRoundUp(aSampleRate, aTime); |
michael@0 | 1155 | uint64_t block = ticks >> WEBAUDIO_BLOCK_SIZE_BITS; |
michael@0 | 1156 | uint64_t nextBlock = block + 1; |
michael@0 | 1157 | TrackTicks nextTicks = nextBlock << WEBAUDIO_BLOCK_SIZE_BITS; |
michael@0 | 1158 | // Find the smallest time t such that TimeToTicksRoundUp(aSampleRate,t) == nextTicks |
michael@0 | 1159 | // That's the smallest integer t such that |
michael@0 | 1160 | // t*aSampleRate > ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) |
michael@0 | 1161 | // Both sides are integers, so this is equivalent to |
michael@0 | 1162 | // t*aSampleRate >= ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1 |
michael@0 | 1163 | // t >= (((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1)/aSampleRate |
michael@0 | 1164 | // t = ceil((((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1)/aSampleRate) |
michael@0 | 1165 | // Using integer division, that's |
michael@0 | 1166 | // t = (((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1 + aSampleRate - 1)/aSampleRate |
michael@0 | 1167 | // = ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS)/aSampleRate + 1 |
michael@0 | 1168 | return ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS)/aSampleRate + 1; |
michael@0 | 1169 | } |
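michael@0 |      | |
michael@0 |      | // A worked example of the rounding math above, for illustration only (kept |
michael@0 |      | // out of the build with #if 0). It assumes WEBAUDIO_BLOCK_SIZE == 128 |
michael@0 |      | // (WEBAUDIO_BLOCK_SIZE_BITS == 7) and MEDIA_TIME_FRAC_BITS == 20; treat those |
michael@0 |      | // values, and the helper name, as assumptions made for the example rather |
michael@0 |      | // than guarantees from this file. |
michael@0 |      | #if 0 |
michael@0 |      | static void |
michael@0 |      | ExampleRoundUpToNextAudioBlock() |
michael@0 |      | { |
michael@0 |      |   const TrackRate rate = 48000; |
michael@0 |      |   // Any aTime inside the first block (ticks 0..127) gives nextTicks == 128. |
michael@0 |      |   // The formula then yields ((128 - 1) << 20) / 48000 + 1 |
michael@0 |      |   //                         = 133169152 / 48000 + 1 = 2774 + 1 = 2775. |
michael@0 |      |   // Check: TimeToTicksRoundUp(48000, 2775) = ceil(2775 * 48000 / 2^20) = 128, |
michael@0 |      |   // while 2774 still maps to tick 127, so 2775 is the smallest such time. |
michael@0 |      |   GraphTime t = RoundUpToNextAudioBlock(rate, 0); |
michael@0 |      |   MOZ_ASSERT(t == 2775); |
michael@0 |      | } |
michael@0 |      | #endif |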
michael@0 | 1170 | |
michael@0 | 1171 | void |
michael@0 | 1172 | MediaStreamGraphImpl::ProduceDataForStreamsBlockByBlock(uint32_t aStreamIndex, |
michael@0 | 1173 | TrackRate aSampleRate, |
michael@0 | 1174 | GraphTime aFrom, |
michael@0 | 1175 | GraphTime aTo) |
michael@0 | 1176 | { |
michael@0 | 1177 | GraphTime t = aFrom; |
michael@0 | 1178 | while (t < aTo) { |
michael@0 | 1179 | GraphTime next = RoundUpToNextAudioBlock(aSampleRate, t); |
michael@0 | 1180 | for (uint32_t i = aStreamIndex; i < mStreams.Length(); ++i) { |
michael@0 | 1181 | ProcessedMediaStream* ps = mStreams[i]->AsProcessedStream(); |
michael@0 | 1182 | if (ps) { |
michael@0 | 1183 | ps->ProcessInput(t, next, (next == aTo) ? ProcessedMediaStream::ALLOW_FINISH : 0); |
michael@0 | 1184 | } |
michael@0 | 1185 | } |
michael@0 | 1186 | t = next; |
michael@0 | 1187 | } |
michael@0 | 1188 | NS_ASSERTION(t == aTo, "Something went wrong with rounding to block boundaries"); |
michael@0 | 1189 | } |
michael@0 | 1190 | |
michael@0 | 1191 | bool |
michael@0 | 1192 | MediaStreamGraphImpl::AllFinishedStreamsNotified() |
michael@0 | 1193 | { |
michael@0 | 1194 | for (uint32_t i = 0; i < mStreams.Length(); ++i) { |
michael@0 | 1195 | MediaStream* s = mStreams[i]; |
michael@0 | 1196 | if (s->mFinished && !s->mNotifiedFinished) { |
michael@0 | 1197 | return false; |
michael@0 | 1198 | } |
michael@0 | 1199 | } |
michael@0 | 1200 | return true; |
michael@0 | 1201 | } |
michael@0 | 1202 | |
michael@0 | 1203 | void |
michael@0 | 1204 | MediaStreamGraphImpl::PauseAllAudioOutputs() |
michael@0 | 1205 | { |
michael@0 | 1206 | for (uint32_t i = 0; i < mStreams.Length(); ++i) { |
michael@0 | 1207 | MediaStream* s = mStreams[i]; |
michael@0 | 1208 | for (uint32_t j = 0; j < s->mAudioOutputStreams.Length(); ++j) { |
michael@0 | 1209 | s->mAudioOutputStreams[j].mStream->Pause(); |
michael@0 | 1210 | } |
michael@0 | 1211 | } |
michael@0 | 1212 | } |
michael@0 | 1213 | |
michael@0 | 1214 | void |
michael@0 | 1215 | MediaStreamGraphImpl::ResumeAllAudioOutputs() |
michael@0 | 1216 | { |
michael@0 | 1217 | for (uint32_t i = 0; i < mStreams.Length(); ++i) { |
michael@0 | 1218 | MediaStream* s = mStreams[i]; |
michael@0 | 1219 | for (uint32_t j = 0; j < s->mAudioOutputStreams.Length(); ++j) { |
michael@0 | 1220 | s->mAudioOutputStreams[j].mStream->Resume(); |
michael@0 | 1221 | } |
michael@0 | 1222 | } |
michael@0 | 1223 | } |
michael@0 | 1224 | |
michael@0 | 1225 | struct AutoProfilerUnregisterThread |
michael@0 | 1226 | { |
michael@0 | 1227 | // The empty ctor is used to silence a pre-4.8.0 GCC unused variable warning. |
michael@0 | 1228 | AutoProfilerUnregisterThread() |
michael@0 | 1229 | { |
michael@0 | 1230 | } |
michael@0 | 1231 | |
michael@0 | 1232 | ~AutoProfilerUnregisterThread() |
michael@0 | 1233 | { |
michael@0 | 1234 | profiler_unregister_thread(); |
michael@0 | 1235 | } |
michael@0 | 1236 | }; |
michael@0 | 1237 | |
michael@0 | 1238 | void |
michael@0 | 1239 | MediaStreamGraphImpl::RunThread() |
michael@0 | 1240 | { |
michael@0 | 1241 | nsTArray<MessageBlock> messageQueue; |
michael@0 | 1242 | { |
michael@0 | 1243 | MonitorAutoLock lock(mMonitor); |
michael@0 | 1244 | messageQueue.SwapElements(mMessageQueue); |
michael@0 | 1245 | } |
michael@0 | 1246 | NS_ASSERTION(!messageQueue.IsEmpty(), |
michael@0 | 1247 | "Shouldn't have started a graph with empty message queue!"); |
michael@0 | 1248 | |
michael@0 | 1249 | uint32_t ticksProcessed = 0; |
michael@0 | 1250 | AutoProfilerUnregisterThread autoUnregister; |
michael@0 | 1251 | |
michael@0 | 1252 | for (;;) { |
michael@0 | 1253 | // Check if a memory report has been requested. |
michael@0 | 1254 | { |
michael@0 | 1255 | MonitorAutoLock lock(mMemoryReportMonitor); |
michael@0 | 1256 | if (mNeedsMemoryReport) { |
michael@0 | 1257 | mNeedsMemoryReport = false; |
michael@0 | 1258 | |
michael@0 | 1259 | for (uint32_t i = 0; i < mStreams.Length(); ++i) { |
michael@0 | 1260 | AudioNodeStream* stream = mStreams[i]->AsAudioNodeStream(); |
michael@0 | 1261 | if (stream) { |
michael@0 | 1262 | AudioNodeSizes usage; |
michael@0 | 1263 | stream->SizeOfAudioNodesIncludingThis(MallocSizeOf, usage); |
michael@0 | 1264 | mAudioStreamSizes.AppendElement(usage); |
michael@0 | 1265 | } |
michael@0 | 1266 | } |
michael@0 | 1267 | |
michael@0 | 1268 | lock.Notify(); |
michael@0 | 1269 | } |
michael@0 | 1270 | } |
michael@0 | 1271 | |
michael@0 | 1272 | // Update mCurrentTime to the min of the playing audio times, or using the |
michael@0 | 1273 | // wall-clock time change if no audio is playing. |
michael@0 | 1274 | UpdateCurrentTime(); |
michael@0 | 1275 | |
michael@0 | 1276 | // Calculate independent action times for each batch of messages (each |
michael@0 | 1277 | // batch corresponding to an event loop task). This isolates the performance |
michael@0 | 1278 | // of different scripts to some extent. |
michael@0 | 1279 | for (uint32_t i = 0; i < messageQueue.Length(); ++i) { |
michael@0 | 1280 | mProcessingGraphUpdateIndex = messageQueue[i].mGraphUpdateIndex; |
michael@0 | 1281 | nsTArray<nsAutoPtr<ControlMessage> >& messages = messageQueue[i].mMessages; |
michael@0 | 1282 | |
michael@0 | 1283 | for (uint32_t j = 0; j < messages.Length(); ++j) { |
michael@0 | 1284 | messages[j]->Run(); |
michael@0 | 1285 | } |
michael@0 | 1286 | } |
michael@0 | 1287 | messageQueue.Clear(); |
michael@0 | 1288 | |
michael@0 | 1289 | if (mStreamOrderDirty) { |
michael@0 | 1290 | UpdateStreamOrder(); |
michael@0 | 1291 | } |
michael@0 | 1292 | |
michael@0 | 1293 | GraphTime endBlockingDecisions = |
michael@0 | 1294 | RoundUpToNextAudioBlock(mSampleRate, mCurrentTime + MillisecondsToMediaTime(AUDIO_TARGET_MS)); |
michael@0 | 1295 | bool ensureNextIteration = false; |
michael@0 | 1296 | |
michael@0 | 1297 | // Grab pending stream input. |
michael@0 | 1298 | for (uint32_t i = 0; i < mStreams.Length(); ++i) { |
michael@0 | 1299 | SourceMediaStream* is = mStreams[i]->AsSourceStream(); |
michael@0 | 1300 | if (is) { |
michael@0 | 1301 | UpdateConsumptionState(is); |
michael@0 | 1302 | ExtractPendingInput(is, endBlockingDecisions, &ensureNextIteration); |
michael@0 | 1303 | } |
michael@0 | 1304 | } |
michael@0 | 1305 | |
michael@0 | 1306 |     // The loop may be woken up so soon that mCurrentTime barely advances, |
michael@0 | 1307 |     // leaving endBlockingDecisions == mStateComputedTime. |
michael@0 | 1308 |     // Since stream blocking is computed over the interval |
michael@0 | 1309 |     // [mStateComputedTime, endBlockingDecisions), it would not be computed at all. |
michael@0 | 1310 |     // Ensure another iteration so that pending blocking changes are computed |
michael@0 | 1311 |     // in the next loop. |
michael@0 | 1312 | if (endBlockingDecisions == mStateComputedTime) { |
michael@0 | 1313 | ensureNextIteration = true; |
michael@0 | 1314 | } |
michael@0 | 1315 | |
michael@0 | 1316 | // Figure out which streams are blocked and when. |
michael@0 | 1317 | GraphTime prevComputedTime = mStateComputedTime; |
michael@0 | 1318 | RecomputeBlocking(endBlockingDecisions); |
michael@0 | 1319 | |
michael@0 | 1320 | // Play stream contents. |
michael@0 | 1321 | bool allBlockedForever = true; |
michael@0 | 1322 | // True when we've done ProcessInput for all processed streams. |
michael@0 | 1323 | bool doneAllProducing = false; |
michael@0 | 1324 |     // This is the number of frames written to the AudioStreams during |
michael@0 | 1325 |     // this cycle. |
michael@0 | 1326 | TrackTicks ticksPlayed = 0; |
michael@0 | 1327 | // Figure out what each stream wants to do |
michael@0 | 1328 | for (uint32_t i = 0; i < mStreams.Length(); ++i) { |
michael@0 | 1329 | MediaStream* stream = mStreams[i]; |
michael@0 | 1330 | if (!doneAllProducing) { |
michael@0 | 1331 | ProcessedMediaStream* ps = stream->AsProcessedStream(); |
michael@0 | 1332 | if (ps) { |
michael@0 | 1333 | AudioNodeStream* n = stream->AsAudioNodeStream(); |
michael@0 | 1334 | if (n) { |
michael@0 | 1335 | #ifdef DEBUG |
michael@0 | 1336 | // Verify that the sampling rate for all of the following streams is the same |
michael@0 | 1337 | for (uint32_t j = i + 1; j < mStreams.Length(); ++j) { |
michael@0 | 1338 | AudioNodeStream* nextStream = mStreams[j]->AsAudioNodeStream(); |
michael@0 | 1339 | if (nextStream) { |
michael@0 | 1340 | MOZ_ASSERT(n->SampleRate() == nextStream->SampleRate(), |
michael@0 | 1341 | "All AudioNodeStreams in the graph must have the same sampling rate"); |
michael@0 | 1342 | } |
michael@0 | 1343 | } |
michael@0 | 1344 | #endif |
michael@0 | 1345 | // Since an AudioNodeStream is present, go ahead and |
michael@0 | 1346 | // produce audio block by block for all the rest of the streams. |
michael@0 | 1347 | ProduceDataForStreamsBlockByBlock(i, n->SampleRate(), prevComputedTime, mStateComputedTime); |
michael@0 | 1348 | ticksProcessed += TimeToTicksRoundDown(n->SampleRate(), mStateComputedTime - prevComputedTime); |
michael@0 | 1349 | doneAllProducing = true; |
michael@0 | 1350 | } else { |
michael@0 | 1351 | ps->ProcessInput(prevComputedTime, mStateComputedTime, |
michael@0 | 1352 | ProcessedMediaStream::ALLOW_FINISH); |
michael@0 | 1353 | NS_WARN_IF_FALSE(stream->mBuffer.GetEnd() >= |
michael@0 | 1354 | GraphTimeToStreamTime(stream, mStateComputedTime), |
michael@0 | 1355 | "Stream did not produce enough data"); |
michael@0 | 1356 | } |
michael@0 | 1357 | } |
michael@0 | 1358 | } |
michael@0 | 1359 | NotifyHasCurrentData(stream); |
michael@0 | 1360 | if (mRealtime) { |
michael@0 | 1361 |         // Only play back audio and video in real-time mode |
michael@0 | 1362 | CreateOrDestroyAudioStreams(prevComputedTime, stream); |
michael@0 | 1363 | TrackTicks ticksPlayedForThisStream = PlayAudio(stream, prevComputedTime, mStateComputedTime); |
michael@0 | 1364 | if (!ticksPlayed) { |
michael@0 | 1365 | ticksPlayed = ticksPlayedForThisStream; |
michael@0 | 1366 | } else { |
michael@0 | 1367 | MOZ_ASSERT(!ticksPlayedForThisStream || ticksPlayedForThisStream == ticksPlayed, |
michael@0 | 1368 |                      "Each stream should have the same number of frames."); |
michael@0 | 1369 | } |
michael@0 | 1370 | PlayVideo(stream); |
michael@0 | 1371 | } |
michael@0 | 1372 | SourceMediaStream* is = stream->AsSourceStream(); |
michael@0 | 1373 | if (is) { |
michael@0 | 1374 | UpdateBufferSufficiencyState(is); |
michael@0 | 1375 | } |
michael@0 | 1376 | GraphTime end; |
michael@0 | 1377 | if (!stream->mBlocked.GetAt(mCurrentTime, &end) || end < GRAPH_TIME_MAX) { |
michael@0 | 1378 | allBlockedForever = false; |
michael@0 | 1379 | } |
michael@0 | 1380 | } |
michael@0 | 1381 | |
michael@0 | 1382 | if (mMixer) { |
michael@0 | 1383 | mMixer->FinishMixing(); |
michael@0 | 1384 | } |
michael@0 | 1385 | |
michael@0 | 1386 | if (ensureNextIteration || !allBlockedForever) { |
michael@0 | 1387 | EnsureNextIteration(); |
michael@0 | 1388 | } |
michael@0 | 1389 | |
michael@0 | 1390 | // Send updates to the main thread and wait for the next control loop |
michael@0 | 1391 | // iteration. |
michael@0 | 1392 | { |
michael@0 | 1393 | MonitorAutoLock lock(mMonitor); |
michael@0 | 1394 | bool finalUpdate = mForceShutDown || |
michael@0 | 1395 | (mCurrentTime >= mEndTime && AllFinishedStreamsNotified()) || |
michael@0 | 1396 | (IsEmpty() && mMessageQueue.IsEmpty()); |
michael@0 | 1397 | PrepareUpdatesToMainThreadState(finalUpdate); |
michael@0 | 1398 | if (finalUpdate) { |
michael@0 | 1399 |         // Enter shutdown mode. The stable-state handler will detect this |
michael@0 | 1400 |         // and complete shutdown. |
michael@0 | 1401 | STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p waiting for main thread cleanup", this)); |
michael@0 | 1402 | // We'll shut down this graph object if it does not get restarted. |
michael@0 | 1403 | mLifecycleState = LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP; |
michael@0 | 1404 | // No need to Destroy streams here. The main-thread owner of each |
michael@0 | 1405 | // stream is responsible for calling Destroy on them. |
michael@0 | 1406 | return; |
michael@0 | 1407 | } |
michael@0 | 1408 | |
michael@0 | 1409 | // No need to wait in non-realtime mode, just churn through the input as soon |
michael@0 | 1410 | // as possible. |
michael@0 | 1411 | if (mRealtime) { |
michael@0 | 1412 | PRIntervalTime timeout = PR_INTERVAL_NO_TIMEOUT; |
michael@0 | 1413 | TimeStamp now = TimeStamp::Now(); |
michael@0 | 1414 | bool pausedOutputs = false; |
michael@0 | 1415 | if (mNeedAnotherIteration) { |
michael@0 | 1416 | int64_t timeoutMS = MEDIA_GRAPH_TARGET_PERIOD_MS - |
michael@0 | 1417 | int64_t((now - mCurrentTimeStamp).ToMilliseconds()); |
michael@0 | 1418 | // Make sure timeoutMS doesn't overflow 32 bits by waking up at |
michael@0 | 1419 | // least once a minute, if we need to wake up at all |
michael@0 | 1420 | timeoutMS = std::max<int64_t>(0, std::min<int64_t>(timeoutMS, 60*1000)); |
michael@0 | 1421 | timeout = PR_MillisecondsToInterval(uint32_t(timeoutMS)); |
michael@0 | 1422 | STREAM_LOG(PR_LOG_DEBUG+1, ("Waiting for next iteration; at %f, timeout=%f", |
michael@0 | 1423 | (now - mInitialTimeStamp).ToSeconds(), timeoutMS/1000.0)); |
michael@0 | 1424 | mWaitState = WAITSTATE_WAITING_FOR_NEXT_ITERATION; |
michael@0 | 1425 | } else { |
michael@0 | 1426 | mWaitState = WAITSTATE_WAITING_INDEFINITELY; |
michael@0 | 1427 | PauseAllAudioOutputs(); |
michael@0 | 1428 | pausedOutputs = true; |
michael@0 | 1429 | } |
michael@0 | 1430 | if (timeout > 0) { |
michael@0 | 1431 | mMonitor.Wait(timeout); |
michael@0 | 1432 | STREAM_LOG(PR_LOG_DEBUG+1, ("Resuming after timeout; at %f, elapsed=%f", |
michael@0 | 1433 | (TimeStamp::Now() - mInitialTimeStamp).ToSeconds(), |
michael@0 | 1434 | (TimeStamp::Now() - now).ToSeconds())); |
michael@0 | 1435 | } |
michael@0 | 1436 | if (pausedOutputs) { |
michael@0 | 1437 | ResumeAllAudioOutputs(); |
michael@0 | 1438 | } |
michael@0 | 1439 | } |
michael@0 | 1440 | mWaitState = WAITSTATE_RUNNING; |
michael@0 | 1441 | mNeedAnotherIteration = false; |
michael@0 | 1442 | messageQueue.SwapElements(mMessageQueue); |
michael@0 | 1443 | } |
michael@0 | 1444 | } |
michael@0 | 1445 | } |
michael@0 | 1446 | |
michael@0 | 1447 | void |
michael@0 | 1448 | MediaStreamGraphImpl::ApplyStreamUpdate(StreamUpdate* aUpdate) |
michael@0 | 1449 | { |
michael@0 | 1450 | mMonitor.AssertCurrentThreadOwns(); |
michael@0 | 1451 | |
michael@0 | 1452 | MediaStream* stream = aUpdate->mStream; |
michael@0 | 1453 | if (!stream) |
michael@0 | 1454 | return; |
michael@0 | 1455 | stream->mMainThreadCurrentTime = aUpdate->mNextMainThreadCurrentTime; |
michael@0 | 1456 | stream->mMainThreadFinished = aUpdate->mNextMainThreadFinished; |
michael@0 | 1457 | |
michael@0 | 1458 | if (stream->mWrapper) { |
michael@0 | 1459 | stream->mWrapper->NotifyStreamStateChanged(); |
michael@0 | 1460 | } |
michael@0 | 1461 | for (int32_t i = stream->mMainThreadListeners.Length() - 1; i >= 0; --i) { |
michael@0 | 1462 | stream->mMainThreadListeners[i]->NotifyMainThreadStateChanged(); |
michael@0 | 1463 | } |
michael@0 | 1464 | } |
michael@0 | 1465 | |
michael@0 | 1466 | void |
michael@0 | 1467 | MediaStreamGraphImpl::ShutdownThreads() |
michael@0 | 1468 | { |
michael@0 | 1469 | NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread"); |
michael@0 | 1470 | // mGraph's thread is not running so it's OK to do whatever here |
michael@0 | 1471 | STREAM_LOG(PR_LOG_DEBUG, ("Stopping threads for MediaStreamGraph %p", this)); |
michael@0 | 1472 | |
michael@0 | 1473 | if (mThread) { |
michael@0 | 1474 | mThread->Shutdown(); |
michael@0 | 1475 | mThread = nullptr; |
michael@0 | 1476 | } |
michael@0 | 1477 | } |
michael@0 | 1478 | |
michael@0 | 1479 | void |
michael@0 | 1480 | MediaStreamGraphImpl::ForceShutDown() |
michael@0 | 1481 | { |
michael@0 | 1482 | NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread"); |
michael@0 | 1483 | STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p ForceShutdown", this)); |
michael@0 | 1484 | { |
michael@0 | 1485 | MonitorAutoLock lock(mMonitor); |
michael@0 | 1486 | mForceShutDown = true; |
michael@0 | 1487 | EnsureImmediateWakeUpLocked(lock); |
michael@0 | 1488 | } |
michael@0 | 1489 | } |
michael@0 | 1490 | |
michael@0 | 1491 | namespace { |
michael@0 | 1492 | |
michael@0 | 1493 | class MediaStreamGraphInitThreadRunnable : public nsRunnable { |
michael@0 | 1494 | public: |
michael@0 | 1495 | explicit MediaStreamGraphInitThreadRunnable(MediaStreamGraphImpl* aGraph) |
michael@0 | 1496 | : mGraph(aGraph) |
michael@0 | 1497 | { |
michael@0 | 1498 | } |
michael@0 | 1499 | NS_IMETHOD Run() |
michael@0 | 1500 | { |
michael@0 | 1501 | char aLocal; |
michael@0 | 1502 | profiler_register_thread("MediaStreamGraph", &aLocal); |
michael@0 | 1503 | mGraph->RunThread(); |
michael@0 | 1504 | return NS_OK; |
michael@0 | 1505 | } |
michael@0 | 1506 | private: |
michael@0 | 1507 | MediaStreamGraphImpl* mGraph; |
michael@0 | 1508 | }; |
michael@0 | 1509 | |
michael@0 | 1510 | class MediaStreamGraphThreadRunnable : public nsRunnable { |
michael@0 | 1511 | public: |
michael@0 | 1512 | explicit MediaStreamGraphThreadRunnable(MediaStreamGraphImpl* aGraph) |
michael@0 | 1513 | : mGraph(aGraph) |
michael@0 | 1514 | { |
michael@0 | 1515 | } |
michael@0 | 1516 | NS_IMETHOD Run() |
michael@0 | 1517 | { |
michael@0 | 1518 | mGraph->RunThread(); |
michael@0 | 1519 | return NS_OK; |
michael@0 | 1520 | } |
michael@0 | 1521 | private: |
michael@0 | 1522 | MediaStreamGraphImpl* mGraph; |
michael@0 | 1523 | }; |
michael@0 | 1524 | |
michael@0 | 1525 | class MediaStreamGraphShutDownRunnable : public nsRunnable { |
michael@0 | 1526 | public: |
michael@0 | 1527 | MediaStreamGraphShutDownRunnable(MediaStreamGraphImpl* aGraph) : mGraph(aGraph) {} |
michael@0 | 1528 | NS_IMETHOD Run() |
michael@0 | 1529 | { |
michael@0 | 1530 | NS_ASSERTION(mGraph->mDetectedNotRunning, |
michael@0 | 1531 | "We should know the graph thread control loop isn't running!"); |
michael@0 | 1532 | |
michael@0 | 1533 | mGraph->ShutdownThreads(); |
michael@0 | 1534 | |
michael@0 | 1535 | // mGraph's thread is not running so it's OK to do whatever here |
michael@0 | 1536 | if (mGraph->IsEmpty()) { |
michael@0 | 1537 | // mGraph is no longer needed, so delete it. |
michael@0 | 1538 | mGraph->Destroy(); |
michael@0 | 1539 | } else { |
michael@0 | 1540 | // The graph is not empty. We must be in a forced shutdown, or a |
michael@0 | 1541 | // non-realtime graph that has finished processing. Some later |
michael@0 | 1542 | // AppendMessage will detect that the manager has been emptied, and |
michael@0 | 1543 | // delete it. |
michael@0 | 1544 | NS_ASSERTION(mGraph->mForceShutDown || !mGraph->mRealtime, |
michael@0 | 1545 | "Not in forced shutdown?"); |
michael@0 | 1546 | for (uint32_t i = 0; i < mGraph->mStreams.Length(); ++i) { |
michael@0 | 1547 | DOMMediaStream* s = mGraph->mStreams[i]->GetWrapper(); |
michael@0 | 1548 | if (s) { |
michael@0 | 1549 | s->NotifyMediaStreamGraphShutdown(); |
michael@0 | 1550 | } |
michael@0 | 1551 | } |
michael@0 | 1552 | |
michael@0 | 1553 | mGraph->mLifecycleState = |
michael@0 | 1554 | MediaStreamGraphImpl::LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION; |
michael@0 | 1555 | } |
michael@0 | 1556 | return NS_OK; |
michael@0 | 1557 | } |
michael@0 | 1558 | private: |
michael@0 | 1559 | MediaStreamGraphImpl* mGraph; |
michael@0 | 1560 | }; |
michael@0 | 1561 | |
michael@0 | 1562 | class MediaStreamGraphStableStateRunnable : public nsRunnable { |
michael@0 | 1563 | public: |
michael@0 | 1564 | explicit MediaStreamGraphStableStateRunnable(MediaStreamGraphImpl* aGraph) |
michael@0 | 1565 | : mGraph(aGraph) |
michael@0 | 1566 | { |
michael@0 | 1567 | } |
michael@0 | 1568 | NS_IMETHOD Run() |
michael@0 | 1569 | { |
michael@0 | 1570 | if (mGraph) { |
michael@0 | 1571 | mGraph->RunInStableState(); |
michael@0 | 1572 | } |
michael@0 | 1573 | return NS_OK; |
michael@0 | 1574 | } |
michael@0 | 1575 | private: |
michael@0 | 1576 | MediaStreamGraphImpl* mGraph; |
michael@0 | 1577 | }; |
michael@0 | 1578 | |
michael@0 | 1579 | /* |
michael@0 | 1580 | * Control messages forwarded from main thread to graph manager thread |
michael@0 | 1581 | */ |
michael@0 | 1582 | class CreateMessage : public ControlMessage { |
michael@0 | 1583 | public: |
michael@0 | 1584 | CreateMessage(MediaStream* aStream) : ControlMessage(aStream) {} |
michael@0 | 1585 | virtual void Run() MOZ_OVERRIDE |
michael@0 | 1586 | { |
michael@0 | 1587 | mStream->GraphImpl()->AddStream(mStream); |
michael@0 | 1588 | mStream->Init(); |
michael@0 | 1589 | } |
michael@0 | 1590 | virtual void RunDuringShutdown() MOZ_OVERRIDE |
michael@0 | 1591 | { |
michael@0 | 1592 |     // Make sure to run this message during shutdown too, so that we |
michael@0 | 1593 |     // balance the number of streams registered with the graph as they're |
michael@0 | 1594 |     // destroyed during shutdown. |
michael@0 | 1595 | Run(); |
michael@0 | 1596 | } |
michael@0 | 1597 | }; |
michael@0 | 1598 | |
michael@0 | 1599 | class MediaStreamGraphShutdownObserver MOZ_FINAL : public nsIObserver |
michael@0 | 1600 | { |
michael@0 | 1601 | public: |
michael@0 | 1602 | NS_DECL_ISUPPORTS |
michael@0 | 1603 | NS_DECL_NSIOBSERVER |
michael@0 | 1604 | }; |
michael@0 | 1605 | |
michael@0 | 1606 | } |
michael@0 | 1607 | |
michael@0 | 1608 | void |
michael@0 | 1609 | MediaStreamGraphImpl::RunInStableState() |
michael@0 | 1610 | { |
michael@0 | 1611 | NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread"); |
michael@0 | 1612 | |
michael@0 | 1613 | nsTArray<nsCOMPtr<nsIRunnable> > runnables; |
michael@0 | 1614 | // When we're doing a forced shutdown, pending control messages may be |
michael@0 | 1615 | // run on the main thread via RunDuringShutdown. Those messages must |
michael@0 | 1616 | // run without the graph monitor being held. So, we collect them here. |
michael@0 | 1617 | nsTArray<nsAutoPtr<ControlMessage> > controlMessagesToRunDuringShutdown; |
michael@0 | 1618 | |
michael@0 | 1619 | { |
michael@0 | 1620 | MonitorAutoLock lock(mMonitor); |
michael@0 | 1621 | mPostedRunInStableStateEvent = false; |
michael@0 | 1622 | |
michael@0 | 1623 | runnables.SwapElements(mUpdateRunnables); |
michael@0 | 1624 | for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) { |
michael@0 | 1625 | StreamUpdate* update = &mStreamUpdates[i]; |
michael@0 | 1626 | if (update->mStream) { |
michael@0 | 1627 | ApplyStreamUpdate(update); |
michael@0 | 1628 | } |
michael@0 | 1629 | } |
michael@0 | 1630 | mStreamUpdates.Clear(); |
michael@0 | 1631 | |
michael@0 | 1632 | // Don't start the thread for a non-realtime graph until it has been |
michael@0 | 1633 | // explicitly started by StartNonRealtimeProcessing. |
michael@0 | 1634 | if (mLifecycleState == LIFECYCLE_THREAD_NOT_STARTED && |
michael@0 | 1635 | (mRealtime || mNonRealtimeProcessing)) { |
michael@0 | 1636 | mLifecycleState = LIFECYCLE_RUNNING; |
michael@0 | 1637 | // Start the thread now. We couldn't start it earlier because |
michael@0 | 1638 | // the graph might exit immediately on finding it has no streams. The |
michael@0 | 1639 | // first message for a new graph must create a stream. |
michael@0 | 1640 | nsCOMPtr<nsIRunnable> event = new MediaStreamGraphInitThreadRunnable(this); |
michael@0 | 1641 | NS_NewNamedThread("MediaStreamGrph", getter_AddRefs(mThread), event); |
michael@0 | 1642 | } |
michael@0 | 1643 | |
michael@0 | 1644 | if (mCurrentTaskMessageQueue.IsEmpty()) { |
michael@0 | 1645 | if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP && IsEmpty()) { |
michael@0 | 1646 | // Complete shutdown. First, ensure that this graph is no longer used. |
michael@0 | 1647 |         // A new graph will be created if one is needed. |
michael@0 | 1648 | STREAM_LOG(PR_LOG_DEBUG, ("Disconnecting MediaStreamGraph %p", this)); |
michael@0 | 1649 | if (this == gGraph) { |
michael@0 | 1650 | // null out gGraph if that's the graph being shut down |
michael@0 | 1651 | gGraph = nullptr; |
michael@0 | 1652 | } |
michael@0 | 1653 | // Asynchronously clean up old graph. We don't want to do this |
michael@0 | 1654 | // synchronously because it spins the event loop waiting for threads |
michael@0 | 1655 | // to shut down, and we don't want to do that in a stable state handler. |
michael@0 | 1656 | mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN; |
michael@0 | 1657 | nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this); |
michael@0 | 1658 | NS_DispatchToMainThread(event); |
michael@0 | 1659 | } |
michael@0 | 1660 | } else { |
michael@0 | 1661 | if (mLifecycleState <= LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) { |
michael@0 | 1662 | MessageBlock* block = mMessageQueue.AppendElement(); |
michael@0 | 1663 | block->mMessages.SwapElements(mCurrentTaskMessageQueue); |
michael@0 | 1664 | block->mGraphUpdateIndex = mNextGraphUpdateIndex; |
michael@0 | 1665 | ++mNextGraphUpdateIndex; |
michael@0 | 1666 | EnsureNextIterationLocked(lock); |
michael@0 | 1667 | } |
michael@0 | 1668 | |
michael@0 | 1669 | // If the MediaStreamGraph has more messages going to it, try to revive |
michael@0 | 1670 | // it to process those messages. Don't do this if we're in a forced |
michael@0 | 1671 | // shutdown or it's a non-realtime graph that has already terminated |
michael@0 | 1672 | // processing. |
michael@0 | 1673 | if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP && |
michael@0 | 1674 | mRealtime && !mForceShutDown) { |
michael@0 | 1675 | mLifecycleState = LIFECYCLE_RUNNING; |
michael@0 | 1676 | // Revive the MediaStreamGraph since we have more messages going to it. |
michael@0 | 1677 | // Note that we need to put messages into its queue before reviving it, |
michael@0 | 1678 | // or it might exit immediately. |
michael@0 | 1679 | nsCOMPtr<nsIRunnable> event = new MediaStreamGraphThreadRunnable(this); |
michael@0 | 1680 | mThread->Dispatch(event, 0); |
michael@0 | 1681 | } |
michael@0 | 1682 | } |
michael@0 | 1683 | |
michael@0 | 1684 | if ((mForceShutDown || !mRealtime) && |
michael@0 | 1685 | mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) { |
michael@0 | 1686 | // Defer calls to RunDuringShutdown() to happen while mMonitor is not held. |
michael@0 | 1687 | for (uint32_t i = 0; i < mMessageQueue.Length(); ++i) { |
michael@0 | 1688 | MessageBlock& mb = mMessageQueue[i]; |
michael@0 | 1689 | controlMessagesToRunDuringShutdown.MoveElementsFrom(mb.mMessages); |
michael@0 | 1690 | } |
michael@0 | 1691 | mMessageQueue.Clear(); |
michael@0 | 1692 | MOZ_ASSERT(mCurrentTaskMessageQueue.IsEmpty()); |
michael@0 | 1693 | // Stop MediaStreamGraph threads. Do not clear gGraph since |
michael@0 | 1694 | // we have outstanding DOM objects that may need it. |
michael@0 | 1695 | mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN; |
michael@0 | 1696 | nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this); |
michael@0 | 1697 | NS_DispatchToMainThread(event); |
michael@0 | 1698 | } |
michael@0 | 1699 | |
michael@0 | 1700 | mDetectedNotRunning = mLifecycleState > LIFECYCLE_RUNNING; |
michael@0 | 1701 | } |
michael@0 | 1702 | |
michael@0 | 1703 | // Make sure we get a new current time in the next event loop task |
michael@0 | 1704 | mPostedRunInStableState = false; |
michael@0 | 1705 | |
michael@0 | 1706 | for (uint32_t i = 0; i < runnables.Length(); ++i) { |
michael@0 | 1707 | runnables[i]->Run(); |
michael@0 | 1708 | } |
michael@0 | 1709 | for (uint32_t i = 0; i < controlMessagesToRunDuringShutdown.Length(); ++i) { |
michael@0 | 1710 | controlMessagesToRunDuringShutdown[i]->RunDuringShutdown(); |
michael@0 | 1711 | } |
michael@0 | 1712 | |
michael@0 | 1713 | #ifdef DEBUG |
michael@0 | 1714 | mCanRunMessagesSynchronously = mDetectedNotRunning && |
michael@0 | 1715 | mLifecycleState >= LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN; |
michael@0 | 1716 | #endif |
michael@0 | 1717 | } |
michael@0 | 1718 | |
michael@0 | 1719 | static NS_DEFINE_CID(kAppShellCID, NS_APPSHELL_CID); |
michael@0 | 1720 | |
michael@0 | 1721 | void |
michael@0 | 1722 | MediaStreamGraphImpl::EnsureRunInStableState() |
michael@0 | 1723 | { |
michael@0 | 1724 | NS_ASSERTION(NS_IsMainThread(), "main thread only"); |
michael@0 | 1725 | |
michael@0 | 1726 | if (mPostedRunInStableState) |
michael@0 | 1727 | return; |
michael@0 | 1728 | mPostedRunInStableState = true; |
michael@0 | 1729 | nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this); |
michael@0 | 1730 | nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID); |
michael@0 | 1731 | if (appShell) { |
michael@0 | 1732 | appShell->RunInStableState(event); |
michael@0 | 1733 | } else { |
michael@0 | 1734 | NS_ERROR("Appshell already destroyed?"); |
michael@0 | 1735 | } |
michael@0 | 1736 | } |
michael@0 | 1737 | |
michael@0 | 1738 | void |
michael@0 | 1739 | MediaStreamGraphImpl::EnsureStableStateEventPosted() |
michael@0 | 1740 | { |
michael@0 | 1741 | mMonitor.AssertCurrentThreadOwns(); |
michael@0 | 1742 | |
michael@0 | 1743 | if (mPostedRunInStableStateEvent) |
michael@0 | 1744 | return; |
michael@0 | 1745 | mPostedRunInStableStateEvent = true; |
michael@0 | 1746 | nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this); |
michael@0 | 1747 | NS_DispatchToMainThread(event); |
michael@0 | 1748 | } |
michael@0 | 1749 | |
michael@0 | 1750 | void |
michael@0 | 1751 | MediaStreamGraphImpl::AppendMessage(ControlMessage* aMessage) |
michael@0 | 1752 | { |
michael@0 | 1753 | NS_ASSERTION(NS_IsMainThread(), "main thread only"); |
michael@0 | 1754 | NS_ASSERTION(!aMessage->GetStream() || |
michael@0 | 1755 | !aMessage->GetStream()->IsDestroyed(), |
michael@0 | 1756 | "Stream already destroyed"); |
michael@0 | 1757 | |
michael@0 | 1758 | if (mDetectedNotRunning && |
michael@0 | 1759 | mLifecycleState > LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) { |
michael@0 | 1760 | // The graph control loop is not running and main thread cleanup has |
michael@0 | 1761 | // happened. From now on we can't append messages to mCurrentTaskMessageQueue, |
michael@0 | 1762 | // because that will never be processed again, so just RunDuringShutdown |
michael@0 | 1763 | // this message. |
michael@0 | 1764 | // This should only happen during forced shutdown, or after a non-realtime |
michael@0 | 1765 | // graph has finished processing. |
michael@0 | 1766 | #ifdef DEBUG |
michael@0 | 1767 | MOZ_ASSERT(mCanRunMessagesSynchronously); |
michael@0 | 1768 | mCanRunMessagesSynchronously = false; |
michael@0 | 1769 | #endif |
michael@0 | 1770 | aMessage->RunDuringShutdown(); |
michael@0 | 1771 | #ifdef DEBUG |
michael@0 | 1772 | mCanRunMessagesSynchronously = true; |
michael@0 | 1773 | #endif |
michael@0 | 1774 | delete aMessage; |
michael@0 | 1775 | if (IsEmpty() && |
michael@0 | 1776 | mLifecycleState >= LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION) { |
michael@0 | 1777 | if (gGraph == this) { |
michael@0 | 1778 | gGraph = nullptr; |
michael@0 | 1779 | } |
michael@0 | 1780 | Destroy(); |
michael@0 | 1781 | } |
michael@0 | 1782 | return; |
michael@0 | 1783 | } |
michael@0 | 1784 | |
michael@0 | 1785 | mCurrentTaskMessageQueue.AppendElement(aMessage); |
michael@0 | 1786 | EnsureRunInStableState(); |
michael@0 | 1787 | } |
michael@0 | 1788 | |
michael@0 | 1789 | MediaStream::MediaStream(DOMMediaStream* aWrapper) |
michael@0 | 1790 | : mBufferStartTime(0) |
michael@0 | 1791 | , mExplicitBlockerCount(0) |
michael@0 | 1792 | , mBlocked(false) |
michael@0 | 1793 | , mGraphUpdateIndices(0) |
michael@0 | 1794 | , mFinished(false) |
michael@0 | 1795 | , mNotifiedFinished(false) |
michael@0 | 1796 | , mNotifiedBlocked(false) |
michael@0 | 1797 | , mHasCurrentData(false) |
michael@0 | 1798 | , mNotifiedHasCurrentData(false) |
michael@0 | 1799 | , mWrapper(aWrapper) |
michael@0 | 1800 | , mMainThreadCurrentTime(0) |
michael@0 | 1801 | , mMainThreadFinished(false) |
michael@0 | 1802 | , mMainThreadDestroyed(false) |
michael@0 | 1803 | , mGraph(nullptr) |
michael@0 | 1804 | , mAudioChannelType(dom::AudioChannel::Normal) |
michael@0 | 1805 | { |
michael@0 | 1806 | MOZ_COUNT_CTOR(MediaStream); |
michael@0 | 1807 | // aWrapper should not already be connected to a MediaStream! It needs |
michael@0 | 1808 | // to be hooked up to this stream, and since this stream is only just |
michael@0 | 1809 | // being created now, aWrapper must not be connected to anything. |
michael@0 | 1810 | NS_ASSERTION(!aWrapper || !aWrapper->GetStream(), |
michael@0 | 1811 | "Wrapper already has another media stream hooked up to it!"); |
michael@0 | 1812 | } |
michael@0 | 1813 | |
michael@0 | 1814 | size_t |
michael@0 | 1815 | MediaStream::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const |
michael@0 | 1816 | { |
michael@0 | 1817 | size_t amount = 0; |
michael@0 | 1818 | |
michael@0 | 1819 | // Not owned: |
michael@0 | 1820 | // - mGraph - Not reported here |
michael@0 | 1821 | // - mConsumers - elements |
michael@0 | 1822 | // Future: |
michael@0 | 1823 | // - mWrapper |
michael@0 | 1824 | // - mVideoOutputs - elements |
michael@0 | 1825 | // - mLastPlayedVideoFrame |
michael@0 | 1826 | // - mListeners - elements |
michael@0 | 1827 | // - mAudioOutputStreams - elements |
michael@0 | 1828 | |
michael@0 | 1829 | amount += mBuffer.SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1830 | amount += mAudioOutputs.SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1831 | amount += mVideoOutputs.SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1832 | amount += mExplicitBlockerCount.SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1833 | amount += mListeners.SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1834 | amount += mMainThreadListeners.SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1835 | amount += mDisabledTrackIDs.SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1836 | amount += mBlocked.SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1837 | amount += mGraphUpdateIndices.SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1838 | amount += mConsumers.SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1839 | amount += mAudioOutputStreams.SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1840 | for (size_t i = 0; i < mAudioOutputStreams.Length(); i++) { |
michael@0 | 1841 | amount += mAudioOutputStreams[i].SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1842 | } |
michael@0 | 1843 | |
michael@0 | 1844 | return amount; |
michael@0 | 1845 | } |
michael@0 | 1846 | |
michael@0 | 1847 | size_t |
michael@0 | 1848 | MediaStream::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const |
michael@0 | 1849 | { |
michael@0 | 1850 | return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf); |
michael@0 | 1851 | } |
michael@0 | 1852 | |
michael@0 | 1853 | void |
michael@0 | 1854 | MediaStream::Init() |
michael@0 | 1855 | { |
michael@0 | 1856 | MediaStreamGraphImpl* graph = GraphImpl(); |
michael@0 | 1857 | mBlocked.SetAtAndAfter(graph->mCurrentTime, true); |
michael@0 | 1858 | mExplicitBlockerCount.SetAtAndAfter(graph->mCurrentTime, true); |
michael@0 | 1859 | mExplicitBlockerCount.SetAtAndAfter(graph->mStateComputedTime, false); |
michael@0 | 1860 | } |
michael@0 | 1861 | |
michael@0 | 1862 | MediaStreamGraphImpl* |
michael@0 | 1863 | MediaStream::GraphImpl() |
michael@0 | 1864 | { |
michael@0 | 1865 | return mGraph; |
michael@0 | 1866 | } |
michael@0 | 1867 | |
michael@0 | 1868 | MediaStreamGraph* |
michael@0 | 1869 | MediaStream::Graph() |
michael@0 | 1870 | { |
michael@0 | 1871 | return mGraph; |
michael@0 | 1872 | } |
michael@0 | 1873 | |
michael@0 | 1874 | void |
michael@0 | 1875 | MediaStream::SetGraphImpl(MediaStreamGraphImpl* aGraph) |
michael@0 | 1876 | { |
michael@0 | 1877 | MOZ_ASSERT(!mGraph, "Should only be called once"); |
michael@0 | 1878 | mGraph = aGraph; |
michael@0 | 1879 | } |
michael@0 | 1880 | |
michael@0 | 1881 | void |
michael@0 | 1882 | MediaStream::SetGraphImpl(MediaStreamGraph* aGraph) |
michael@0 | 1883 | { |
michael@0 | 1884 | MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph); |
michael@0 | 1885 | SetGraphImpl(graph); |
michael@0 | 1886 | } |
michael@0 | 1887 | |
michael@0 | 1888 | StreamTime |
michael@0 | 1889 | MediaStream::GraphTimeToStreamTime(GraphTime aTime) |
michael@0 | 1890 | { |
michael@0 | 1891 | return GraphImpl()->GraphTimeToStreamTime(this, aTime); |
michael@0 | 1892 | } |
michael@0 | 1893 | |
michael@0 | 1894 | StreamTime |
michael@0 | 1895 | MediaStream::GraphTimeToStreamTimeOptimistic(GraphTime aTime) |
michael@0 | 1896 | { |
michael@0 | 1897 | return GraphImpl()->GraphTimeToStreamTimeOptimistic(this, aTime); |
michael@0 | 1898 | } |
michael@0 | 1899 | |
michael@0 | 1900 | GraphTime |
michael@0 | 1901 | MediaStream::StreamTimeToGraphTime(StreamTime aTime) |
michael@0 | 1902 | { |
michael@0 | 1903 | return GraphImpl()->StreamTimeToGraphTime(this, aTime, 0); |
michael@0 | 1904 | } |
michael@0 | 1905 | |
michael@0 | 1906 | void |
michael@0 | 1907 | MediaStream::FinishOnGraphThread() |
michael@0 | 1908 | { |
michael@0 | 1909 | GraphImpl()->FinishStream(this); |
michael@0 | 1910 | } |
michael@0 | 1911 | |
michael@0 | 1912 | int64_t |
michael@0 | 1913 | MediaStream::GetProcessingGraphUpdateIndex() |
michael@0 | 1914 | { |
michael@0 | 1915 | return GraphImpl()->GetProcessingGraphUpdateIndex(); |
michael@0 | 1916 | } |
michael@0 | 1917 | |
michael@0 | 1918 | StreamBuffer::Track* |
michael@0 | 1919 | MediaStream::EnsureTrack(TrackID aTrackId, TrackRate aSampleRate) |
michael@0 | 1920 | { |
michael@0 | 1921 | StreamBuffer::Track* track = mBuffer.FindTrack(aTrackId); |
michael@0 | 1922 | if (!track) { |
michael@0 | 1923 | nsAutoPtr<MediaSegment> segment(new AudioSegment()); |
michael@0 | 1924 | for (uint32_t j = 0; j < mListeners.Length(); ++j) { |
michael@0 | 1925 | MediaStreamListener* l = mListeners[j]; |
michael@0 | 1926 | l->NotifyQueuedTrackChanges(Graph(), aTrackId, |
michael@0 | 1927 | GraphImpl()->AudioSampleRate(), 0, |
michael@0 | 1928 | MediaStreamListener::TRACK_EVENT_CREATED, |
michael@0 | 1929 | *segment); |
michael@0 | 1930 | } |
michael@0 | 1931 | track = &mBuffer.AddTrack(aTrackId, aSampleRate, 0, segment.forget()); |
michael@0 | 1932 | } |
michael@0 | 1933 | return track; |
michael@0 | 1934 | } |
michael@0 | 1935 | |
michael@0 | 1936 | void |
michael@0 | 1937 | MediaStream::RemoveAllListenersImpl() |
michael@0 | 1938 | { |
michael@0 | 1939 | for (int32_t i = mListeners.Length() - 1; i >= 0; --i) { |
michael@0 | 1940 | nsRefPtr<MediaStreamListener> listener = mListeners[i].forget(); |
michael@0 | 1941 | listener->NotifyRemoved(GraphImpl()); |
michael@0 | 1942 | } |
michael@0 | 1943 | mListeners.Clear(); |
michael@0 | 1944 | } |
michael@0 | 1945 | |
michael@0 | 1946 | void |
michael@0 | 1947 | MediaStream::DestroyImpl() |
michael@0 | 1948 | { |
michael@0 | 1949 | for (int32_t i = mConsumers.Length() - 1; i >= 0; --i) { |
michael@0 | 1950 | mConsumers[i]->Disconnect(); |
michael@0 | 1951 | } |
michael@0 | 1952 | for (uint32_t i = 0; i < mAudioOutputStreams.Length(); ++i) { |
michael@0 | 1953 | mAudioOutputStreams[i].mStream->Shutdown(); |
michael@0 | 1954 | } |
michael@0 | 1955 | mAudioOutputStreams.Clear(); |
michael@0 | 1956 | mGraph = nullptr; |
michael@0 | 1957 | } |
michael@0 | 1958 | |
michael@0 | 1959 | void |
michael@0 | 1960 | MediaStream::Destroy() |
michael@0 | 1961 | { |
michael@0 | 1962 | // Keep this stream alive until we leave this method |
michael@0 | 1963 | nsRefPtr<MediaStream> kungFuDeathGrip = this; |
michael@0 | 1964 | |
michael@0 | 1965 | class Message : public ControlMessage { |
michael@0 | 1966 | public: |
michael@0 | 1967 | Message(MediaStream* aStream) : ControlMessage(aStream) {} |
michael@0 | 1968 | virtual void Run() |
michael@0 | 1969 | { |
michael@0 | 1970 | mStream->RemoveAllListenersImpl(); |
michael@0 | 1971 | auto graph = mStream->GraphImpl(); |
michael@0 | 1972 | mStream->DestroyImpl(); |
michael@0 | 1973 | graph->RemoveStream(mStream); |
michael@0 | 1974 | } |
michael@0 | 1975 | virtual void RunDuringShutdown() |
michael@0 | 1976 | { Run(); } |
michael@0 | 1977 | }; |
michael@0 | 1978 | mWrapper = nullptr; |
michael@0 | 1979 | GraphImpl()->AppendMessage(new Message(this)); |
michael@0 | 1980 | // Message::RunDuringShutdown may have removed this stream from the graph, |
michael@0 | 1981 | // but our kungFuDeathGrip above will have kept this stream alive if |
michael@0 | 1982 | // necessary. |
michael@0 | 1983 | mMainThreadDestroyed = true; |
michael@0 | 1984 | } |
michael@0 | 1985 | |
michael@0 | 1986 | void |
michael@0 | 1987 | MediaStream::AddAudioOutput(void* aKey) |
michael@0 | 1988 | { |
michael@0 | 1989 | class Message : public ControlMessage { |
michael@0 | 1990 | public: |
michael@0 | 1991 | Message(MediaStream* aStream, void* aKey) : ControlMessage(aStream), mKey(aKey) {} |
michael@0 | 1992 | virtual void Run() |
michael@0 | 1993 | { |
michael@0 | 1994 | mStream->AddAudioOutputImpl(mKey); |
michael@0 | 1995 | } |
michael@0 | 1996 | void* mKey; |
michael@0 | 1997 | }; |
michael@0 | 1998 | GraphImpl()->AppendMessage(new Message(this, aKey)); |
michael@0 | 1999 | } |
michael@0 | 2000 | |
michael@0 | 2001 | void |
michael@0 | 2002 | MediaStream::SetAudioOutputVolumeImpl(void* aKey, float aVolume) |
michael@0 | 2003 | { |
michael@0 | 2004 | for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) { |
michael@0 | 2005 | if (mAudioOutputs[i].mKey == aKey) { |
michael@0 | 2006 | mAudioOutputs[i].mVolume = aVolume; |
michael@0 | 2007 | return; |
michael@0 | 2008 | } |
michael@0 | 2009 | } |
michael@0 | 2010 | NS_ERROR("Audio output key not found"); |
michael@0 | 2011 | } |
michael@0 | 2012 | |
michael@0 | 2013 | void |
michael@0 | 2014 | MediaStream::SetAudioOutputVolume(void* aKey, float aVolume) |
michael@0 | 2015 | { |
michael@0 | 2016 | class Message : public ControlMessage { |
michael@0 | 2017 | public: |
michael@0 | 2018 | Message(MediaStream* aStream, void* aKey, float aVolume) : |
michael@0 | 2019 | ControlMessage(aStream), mKey(aKey), mVolume(aVolume) {} |
michael@0 | 2020 | virtual void Run() |
michael@0 | 2021 | { |
michael@0 | 2022 | mStream->SetAudioOutputVolumeImpl(mKey, mVolume); |
michael@0 | 2023 | } |
michael@0 | 2024 | void* mKey; |
michael@0 | 2025 | float mVolume; |
michael@0 | 2026 | }; |
michael@0 | 2027 | GraphImpl()->AppendMessage(new Message(this, aKey, aVolume)); |
michael@0 | 2028 | } |
michael@0 | 2029 | |
michael@0 | 2030 | void |
michael@0 | 2031 | MediaStream::RemoveAudioOutputImpl(void* aKey) |
michael@0 | 2032 | { |
michael@0 | 2033 | for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) { |
michael@0 | 2034 | if (mAudioOutputs[i].mKey == aKey) { |
michael@0 | 2035 | mAudioOutputs.RemoveElementAt(i); |
michael@0 | 2036 | return; |
michael@0 | 2037 | } |
michael@0 | 2038 | } |
michael@0 | 2039 | NS_ERROR("Audio output key not found"); |
michael@0 | 2040 | } |
michael@0 | 2041 | |
michael@0 | 2042 | void |
michael@0 | 2043 | MediaStream::RemoveAudioOutput(void* aKey) |
michael@0 | 2044 | { |
michael@0 | 2045 | class Message : public ControlMessage { |
michael@0 | 2046 | public: |
michael@0 | 2047 | Message(MediaStream* aStream, void* aKey) : |
michael@0 | 2048 | ControlMessage(aStream), mKey(aKey) {} |
michael@0 | 2049 | virtual void Run() |
michael@0 | 2050 | { |
michael@0 | 2051 | mStream->RemoveAudioOutputImpl(mKey); |
michael@0 | 2052 | } |
michael@0 | 2053 | void* mKey; |
michael@0 | 2054 | }; |
michael@0 | 2055 | GraphImpl()->AppendMessage(new Message(this, aKey)); |
michael@0 | 2056 | } |
michael@0 | 2057 | |
michael@0 | 2058 | void |
michael@0 | 2059 | MediaStream::AddVideoOutput(VideoFrameContainer* aContainer) |
michael@0 | 2060 | { |
michael@0 | 2061 | class Message : public ControlMessage { |
michael@0 | 2062 | public: |
michael@0 | 2063 | Message(MediaStream* aStream, VideoFrameContainer* aContainer) : |
michael@0 | 2064 | ControlMessage(aStream), mContainer(aContainer) {} |
michael@0 | 2065 | virtual void Run() |
michael@0 | 2066 | { |
michael@0 | 2067 | mStream->AddVideoOutputImpl(mContainer.forget()); |
michael@0 | 2068 | } |
michael@0 | 2069 | nsRefPtr<VideoFrameContainer> mContainer; |
michael@0 | 2070 | }; |
michael@0 | 2071 | GraphImpl()->AppendMessage(new Message(this, aContainer)); |
michael@0 | 2072 | } |
michael@0 | 2073 | |
michael@0 | 2074 | void |
michael@0 | 2075 | MediaStream::RemoveVideoOutput(VideoFrameContainer* aContainer) |
michael@0 | 2076 | { |
michael@0 | 2077 | class Message : public ControlMessage { |
michael@0 | 2078 | public: |
michael@0 | 2079 | Message(MediaStream* aStream, VideoFrameContainer* aContainer) : |
michael@0 | 2080 | ControlMessage(aStream), mContainer(aContainer) {} |
michael@0 | 2081 | virtual void Run() |
michael@0 | 2082 | { |
michael@0 | 2083 | mStream->RemoveVideoOutputImpl(mContainer); |
michael@0 | 2084 | } |
michael@0 | 2085 | nsRefPtr<VideoFrameContainer> mContainer; |
michael@0 | 2086 | }; |
michael@0 | 2087 | GraphImpl()->AppendMessage(new Message(this, aContainer)); |
michael@0 | 2088 | } |
michael@0 | 2089 | |
michael@0 | 2090 | void |
michael@0 | 2091 | MediaStream::ChangeExplicitBlockerCount(int32_t aDelta) |
michael@0 | 2092 | { |
michael@0 | 2093 | class Message : public ControlMessage { |
michael@0 | 2094 | public: |
michael@0 | 2095 | Message(MediaStream* aStream, int32_t aDelta) : |
michael@0 | 2096 | ControlMessage(aStream), mDelta(aDelta) {} |
michael@0 | 2097 | virtual void Run() |
michael@0 | 2098 | { |
michael@0 | 2099 | mStream->ChangeExplicitBlockerCountImpl( |
michael@0 | 2100 | mStream->GraphImpl()->mStateComputedTime, mDelta); |
michael@0 | 2101 | } |
michael@0 | 2102 | int32_t mDelta; |
michael@0 | 2103 | }; |
michael@0 | 2104 | |
michael@0 | 2105 | // This can happen if this method has been called asynchronously, and the |
michael@0 | 2106 | // stream has been destroyed since then. |
michael@0 | 2107 | if (mMainThreadDestroyed) { |
michael@0 | 2108 | return; |
michael@0 | 2109 | } |
michael@0 | 2110 | GraphImpl()->AppendMessage(new Message(this, aDelta)); |
michael@0 | 2111 | } |
michael@0 | 2112 | |
michael@0 | 2113 | void |
michael@0 | 2114 | MediaStream::AddListenerImpl(already_AddRefed<MediaStreamListener> aListener) |
michael@0 | 2115 | { |
michael@0 | 2116 | MediaStreamListener* listener = *mListeners.AppendElement() = aListener; |
michael@0 | 2117 | listener->NotifyBlockingChanged(GraphImpl(), |
michael@0 | 2118 | mNotifiedBlocked ? MediaStreamListener::BLOCKED : MediaStreamListener::UNBLOCKED); |
michael@0 | 2119 | if (mNotifiedFinished) { |
michael@0 | 2120 | listener->NotifyFinished(GraphImpl()); |
michael@0 | 2121 | } |
michael@0 | 2122 | if (mNotifiedHasCurrentData) { |
michael@0 | 2123 | listener->NotifyHasCurrentData(GraphImpl()); |
michael@0 | 2124 | } |
michael@0 | 2125 | } |
michael@0 | 2126 | |
michael@0 | 2127 | void |
michael@0 | 2128 | MediaStream::AddListener(MediaStreamListener* aListener) |
michael@0 | 2129 | { |
michael@0 | 2130 | class Message : public ControlMessage { |
michael@0 | 2131 | public: |
michael@0 | 2132 | Message(MediaStream* aStream, MediaStreamListener* aListener) : |
michael@0 | 2133 | ControlMessage(aStream), mListener(aListener) {} |
michael@0 | 2134 | virtual void Run() |
michael@0 | 2135 | { |
michael@0 | 2136 | mStream->AddListenerImpl(mListener.forget()); |
michael@0 | 2137 | } |
michael@0 | 2138 | nsRefPtr<MediaStreamListener> mListener; |
michael@0 | 2139 | }; |
michael@0 | 2140 | GraphImpl()->AppendMessage(new Message(this, aListener)); |
michael@0 | 2141 | } |
michael@0 | 2142 | |
michael@0 | 2143 | void |
michael@0 | 2144 | MediaStream::RemoveListenerImpl(MediaStreamListener* aListener) |
michael@0 | 2145 | { |
michael@0 | 2146 |   // Hold a ref so the listener stays alive for NotifyRemoved(); this wouldn't |
michael@0 |      |   // be needed if we could notify before removing. |
michael@0 | 2147 | nsRefPtr<MediaStreamListener> listener(aListener); |
michael@0 | 2148 | mListeners.RemoveElement(aListener); |
michael@0 | 2149 | listener->NotifyRemoved(GraphImpl()); |
michael@0 | 2150 | } |
michael@0 | 2151 | |
michael@0 | 2152 | void |
michael@0 | 2153 | MediaStream::RemoveListener(MediaStreamListener* aListener) |
michael@0 | 2154 | { |
michael@0 | 2155 | class Message : public ControlMessage { |
michael@0 | 2156 | public: |
michael@0 | 2157 | Message(MediaStream* aStream, MediaStreamListener* aListener) : |
michael@0 | 2158 | ControlMessage(aStream), mListener(aListener) {} |
michael@0 | 2159 | virtual void Run() |
michael@0 | 2160 | { |
michael@0 | 2161 | mStream->RemoveListenerImpl(mListener); |
michael@0 | 2162 | } |
michael@0 | 2163 | nsRefPtr<MediaStreamListener> mListener; |
michael@0 | 2164 | }; |
michael@0 | 2165 |   // If the stream has already been destroyed, its listeners have been |
michael@0 | 2166 |   // removed or will be removed shortly. |
michael@0 | 2167 | if (!IsDestroyed()) { |
michael@0 | 2168 | GraphImpl()->AppendMessage(new Message(this, aListener)); |
michael@0 | 2169 | } |
michael@0 | 2170 | } |
michael@0 | 2171 | |
michael@0 | 2172 | void |
michael@0 | 2173 | MediaStream::RunAfterPendingUpdates(nsRefPtr<nsIRunnable> aRunnable) |
michael@0 | 2174 | { |
michael@0 | 2175 | MOZ_ASSERT(NS_IsMainThread()); |
michael@0 | 2176 | MediaStreamGraphImpl* graph = GraphImpl(); |
michael@0 | 2177 | |
michael@0 | 2178 | // Special case when a non-realtime graph has not started, to ensure the |
michael@0 | 2179 | // runnable will run in finite time. |
michael@0 | 2180 | if (!(graph->mRealtime || graph->mNonRealtimeProcessing)) { |
michael@0 | 2181 |     aRunnable->Run(); |
michael@0 |      |     // Return so the runnable is not also queued below and run a second time |
michael@0 |      |     // if the graph starts processing later. |
michael@0 |      |     return; |
michael@0 | 2182 |   } |
michael@0 | 2183 | |
michael@0 | 2184 | class Message : public ControlMessage { |
michael@0 | 2185 | public: |
michael@0 | 2186 | explicit Message(MediaStream* aStream, |
michael@0 | 2187 | already_AddRefed<nsIRunnable> aRunnable) |
michael@0 | 2188 | : ControlMessage(aStream) |
michael@0 | 2189 | , mRunnable(aRunnable) {} |
michael@0 | 2190 | virtual void Run() MOZ_OVERRIDE |
michael@0 | 2191 | { |
michael@0 | 2192 | mStream->Graph()-> |
michael@0 | 2193 | DispatchToMainThreadAfterStreamStateUpdate(mRunnable.forget()); |
michael@0 | 2194 | } |
michael@0 | 2195 | virtual void RunDuringShutdown() MOZ_OVERRIDE |
michael@0 | 2196 | { |
michael@0 | 2197 | // Don't run mRunnable now as it may call AppendMessage() which would |
michael@0 | 2198 | // assume that there are no remaining controlMessagesToRunDuringShutdown. |
michael@0 | 2199 | MOZ_ASSERT(NS_IsMainThread()); |
michael@0 | 2200 | NS_DispatchToCurrentThread(mRunnable); |
michael@0 | 2201 | } |
michael@0 | 2202 | private: |
michael@0 | 2203 | nsRefPtr<nsIRunnable> mRunnable; |
michael@0 | 2204 | }; |
michael@0 | 2205 | |
michael@0 | 2206 | graph->AppendMessage(new Message(this, aRunnable.forget())); |
michael@0 | 2207 | } |
michael@0 | 2208 | |
michael@0 | 2209 | void |
michael@0 | 2210 | MediaStream::SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled) |
michael@0 | 2211 | { |
michael@0 | 2212 | if (aEnabled) { |
michael@0 | 2213 | mDisabledTrackIDs.RemoveElement(aTrackID); |
michael@0 | 2214 | } else { |
michael@0 | 2215 | if (!mDisabledTrackIDs.Contains(aTrackID)) { |
michael@0 | 2216 | mDisabledTrackIDs.AppendElement(aTrackID); |
michael@0 | 2217 | } |
michael@0 | 2218 | } |
michael@0 | 2219 | } |
michael@0 | 2220 | |
michael@0 | 2221 | void |
michael@0 | 2222 | MediaStream::SetTrackEnabled(TrackID aTrackID, bool aEnabled) |
michael@0 | 2223 | { |
michael@0 | 2224 | class Message : public ControlMessage { |
michael@0 | 2225 | public: |
michael@0 | 2226 | Message(MediaStream* aStream, TrackID aTrackID, bool aEnabled) : |
michael@0 | 2227 | ControlMessage(aStream), mTrackID(aTrackID), mEnabled(aEnabled) {} |
michael@0 | 2228 | virtual void Run() |
michael@0 | 2229 | { |
michael@0 | 2230 | mStream->SetTrackEnabledImpl(mTrackID, mEnabled); |
michael@0 | 2231 | } |
michael@0 | 2232 | TrackID mTrackID; |
michael@0 | 2233 | bool mEnabled; |
michael@0 | 2234 | }; |
michael@0 | 2235 | GraphImpl()->AppendMessage(new Message(this, aTrackID, aEnabled)); |
michael@0 | 2236 | } |
michael@0 | 2237 | |
michael@0 | 2238 | void |
michael@0 | 2239 | MediaStream::ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment, MediaSegment* aRawSegment) |
michael@0 | 2240 | { |
michael@0 | 2241 | // mMutex must be owned here if this is a SourceMediaStream |
michael@0 | 2242 | if (!mDisabledTrackIDs.Contains(aTrackID)) { |
michael@0 | 2243 | return; |
michael@0 | 2244 | } |
michael@0 | 2245 | aSegment->ReplaceWithDisabled(); |
michael@0 | 2246 | if (aRawSegment) { |
michael@0 | 2247 | aRawSegment->ReplaceWithDisabled(); |
michael@0 | 2248 | } |
michael@0 | 2249 | } |
michael@0 | 2250 | |
michael@0 | 2251 | void |
michael@0 | 2252 | SourceMediaStream::DestroyImpl() |
michael@0 | 2253 | { |
michael@0 | 2254 | // Hold mMutex while mGraph is reset so that other threads holding mMutex |
michael@0 | 2255 |   // can null-check mGraph and know that the graph will not be destroyed. |
michael@0 | 2256 | MutexAutoLock lock(mMutex); |
michael@0 | 2257 | MediaStream::DestroyImpl(); |
michael@0 | 2258 | } |
michael@0 | 2259 | |
michael@0 | 2260 | void |
michael@0 | 2261 | SourceMediaStream::SetPullEnabled(bool aEnabled) |
michael@0 | 2262 | { |
michael@0 | 2263 | MutexAutoLock lock(mMutex); |
michael@0 | 2264 | mPullEnabled = aEnabled; |
michael@0 | 2265 | if (mPullEnabled && GraphImpl()) { |
michael@0 | 2266 | GraphImpl()->EnsureNextIteration(); |
michael@0 | 2267 | } |
michael@0 | 2268 | } |
michael@0 | 2269 | |
michael@0 | 2270 | void |
michael@0 | 2271 | SourceMediaStream::AddTrack(TrackID aID, TrackRate aRate, TrackTicks aStart, |
michael@0 | 2272 | MediaSegment* aSegment) |
michael@0 | 2273 | { |
michael@0 | 2274 | MutexAutoLock lock(mMutex); |
michael@0 | 2275 | TrackData* data = mUpdateTracks.AppendElement(); |
michael@0 | 2276 | data->mID = aID; |
michael@0 | 2277 | data->mInputRate = aRate; |
michael@0 | 2278 | // We resample all audio input tracks to the sample rate of the audio mixer. |
michael@0 | 2279 | data->mOutputRate = aSegment->GetType() == MediaSegment::AUDIO ? |
michael@0 | 2280 | GraphImpl()->AudioSampleRate() : aRate; |
michael@0 | 2281 | data->mStart = aStart; |
michael@0 | 2282 | data->mCommands = TRACK_CREATE; |
michael@0 | 2283 | data->mData = aSegment; |
michael@0 | 2284 | data->mHaveEnough = false; |
michael@0 | 2285 | if (auto graph = GraphImpl()) { |
michael@0 | 2286 | graph->EnsureNextIteration(); |
michael@0 | 2287 | } |
michael@0 | 2288 | } |
michael@0 | 2289 | |
michael@0 | 2290 | void |
michael@0 | 2291 | SourceMediaStream::ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSegment* aSegment) |
michael@0 | 2292 | { |
michael@0 | 2293 | if (aSegment->GetType() != MediaSegment::AUDIO || |
michael@0 | 2294 | aTrackData->mInputRate == GraphImpl()->AudioSampleRate()) { |
michael@0 | 2295 | return; |
michael@0 | 2296 | } |
michael@0 | 2297 | AudioSegment* segment = static_cast<AudioSegment*>(aSegment); |
michael@0 | 2298 | if (!aTrackData->mResampler) { |
michael@0 | 2299 | int channels = segment->ChannelCount(); |
michael@0 | 2300 | |
michael@0 | 2301 |     // If this segment is just silence, we delay instantiating the resampler. |
michael@0 | 2302 | if (channels) { |
michael@0 | 2303 | SpeexResamplerState* state = speex_resampler_init(channels, |
michael@0 | 2304 | aTrackData->mInputRate, |
michael@0 | 2305 | GraphImpl()->AudioSampleRate(), |
michael@0 | 2306 | SPEEX_RESAMPLER_QUALITY_DEFAULT, |
michael@0 | 2307 | nullptr); |
michael@0 | 2308 | if (!state) { |
michael@0 | 2309 | return; |
michael@0 | 2310 | } |
michael@0 | 2311 | aTrackData->mResampler.own(state); |
michael@0 | 2312 | } |
michael@0 | 2313 | } |
michael@0 | 2314 | segment->ResampleChunks(aTrackData->mResampler); |
michael@0 | 2315 | } |
michael@0 | 2316 | |
michael@0 | 2317 | bool |
michael@0 | 2318 | SourceMediaStream::AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegment *aRawSegment) |
michael@0 | 2319 | { |
michael@0 | 2320 | MutexAutoLock lock(mMutex); |
michael@0 | 2321 |   // ::EndAllTrackAndFinish() can end these before the sources notice |
michael@0 | 2322 | bool appended = false; |
michael@0 | 2323 | auto graph = GraphImpl(); |
michael@0 | 2324 | if (!mFinished && graph) { |
michael@0 | 2325 | TrackData *track = FindDataForTrack(aID); |
michael@0 | 2326 | if (track) { |
michael@0 | 2327 | // Data goes into mData, and on the next iteration of the MSG moves |
michael@0 | 2328 | // into the track's segment after NotifyQueuedTrackChanges(). This adds |
michael@0 | 2329 | // 0-10ms of delay before data gets to direct listeners. |
michael@0 | 2330 | // Indirect listeners (via subsequent TrackUnion nodes) are synced to |
michael@0 | 2331 | // playout time, and so can be delayed by buffering. |
michael@0 | 2332 | |
michael@0 | 2333 | // Apply track disabling before notifying any consumers directly |
michael@0 | 2334 | // or inserting into the graph |
michael@0 | 2335 | ApplyTrackDisabling(aID, aSegment, aRawSegment); |
michael@0 | 2336 | |
michael@0 | 2337 | ResampleAudioToGraphSampleRate(track, aSegment); |
michael@0 | 2338 | |
michael@0 | 2339 | // Must notify first, since AppendFrom() will empty out aSegment |
michael@0 | 2340 | NotifyDirectConsumers(track, aRawSegment ? aRawSegment : aSegment); |
michael@0 | 2341 | track->mData->AppendFrom(aSegment); // note: aSegment is now dead |
michael@0 | 2342 | appended = true; |
michael@0 | 2343 | graph->EnsureNextIteration(); |
michael@0 | 2344 | } else { |
michael@0 | 2345 | aSegment->Clear(); |
michael@0 | 2346 | } |
michael@0 | 2347 | } |
michael@0 | 2348 | return appended; |
michael@0 | 2349 | } |
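// [Editor's note, not part of the original file: an illustrative sketch of a
// producer feeding audio into a SourceMediaStream through the API above. The
// helper name, the track id and the 44100 Hz input rate are assumptions made
// for the example only.
//
//   static void PushAudioChunk(SourceMediaStream* aStream, AudioSegment* aChunk)
//   {
//     const TrackID kTrackId = 1;
//     static bool sTrackAdded = false;
//     if (!sTrackAdded) {
//       // The stream takes ownership of the (empty) segment; 44100 Hz input
//       // is resampled to the graph rate by ResampleAudioToGraphSampleRate().
//       aStream->AddTrack(kTrackId, 44100, 0, new AudioSegment());
//       sTrackAdded = true;
//     }
//     // AppendFrom() empties aChunk inside AppendToTrack(), so the caller
//     // must not reuse it afterwards.
//     aStream->AppendToTrack(kTrackId, aChunk, nullptr);
//     // When the source dries up: aStream->EndTrack(kTrackId);
//   }
// ]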
michael@0 | 2350 | |
michael@0 | 2351 | void |
michael@0 | 2352 | SourceMediaStream::NotifyDirectConsumers(TrackData *aTrack, |
michael@0 | 2353 | MediaSegment *aSegment) |
michael@0 | 2354 | { |
michael@0 | 2355 | // Call with mMutex locked |
michael@0 | 2356 | MOZ_ASSERT(aTrack); |
michael@0 | 2357 | |
michael@0 | 2358 | for (uint32_t j = 0; j < mDirectListeners.Length(); ++j) { |
michael@0 | 2359 | MediaStreamDirectListener* l = mDirectListeners[j]; |
michael@0 | 2360 | TrackTicks offset = 0; // FIX! need a separate TrackTicks.... or the end of the internal buffer |
michael@0 | 2361 | l->NotifyRealtimeData(static_cast<MediaStreamGraph*>(GraphImpl()), aTrack->mID, aTrack->mOutputRate, |
michael@0 | 2362 | offset, aTrack->mCommands, *aSegment); |
michael@0 | 2363 | } |
michael@0 | 2364 | } |
michael@0 | 2365 | |
michael@0 | 2366 | void |
michael@0 | 2367 | SourceMediaStream::AddDirectListener(MediaStreamDirectListener* aListener) |
michael@0 | 2368 | { |
michael@0 | 2369 | MutexAutoLock lock(mMutex); |
michael@0 | 2370 | mDirectListeners.AppendElement(aListener); |
michael@0 | 2371 | } |
michael@0 | 2372 | |
michael@0 | 2373 | void |
michael@0 | 2374 | SourceMediaStream::RemoveDirectListener(MediaStreamDirectListener* aListener) |
michael@0 | 2375 | { |
michael@0 | 2376 | MutexAutoLock lock(mMutex); |
michael@0 | 2377 | mDirectListeners.RemoveElement(aListener); |
michael@0 | 2378 | } |
michael@0 | 2379 | |
michael@0 | 2380 | bool |
michael@0 | 2381 | SourceMediaStream::HaveEnoughBuffered(TrackID aID) |
michael@0 | 2382 | { |
michael@0 | 2383 | MutexAutoLock lock(mMutex); |
michael@0 | 2384 | TrackData *track = FindDataForTrack(aID); |
michael@0 | 2385 | if (track) { |
michael@0 | 2386 | return track->mHaveEnough; |
michael@0 | 2387 | } |
michael@0 | 2388 | return false; |
michael@0 | 2389 | } |
michael@0 | 2390 | |
michael@0 | 2391 | void |
michael@0 | 2392 | SourceMediaStream::DispatchWhenNotEnoughBuffered(TrackID aID, |
michael@0 | 2393 | nsIEventTarget* aSignalThread, nsIRunnable* aSignalRunnable) |
michael@0 | 2394 | { |
michael@0 | 2395 | MutexAutoLock lock(mMutex); |
michael@0 | 2396 | TrackData* data = FindDataForTrack(aID); |
michael@0 | 2397 | if (!data) { |
michael@0 | 2398 | aSignalThread->Dispatch(aSignalRunnable, 0); |
michael@0 | 2399 | return; |
michael@0 | 2400 | } |
michael@0 | 2401 | |
michael@0 | 2402 | if (data->mHaveEnough) { |
michael@0 | 2403 | if (data->mDispatchWhenNotEnough.IsEmpty()) { |
michael@0 | 2404 | data->mDispatchWhenNotEnough.AppendElement()->Init(aSignalThread, aSignalRunnable); |
michael@0 | 2405 | } |
michael@0 | 2406 | } else { |
michael@0 | 2407 | aSignalThread->Dispatch(aSignalRunnable, 0); |
michael@0 | 2408 | } |
michael@0 | 2409 | } |
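// [Editor's note, not part of the original file: HaveEnoughBuffered() and
// DispatchWhenNotEnoughBuffered() together give producers back-pressure. A
// hypothetical producer (kTrackId, mProducerThread and mResumeRunnable are
// assumed names) might do:
//
//   if (source->HaveEnoughBuffered(kTrackId)) {
//     // Pause and ask to be poked once the graph wants more data; only one
//     // such request is queued per track, per the IsEmpty() check above.
//     source->DispatchWhenNotEnoughBuffered(kTrackId,
//                                           mProducerThread, mResumeRunnable);
//   } else {
//     source->AppendToTrack(kTrackId, chunk, nullptr);
//   }
// ]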
michael@0 | 2410 | |
michael@0 | 2411 | void |
michael@0 | 2412 | SourceMediaStream::EndTrack(TrackID aID) |
michael@0 | 2413 | { |
michael@0 | 2414 | MutexAutoLock lock(mMutex); |
michael@0 | 2415 |   // ::EndAllTrackAndFinish() can end these before the sources call this |
michael@0 | 2416 | if (!mFinished) { |
michael@0 | 2417 | TrackData *track = FindDataForTrack(aID); |
michael@0 | 2418 | if (track) { |
michael@0 | 2419 | track->mCommands |= TRACK_END; |
michael@0 | 2420 | } |
michael@0 | 2421 | } |
michael@0 | 2422 | if (auto graph = GraphImpl()) { |
michael@0 | 2423 | graph->EnsureNextIteration(); |
michael@0 | 2424 | } |
michael@0 | 2425 | } |
michael@0 | 2426 | |
michael@0 | 2427 | void |
michael@0 | 2428 | SourceMediaStream::AdvanceKnownTracksTime(StreamTime aKnownTime) |
michael@0 | 2429 | { |
michael@0 | 2430 | MutexAutoLock lock(mMutex); |
michael@0 | 2431 | MOZ_ASSERT(aKnownTime >= mUpdateKnownTracksTime); |
michael@0 | 2432 | mUpdateKnownTracksTime = aKnownTime; |
michael@0 | 2433 | if (auto graph = GraphImpl()) { |
michael@0 | 2434 | graph->EnsureNextIteration(); |
michael@0 | 2435 | } |
michael@0 | 2436 | } |
michael@0 | 2437 | |
michael@0 | 2438 | void |
michael@0 | 2439 | SourceMediaStream::FinishWithLockHeld() |
michael@0 | 2440 | { |
michael@0 | 2441 | mMutex.AssertCurrentThreadOwns(); |
michael@0 | 2442 | mUpdateFinished = true; |
michael@0 | 2443 | if (auto graph = GraphImpl()) { |
michael@0 | 2444 | graph->EnsureNextIteration(); |
michael@0 | 2445 | } |
michael@0 | 2446 | } |
michael@0 | 2447 | |
michael@0 | 2448 | void |
michael@0 | 2449 | SourceMediaStream::EndAllTrackAndFinish() |
michael@0 | 2450 | { |
michael@0 | 2451 | MutexAutoLock lock(mMutex); |
michael@0 | 2452 | for (uint32_t i = 0; i < mUpdateTracks.Length(); ++i) { |
michael@0 | 2453 | SourceMediaStream::TrackData* data = &mUpdateTracks[i]; |
michael@0 | 2454 | data->mCommands |= TRACK_END; |
michael@0 | 2455 | } |
michael@0 | 2456 | FinishWithLockHeld(); |
michael@0 | 2457 | // we will call NotifyFinished() to let GetUserMedia know |
michael@0 | 2458 | } |
michael@0 | 2459 | |
michael@0 | 2460 | TrackTicks |
michael@0 | 2461 | SourceMediaStream::GetBufferedTicks(TrackID aID) |
michael@0 | 2462 | { |
michael@0 | 2463 | StreamBuffer::Track* track = mBuffer.FindTrack(aID); |
michael@0 | 2464 | if (track) { |
michael@0 | 2465 | MediaSegment* segment = track->GetSegment(); |
michael@0 | 2466 | if (segment) { |
michael@0 | 2467 | return segment->GetDuration() - |
michael@0 | 2468 | track->TimeToTicksRoundDown( |
michael@0 | 2469 | GraphTimeToStreamTime(GraphImpl()->mStateComputedTime)); |
michael@0 | 2470 | } |
michael@0 | 2471 | } |
michael@0 | 2472 | return 0; |
michael@0 | 2473 | } |
michael@0 | 2474 | |
michael@0 | 2475 | void |
michael@0 | 2476 | SourceMediaStream::RegisterForAudioMixing() |
michael@0 | 2477 | { |
michael@0 | 2478 | MutexAutoLock lock(mMutex); |
michael@0 | 2479 | mNeedsMixing = true; |
michael@0 | 2480 | } |
michael@0 | 2481 | |
michael@0 | 2482 | bool |
michael@0 | 2483 | SourceMediaStream::NeedsMixing() |
michael@0 | 2484 | { |
michael@0 | 2485 | MutexAutoLock lock(mMutex); |
michael@0 | 2486 | return mNeedsMixing; |
michael@0 | 2487 | } |
michael@0 | 2488 | |
michael@0 | 2489 | void |
michael@0 | 2490 | MediaInputPort::Init() |
michael@0 | 2491 | { |
michael@0 | 2492 | STREAM_LOG(PR_LOG_DEBUG, ("Adding MediaInputPort %p (from %p to %p) to the graph", |
michael@0 | 2493 | this, mSource, mDest)); |
michael@0 | 2494 | mSource->AddConsumer(this); |
michael@0 | 2495 | mDest->AddInput(this); |
michael@0 | 2496 | // mPortCount decremented via MediaInputPort::Destroy's message |
michael@0 | 2497 | ++mDest->GraphImpl()->mPortCount; |
michael@0 | 2498 | } |
michael@0 | 2499 | |
michael@0 | 2500 | void |
michael@0 | 2501 | MediaInputPort::Disconnect() |
michael@0 | 2502 | { |
michael@0 | 2503 | NS_ASSERTION(!mSource == !mDest, |
michael@0 | 2504 |                "mSource and mDest must either both be null or both non-null"); |
michael@0 | 2505 | if (!mSource) |
michael@0 | 2506 | return; |
michael@0 | 2507 | |
michael@0 | 2508 | mSource->RemoveConsumer(this); |
michael@0 | 2509 | mSource = nullptr; |
michael@0 | 2510 | mDest->RemoveInput(this); |
michael@0 | 2511 | mDest = nullptr; |
michael@0 | 2512 | |
michael@0 | 2513 | GraphImpl()->SetStreamOrderDirty(); |
michael@0 | 2514 | } |
michael@0 | 2515 | |
michael@0 | 2516 | MediaInputPort::InputInterval |
michael@0 | 2517 | MediaInputPort::GetNextInputInterval(GraphTime aTime) |
michael@0 | 2518 | { |
michael@0 | 2519 | InputInterval result = { GRAPH_TIME_MAX, GRAPH_TIME_MAX, false }; |
michael@0 | 2520 | GraphTime t = aTime; |
michael@0 | 2521 | GraphTime end; |
michael@0 | 2522 | for (;;) { |
michael@0 | 2523 | if (!mDest->mBlocked.GetAt(t, &end)) |
michael@0 | 2524 | break; |
michael@0 | 2525 | if (end == GRAPH_TIME_MAX) |
michael@0 | 2526 | return result; |
michael@0 | 2527 | t = end; |
michael@0 | 2528 | } |
michael@0 | 2529 | result.mStart = t; |
michael@0 | 2530 | GraphTime sourceEnd; |
michael@0 | 2531 | result.mInputIsBlocked = mSource->mBlocked.GetAt(t, &sourceEnd); |
michael@0 | 2532 | result.mEnd = std::min(end, sourceEnd); |
michael@0 | 2533 | return result; |
michael@0 | 2534 | } |
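// [Editor's note, not part of the original file: a worked example of the
// interval computation above, with made-up times. If aTime is 100, the
// destination is blocked over [100, 120) and then unblocked until 200, and
// the source is unblocked until 150, the loop skips to t = 120 and the
// result is { mStart = 120, mEnd = 150, mInputIsBlocked = false }: the next
// stretch during which the destination wants input, clipped to where the
// source's own blocked state next changes. ]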
michael@0 | 2535 | |
michael@0 | 2536 | void |
michael@0 | 2537 | MediaInputPort::Destroy() |
michael@0 | 2538 | { |
michael@0 | 2539 | class Message : public ControlMessage { |
michael@0 | 2540 | public: |
michael@0 | 2541 | Message(MediaInputPort* aPort) |
michael@0 | 2542 | : ControlMessage(nullptr), mPort(aPort) {} |
michael@0 | 2543 | virtual void Run() |
michael@0 | 2544 | { |
michael@0 | 2545 | mPort->Disconnect(); |
michael@0 | 2546 | --mPort->GraphImpl()->mPortCount; |
michael@0 | 2547 | mPort->SetGraphImpl(nullptr); |
michael@0 | 2548 | NS_RELEASE(mPort); |
michael@0 | 2549 | } |
michael@0 | 2550 | virtual void RunDuringShutdown() |
michael@0 | 2551 | { |
michael@0 | 2552 | Run(); |
michael@0 | 2553 | } |
michael@0 | 2554 | MediaInputPort* mPort; |
michael@0 | 2555 | }; |
michael@0 | 2556 | GraphImpl()->AppendMessage(new Message(this)); |
michael@0 | 2557 | } |
michael@0 | 2558 | |
michael@0 | 2559 | MediaStreamGraphImpl* |
michael@0 | 2560 | MediaInputPort::GraphImpl() |
michael@0 | 2561 | { |
michael@0 | 2562 | return mGraph; |
michael@0 | 2563 | } |
michael@0 | 2564 | |
michael@0 | 2565 | MediaStreamGraph* |
michael@0 | 2566 | MediaInputPort::Graph() |
michael@0 | 2567 | { |
michael@0 | 2568 | return mGraph; |
michael@0 | 2569 | } |
michael@0 | 2570 | |
michael@0 | 2571 | void |
michael@0 | 2572 | MediaInputPort::SetGraphImpl(MediaStreamGraphImpl* aGraph) |
michael@0 | 2573 | { |
michael@0 | 2574 | MOZ_ASSERT(!mGraph || !aGraph, "Should only be set once"); |
michael@0 | 2575 | mGraph = aGraph; |
michael@0 | 2576 | } |
michael@0 | 2577 | |
michael@0 | 2578 | already_AddRefed<MediaInputPort> |
michael@0 | 2579 | ProcessedMediaStream::AllocateInputPort(MediaStream* aStream, uint32_t aFlags, |
michael@0 | 2580 | uint16_t aInputNumber, uint16_t aOutputNumber) |
michael@0 | 2581 | { |
michael@0 | 2582 | // This method creates two references to the MediaInputPort: one for |
michael@0 | 2583 | // the main thread, and one for the MediaStreamGraph. |
michael@0 | 2584 | class Message : public ControlMessage { |
michael@0 | 2585 | public: |
michael@0 | 2586 | Message(MediaInputPort* aPort) |
michael@0 | 2587 | : ControlMessage(aPort->GetDestination()), |
michael@0 | 2588 | mPort(aPort) {} |
michael@0 | 2589 | virtual void Run() |
michael@0 | 2590 | { |
michael@0 | 2591 | mPort->Init(); |
michael@0 | 2592 | // The graph holds its reference implicitly |
michael@0 | 2593 | mPort->GraphImpl()->SetStreamOrderDirty(); |
michael@0 | 2594 | unused << mPort.forget(); |
michael@0 | 2595 | } |
michael@0 | 2596 | virtual void RunDuringShutdown() |
michael@0 | 2597 | { |
michael@0 | 2598 | Run(); |
michael@0 | 2599 | } |
michael@0 | 2600 | nsRefPtr<MediaInputPort> mPort; |
michael@0 | 2601 | }; |
michael@0 | 2602 | nsRefPtr<MediaInputPort> port = new MediaInputPort(aStream, this, aFlags, |
michael@0 | 2603 | aInputNumber, aOutputNumber); |
michael@0 | 2604 | port->SetGraphImpl(GraphImpl()); |
michael@0 | 2605 | GraphImpl()->AppendMessage(new Message(port)); |
michael@0 | 2606 | return port.forget(); |
michael@0 | 2607 | } |
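// [Editor's note, not part of the original file: an illustrative sketch of
// wiring a source stream into a processed stream, with hypothetical variable
// names; all four arguments are passed explicitly here rather than relying
// on any defaults declared in the header.
//
//   nsRefPtr<MediaInputPort> port =
//     processedStream->AllocateInputPort(sourceStream, 0, 0, 0);
//   // ... later, on the main thread, when the connection is no longer needed:
//   port->Destroy();  // queues Disconnect() on the graph thread and drops
//                     // the graph's reference to the port
// ]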
michael@0 | 2608 | |
michael@0 | 2609 | void |
michael@0 | 2610 | ProcessedMediaStream::Finish() |
michael@0 | 2611 | { |
michael@0 | 2612 | class Message : public ControlMessage { |
michael@0 | 2613 | public: |
michael@0 | 2614 | Message(ProcessedMediaStream* aStream) |
michael@0 | 2615 | : ControlMessage(aStream) {} |
michael@0 | 2616 | virtual void Run() |
michael@0 | 2617 | { |
michael@0 | 2618 | mStream->GraphImpl()->FinishStream(mStream); |
michael@0 | 2619 | } |
michael@0 | 2620 | }; |
michael@0 | 2621 | GraphImpl()->AppendMessage(new Message(this)); |
michael@0 | 2622 | } |
michael@0 | 2623 | |
michael@0 | 2624 | void |
michael@0 | 2625 | ProcessedMediaStream::SetAutofinish(bool aAutofinish) |
michael@0 | 2626 | { |
michael@0 | 2627 | class Message : public ControlMessage { |
michael@0 | 2628 | public: |
michael@0 | 2629 | Message(ProcessedMediaStream* aStream, bool aAutofinish) |
michael@0 | 2630 | : ControlMessage(aStream), mAutofinish(aAutofinish) {} |
michael@0 | 2631 | virtual void Run() |
michael@0 | 2632 | { |
michael@0 | 2633 | static_cast<ProcessedMediaStream*>(mStream)->SetAutofinishImpl(mAutofinish); |
michael@0 | 2634 | } |
michael@0 | 2635 | bool mAutofinish; |
michael@0 | 2636 | }; |
michael@0 | 2637 | GraphImpl()->AppendMessage(new Message(this, aAutofinish)); |
michael@0 | 2638 | } |
michael@0 | 2639 | |
michael@0 | 2640 | void |
michael@0 | 2641 | ProcessedMediaStream::DestroyImpl() |
michael@0 | 2642 | { |
michael@0 | 2643 | for (int32_t i = mInputs.Length() - 1; i >= 0; --i) { |
michael@0 | 2644 | mInputs[i]->Disconnect(); |
michael@0 | 2645 | } |
michael@0 | 2646 | MediaStream::DestroyImpl(); |
michael@0 | 2647 | // The stream order is only important if there are connections, in which |
michael@0 | 2648 | // case MediaInputPort::Disconnect() called SetStreamOrderDirty(). |
michael@0 | 2649 | // MediaStreamGraphImpl::RemoveStream() will also call |
michael@0 | 2650 | // SetStreamOrderDirty(), for other reasons. |
michael@0 | 2651 | } |
michael@0 | 2652 | |
michael@0 | 2653 | /** |
michael@0 | 2654 | * We make the initial mCurrentTime nonzero so that zero times can have |
michael@0 | 2655 | * special meaning if necessary. |
michael@0 | 2656 | */ |
michael@0 | 2657 | static const int32_t INITIAL_CURRENT_TIME = 1; |
michael@0 | 2658 | |
michael@0 | 2659 | MediaStreamGraphImpl::MediaStreamGraphImpl(bool aRealtime, TrackRate aSampleRate) |
michael@0 | 2660 | : mCurrentTime(INITIAL_CURRENT_TIME) |
michael@0 | 2661 | , mStateComputedTime(INITIAL_CURRENT_TIME) |
michael@0 | 2662 | , mProcessingGraphUpdateIndex(0) |
michael@0 | 2663 | , mPortCount(0) |
michael@0 | 2664 | , mMonitor("MediaStreamGraphImpl") |
michael@0 | 2665 | , mLifecycleState(LIFECYCLE_THREAD_NOT_STARTED) |
michael@0 | 2666 | , mWaitState(WAITSTATE_RUNNING) |
michael@0 | 2667 | , mEndTime(GRAPH_TIME_MAX) |
michael@0 | 2668 | , mSampleRate(aSampleRate) |
michael@0 | 2669 | , mNeedAnotherIteration(false) |
michael@0 | 2670 | , mForceShutDown(false) |
michael@0 | 2671 | , mPostedRunInStableStateEvent(false) |
michael@0 | 2672 | , mDetectedNotRunning(false) |
michael@0 | 2673 | , mPostedRunInStableState(false) |
michael@0 | 2674 | , mRealtime(aRealtime) |
michael@0 | 2675 | , mNonRealtimeProcessing(false) |
michael@0 | 2676 | , mStreamOrderDirty(false) |
michael@0 | 2677 | , mLatencyLog(AsyncLatencyLogger::Get()) |
michael@0 | 2678 | , mMixer(nullptr) |
michael@0 | 2679 | , mMemoryReportMonitor("MSGIMemory") |
michael@0 | 2680 | , mSelfRef(MOZ_THIS_IN_INITIALIZER_LIST()) |
michael@0 | 2681 | , mAudioStreamSizes() |
michael@0 | 2682 | , mNeedsMemoryReport(false) |
michael@0 | 2683 | #ifdef DEBUG |
michael@0 | 2684 | , mCanRunMessagesSynchronously(false) |
michael@0 | 2685 | #endif |
michael@0 | 2686 | { |
michael@0 | 2687 | #ifdef PR_LOGGING |
michael@0 | 2688 | if (!gMediaStreamGraphLog) { |
michael@0 | 2689 | gMediaStreamGraphLog = PR_NewLogModule("MediaStreamGraph"); |
michael@0 | 2690 | } |
michael@0 | 2691 | #endif |
michael@0 | 2692 | |
michael@0 | 2693 | mCurrentTimeStamp = mInitialTimeStamp = mLastMainThreadUpdate = TimeStamp::Now(); |
michael@0 | 2694 | |
michael@0 | 2695 | RegisterWeakMemoryReporter(this); |
michael@0 | 2696 | } |
michael@0 | 2697 | |
michael@0 | 2698 | void |
michael@0 | 2699 | MediaStreamGraphImpl::Destroy() |
michael@0 | 2700 | { |
michael@0 | 2701 | // First unregister from memory reporting. |
michael@0 | 2702 | UnregisterWeakMemoryReporter(this); |
michael@0 | 2703 | |
michael@0 | 2704 | // Clear the self reference which will destroy this instance. |
michael@0 | 2705 | mSelfRef = nullptr; |
michael@0 | 2706 | } |
michael@0 | 2707 | |
michael@0 | 2708 | NS_IMPL_ISUPPORTS(MediaStreamGraphShutdownObserver, nsIObserver) |
michael@0 | 2709 | |
michael@0 | 2710 | static bool gShutdownObserverRegistered = false; |
michael@0 | 2711 | |
michael@0 | 2712 | NS_IMETHODIMP |
michael@0 | 2713 | MediaStreamGraphShutdownObserver::Observe(nsISupports *aSubject, |
michael@0 | 2714 | const char *aTopic, |
michael@0 | 2715 | const char16_t *aData) |
michael@0 | 2716 | { |
michael@0 | 2717 | if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) { |
michael@0 | 2718 | if (gGraph) { |
michael@0 | 2719 | gGraph->ForceShutDown(); |
michael@0 | 2720 | } |
michael@0 | 2721 | nsContentUtils::UnregisterShutdownObserver(this); |
michael@0 | 2722 | gShutdownObserverRegistered = false; |
michael@0 | 2723 | } |
michael@0 | 2724 | return NS_OK; |
michael@0 | 2725 | } |
michael@0 | 2726 | |
michael@0 | 2727 | MediaStreamGraph* |
michael@0 | 2728 | MediaStreamGraph::GetInstance() |
michael@0 | 2729 | { |
michael@0 | 2730 | NS_ASSERTION(NS_IsMainThread(), "Main thread only"); |
michael@0 | 2731 | |
michael@0 | 2732 | if (!gGraph) { |
michael@0 | 2733 | if (!gShutdownObserverRegistered) { |
michael@0 | 2734 | gShutdownObserverRegistered = true; |
michael@0 | 2735 | nsContentUtils::RegisterShutdownObserver(new MediaStreamGraphShutdownObserver()); |
michael@0 | 2736 | } |
michael@0 | 2737 | |
michael@0 | 2738 | AudioStream::InitPreferredSampleRate(); |
michael@0 | 2739 | |
michael@0 | 2740 | gGraph = new MediaStreamGraphImpl(true, AudioStream::PreferredSampleRate()); |
michael@0 | 2741 | |
michael@0 | 2742 | STREAM_LOG(PR_LOG_DEBUG, ("Starting up MediaStreamGraph %p", gGraph)); |
michael@0 | 2743 | } |
michael@0 | 2744 | |
michael@0 | 2745 | return gGraph; |
michael@0 | 2746 | } |
michael@0 | 2747 | |
michael@0 | 2748 | MediaStreamGraph* |
michael@0 | 2749 | MediaStreamGraph::CreateNonRealtimeInstance(TrackRate aSampleRate) |
michael@0 | 2750 | { |
michael@0 | 2751 | NS_ASSERTION(NS_IsMainThread(), "Main thread only"); |
michael@0 | 2752 | |
michael@0 | 2753 | MediaStreamGraphImpl* graph = new MediaStreamGraphImpl(false, aSampleRate); |
michael@0 | 2754 | |
michael@0 | 2755 | return graph; |
michael@0 | 2756 | } |
michael@0 | 2757 | |
michael@0 | 2758 | void |
michael@0 | 2759 | MediaStreamGraph::DestroyNonRealtimeInstance(MediaStreamGraph* aGraph) |
michael@0 | 2760 | { |
michael@0 | 2761 | NS_ASSERTION(NS_IsMainThread(), "Main thread only"); |
michael@0 | 2762 | MOZ_ASSERT(aGraph->IsNonRealtime(), "Should not destroy the global graph here"); |
michael@0 | 2763 | |
michael@0 | 2764 | MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph); |
michael@0 | 2765 | if (graph->mForceShutDown) |
michael@0 | 2766 | return; // already done |
michael@0 | 2767 | |
michael@0 | 2768 | if (!graph->mNonRealtimeProcessing) { |
michael@0 | 2769 | // Start the graph, but don't produce anything |
michael@0 | 2770 | graph->StartNonRealtimeProcessing(1, 0); |
michael@0 | 2771 | } |
michael@0 | 2772 | graph->ForceShutDown(); |
michael@0 | 2773 | } |
michael@0 | 2774 | |
michael@0 | 2775 | NS_IMPL_ISUPPORTS(MediaStreamGraphImpl, nsIMemoryReporter) |
michael@0 | 2776 | |
michael@0 | 2777 | struct ArrayClearer |
michael@0 | 2778 | { |
michael@0 | 2779 | ArrayClearer(nsTArray<AudioNodeSizes>& aArray) : mArray(aArray) {} |
michael@0 | 2780 | ~ArrayClearer() { mArray.Clear(); } |
michael@0 | 2781 | nsTArray<AudioNodeSizes>& mArray; |
michael@0 | 2782 | }; |
michael@0 | 2783 | |
michael@0 | 2784 | NS_IMETHODIMP |
michael@0 | 2785 | MediaStreamGraphImpl::CollectReports(nsIHandleReportCallback* aHandleReport, |
michael@0 | 2786 | nsISupports* aData) |
michael@0 | 2787 | { |
michael@0 | 2788 | // Clears out the report array after we're done with it. |
michael@0 | 2789 | ArrayClearer reportCleanup(mAudioStreamSizes); |
michael@0 | 2790 | |
michael@0 | 2791 | { |
michael@0 | 2792 | MonitorAutoLock memoryReportLock(mMemoryReportMonitor); |
michael@0 | 2793 | mNeedsMemoryReport = true; |
michael@0 | 2794 | |
michael@0 | 2795 | { |
michael@0 | 2796 | // Wake up the MSG thread. |
michael@0 | 2797 | MonitorAutoLock monitorLock(mMonitor); |
michael@0 | 2798 | EnsureImmediateWakeUpLocked(monitorLock); |
michael@0 | 2799 | } |
michael@0 | 2800 | |
michael@0 | 2801 | // Wait for the report to complete. |
michael@0 | 2802 | nsresult rv; |
michael@0 | 2803 | while ((rv = memoryReportLock.Wait()) != NS_OK) { |
michael@0 | 2804 | if (PR_GetError() != PR_PENDING_INTERRUPT_ERROR) { |
michael@0 | 2805 | return rv; |
michael@0 | 2806 | } |
michael@0 | 2807 | } |
michael@0 | 2808 | } |
michael@0 | 2809 | |
michael@0 | 2810 | #define REPORT(_path, _amount, _desc) \ |
michael@0 | 2811 | do { \ |
michael@0 | 2812 | nsresult rv; \ |
michael@0 | 2813 | rv = aHandleReport->Callback(EmptyCString(), _path, \ |
michael@0 | 2814 | KIND_HEAP, UNITS_BYTES, _amount, \ |
michael@0 | 2815 | NS_LITERAL_CSTRING(_desc), aData); \ |
michael@0 | 2816 | NS_ENSURE_SUCCESS(rv, rv); \ |
michael@0 | 2817 | } while (0) |
michael@0 | 2818 | |
michael@0 | 2819 | for (size_t i = 0; i < mAudioStreamSizes.Length(); i++) { |
michael@0 | 2820 | const AudioNodeSizes& usage = mAudioStreamSizes[i]; |
michael@0 | 2821 | const char* const nodeType = usage.mNodeType.get(); |
michael@0 | 2822 | |
michael@0 | 2823 | nsPrintfCString domNodePath("explicit/webaudio/audio-node/%s/dom-nodes", |
michael@0 | 2824 | nodeType); |
michael@0 | 2825 | REPORT(domNodePath, usage.mDomNode, |
michael@0 | 2826 | "Memory used by AudioNode DOM objects (Web Audio)."); |
michael@0 | 2827 | |
michael@0 | 2828 | nsPrintfCString enginePath("explicit/webaudio/audio-node/%s/engine-objects", |
michael@0 | 2829 | nodeType); |
michael@0 | 2830 | REPORT(enginePath, usage.mEngine, |
michael@0 | 2831 | "Memory used by AudioNode engine objects (Web Audio)."); |
michael@0 | 2832 | |
michael@0 | 2833 | nsPrintfCString streamPath("explicit/webaudio/audio-node/%s/stream-objects", |
michael@0 | 2834 | nodeType); |
michael@0 | 2835 | REPORT(streamPath, usage.mStream, |
michael@0 | 2836 | "Memory used by AudioNode stream objects (Web Audio)."); |
michael@0 | 2837 | |
michael@0 | 2838 | } |
michael@0 | 2839 | |
michael@0 | 2840 | #undef REPORT |
michael@0 | 2841 | |
michael@0 | 2842 | return NS_OK; |
michael@0 | 2843 | } |
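// [Editor's note, not part of the original file: the REPORT macro emits one
// entry per node type and bucket, e.g. a path of the form
// "explicit/webaudio/audio-node/GainNode/engine-objects" (GainNode is just an
// example node type). The graph thread fills mAudioStreamSizes while this
// thread waits on mMemoryReportMonitor, and ArrayClearer empties the array
// once the entries have been forwarded to aHandleReport. ]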
michael@0 | 2844 | |
michael@0 | 2845 | SourceMediaStream* |
michael@0 | 2846 | MediaStreamGraph::CreateSourceStream(DOMMediaStream* aWrapper) |
michael@0 | 2847 | { |
michael@0 | 2848 | SourceMediaStream* stream = new SourceMediaStream(aWrapper); |
michael@0 | 2849 | NS_ADDREF(stream); |
michael@0 | 2850 | MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this); |
michael@0 | 2851 | stream->SetGraphImpl(graph); |
michael@0 | 2852 | graph->AppendMessage(new CreateMessage(stream)); |
michael@0 | 2853 | return stream; |
michael@0 | 2854 | } |
michael@0 | 2855 | |
michael@0 | 2856 | ProcessedMediaStream* |
michael@0 | 2857 | MediaStreamGraph::CreateTrackUnionStream(DOMMediaStream* aWrapper) |
michael@0 | 2858 | { |
michael@0 | 2859 | TrackUnionStream* stream = new TrackUnionStream(aWrapper); |
michael@0 | 2860 | NS_ADDREF(stream); |
michael@0 | 2861 | MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this); |
michael@0 | 2862 | stream->SetGraphImpl(graph); |
michael@0 | 2863 | graph->AppendMessage(new CreateMessage(stream)); |
michael@0 | 2864 | return stream; |
michael@0 | 2865 | } |
michael@0 | 2866 | |
michael@0 | 2867 | AudioNodeExternalInputStream* |
michael@0 | 2868 | MediaStreamGraph::CreateAudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate) |
michael@0 | 2869 | { |
michael@0 | 2870 | MOZ_ASSERT(NS_IsMainThread()); |
michael@0 | 2871 | if (!aSampleRate) { |
michael@0 | 2872 | aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate(); |
michael@0 | 2873 | } |
michael@0 | 2874 | AudioNodeExternalInputStream* stream = new AudioNodeExternalInputStream(aEngine, aSampleRate); |
michael@0 | 2875 | NS_ADDREF(stream); |
michael@0 | 2876 | MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this); |
michael@0 | 2877 | stream->SetGraphImpl(graph); |
michael@0 | 2878 | graph->AppendMessage(new CreateMessage(stream)); |
michael@0 | 2879 | return stream; |
michael@0 | 2880 | } |
michael@0 | 2881 | |
michael@0 | 2882 | AudioNodeStream* |
michael@0 | 2883 | MediaStreamGraph::CreateAudioNodeStream(AudioNodeEngine* aEngine, |
michael@0 | 2884 | AudioNodeStreamKind aKind, |
michael@0 | 2885 | TrackRate aSampleRate) |
michael@0 | 2886 | { |
michael@0 | 2887 | MOZ_ASSERT(NS_IsMainThread()); |
michael@0 | 2888 | if (!aSampleRate) { |
michael@0 | 2889 | aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate(); |
michael@0 | 2890 | } |
michael@0 | 2891 | AudioNodeStream* stream = new AudioNodeStream(aEngine, aKind, aSampleRate); |
michael@0 | 2892 | NS_ADDREF(stream); |
michael@0 | 2893 | MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this); |
michael@0 | 2894 | stream->SetGraphImpl(graph); |
michael@0 | 2895 | if (aEngine->HasNode()) { |
michael@0 | 2896 | stream->SetChannelMixingParametersImpl(aEngine->NodeMainThread()->ChannelCount(), |
michael@0 | 2897 | aEngine->NodeMainThread()->ChannelCountModeValue(), |
michael@0 | 2898 | aEngine->NodeMainThread()->ChannelInterpretationValue()); |
michael@0 | 2899 | } |
michael@0 | 2900 | graph->AppendMessage(new CreateMessage(stream)); |
michael@0 | 2901 | return stream; |
michael@0 | 2902 | } |
michael@0 | 2903 | |
michael@0 | 2904 | bool |
michael@0 | 2905 | MediaStreamGraph::IsNonRealtime() const |
michael@0 | 2906 | { |
michael@0 | 2907 | return this != gGraph; |
michael@0 | 2908 | } |
michael@0 | 2909 | |
michael@0 | 2910 | void |
michael@0 | 2911 | MediaStreamGraph::StartNonRealtimeProcessing(TrackRate aRate, uint32_t aTicksToProcess) |
michael@0 | 2912 | { |
michael@0 | 2913 | NS_ASSERTION(NS_IsMainThread(), "main thread only"); |
michael@0 | 2914 | |
michael@0 | 2915 | MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this); |
michael@0 | 2916 | NS_ASSERTION(!graph->mRealtime, "non-realtime only"); |
michael@0 | 2917 | |
michael@0 | 2918 | if (graph->mNonRealtimeProcessing) |
michael@0 | 2919 | return; |
michael@0 | 2920 | graph->mEndTime = graph->mCurrentTime + TicksToTimeRoundUp(aRate, aTicksToProcess); |
michael@0 | 2921 | graph->mNonRealtimeProcessing = true; |
michael@0 | 2922 | graph->EnsureRunInStableState(); |
michael@0 | 2923 | } |
michael@0 | 2924 | |
michael@0 | 2925 | void |
michael@0 | 2926 | ProcessedMediaStream::AddInput(MediaInputPort* aPort) |
michael@0 | 2927 | { |
michael@0 | 2928 | mInputs.AppendElement(aPort); |
michael@0 | 2929 | GraphImpl()->SetStreamOrderDirty(); |
michael@0 | 2930 | } |
michael@0 | 2931 | |
michael@0 | 2932 | } |