content/media/MediaStreamGraph.cpp

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/content/media/MediaStreamGraph.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,2932 @@
     1.4 +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
     1.5 +/* This Source Code Form is subject to the terms of the Mozilla Public
     1.6 + * License, v. 2.0. If a copy of the MPL was not distributed with this file,
     1.7 + * You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.8 +
     1.9 +#include "MediaStreamGraphImpl.h"
    1.10 +#include "mozilla/LinkedList.h"
    1.11 +#include "mozilla/MathAlgorithms.h"
    1.12 +#include "mozilla/unused.h"
    1.13 +
    1.14 +#include "AudioSegment.h"
    1.15 +#include "VideoSegment.h"
    1.16 +#include "nsContentUtils.h"
    1.17 +#include "nsIAppShell.h"
    1.18 +#include "nsIObserver.h"
    1.19 +#include "nsPrintfCString.h"
    1.20 +#include "nsServiceManagerUtils.h"
    1.21 +#include "nsWidgetsCID.h"
    1.22 +#include "prerror.h"
    1.23 +#include "prlog.h"
    1.24 +#include "mozilla/Attributes.h"
    1.25 +#include "TrackUnionStream.h"
    1.26 +#include "ImageContainer.h"
    1.27 +#include "AudioChannelService.h"
    1.28 +#include "AudioNodeEngine.h"
    1.29 +#include "AudioNodeStream.h"
    1.30 +#include "AudioNodeExternalInputStream.h"
    1.31 +#include <algorithm>
    1.32 +#include "DOMMediaStream.h"
    1.33 +#include "GeckoProfiler.h"
    1.34 +#include "mozilla/unused.h"
    1.35 +#include "speex/speex_resampler.h"
    1.36 +#ifdef MOZ_WEBRTC
    1.37 +#include "AudioOutputObserver.h"
    1.38 +#endif
    1.39 +
    1.40 +using namespace mozilla::layers;
    1.41 +using namespace mozilla::dom;
    1.42 +using namespace mozilla::gfx;
    1.43 +
    1.44 +namespace mozilla {
    1.45 +
    1.46 +#ifdef PR_LOGGING
    1.47 +PRLogModuleInfo* gMediaStreamGraphLog;
    1.48 +#define STREAM_LOG(type, msg) PR_LOG(gMediaStreamGraphLog, type, msg)
    1.49 +#else
    1.50 +#define STREAM_LOG(type, msg)
    1.51 +#endif
    1.52 +
    1.53 +/**
    1.54 + * The singleton graph instance.
    1.55 + */
    1.56 +static MediaStreamGraphImpl* gGraph;
    1.57 +
    1.58 +MediaStreamGraphImpl::~MediaStreamGraphImpl()
    1.59 +{
    1.60 +  NS_ASSERTION(IsEmpty(),
    1.61 +               "All streams should have been destroyed by messages from the main thread");
    1.62 +  STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p destroyed", this));
    1.63 +}
    1.64 +
    1.65 +
    1.66 +StreamTime
    1.67 +MediaStreamGraphImpl::GetDesiredBufferEnd(MediaStream* aStream)
    1.68 +{
    1.69 +  StreamTime current = mCurrentTime - aStream->mBufferStartTime;
     1.70 +  // When waking up media decoders, we need a longer safety margin, as it can
     1.71 +  // take more time to get new samples. A factor of two seems to work.
    1.72 +  return current +
    1.73 +      2 * MillisecondsToMediaTime(std::max(AUDIO_TARGET_MS, VIDEO_TARGET_MS));
    1.74 +}
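
A minimal standalone sketch of the buffering target above, assuming the fixed-point
MediaTime representation (20 fractional bits) and placeholder values for
AUDIO_TARGET_MS/VIDEO_TARGET_MS, none of which are defined in this file:

#include <algorithm>
#include <cstdint>
#include <cstdio>

typedef int64_t MediaTime;
static const int kFracBits = 20;       // assumed MEDIA_TIME_FRAC_BITS
static const int kAudioTargetMs = 20;  // placeholder for AUDIO_TARGET_MS
static const int kVideoTargetMs = 10;  // placeholder for VIDEO_TARGET_MS

static MediaTime MsToMediaTime(int64_t aMs) {
  // milliseconds -> fixed-point seconds
  return (aMs << kFracBits) / 1000;
}

int main() {
  MediaTime current = MsToMediaTime(1500);   // stream has played 1.5 s
  // Double the larger of the audio/video targets as a wake-up safety margin.
  MediaTime desiredEnd =
    current + 2 * MsToMediaTime(std::max(kAudioTargetMs, kVideoTargetMs));
  std::printf("buffer until %.3f s\n", double(desiredEnd) / (1 << kFracBits));
  return 0;
}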
    1.75 +
    1.76 +void
    1.77 +MediaStreamGraphImpl::FinishStream(MediaStream* aStream)
    1.78 +{
    1.79 +  if (aStream->mFinished)
    1.80 +    return;
    1.81 +  STREAM_LOG(PR_LOG_DEBUG, ("MediaStream %p will finish", aStream));
    1.82 +  aStream->mFinished = true;
    1.83 +  aStream->mBuffer.AdvanceKnownTracksTime(STREAM_TIME_MAX);
    1.84 +  // Force at least one more iteration of the control loop, since we rely
    1.85 +  // on UpdateCurrentTime to notify our listeners once the stream end
    1.86 +  // has been reached.
    1.87 +  EnsureNextIteration();
    1.88 +
    1.89 +  SetStreamOrderDirty();
    1.90 +}
    1.91 +
    1.92 +void
    1.93 +MediaStreamGraphImpl::AddStream(MediaStream* aStream)
    1.94 +{
    1.95 +  aStream->mBufferStartTime = mCurrentTime;
    1.96 +  *mStreams.AppendElement() = already_AddRefed<MediaStream>(aStream);
    1.97 +  STREAM_LOG(PR_LOG_DEBUG, ("Adding media stream %p to the graph", aStream));
    1.98 +
    1.99 +  SetStreamOrderDirty();
   1.100 +}
   1.101 +
   1.102 +void
   1.103 +MediaStreamGraphImpl::RemoveStream(MediaStream* aStream)
   1.104 +{
   1.105 +  // Remove references in mStreamUpdates before we allow aStream to die.
   1.106 +  // Pending updates are not needed (since the main thread has already given
   1.107 +  // up the stream) so we will just drop them.
   1.108 +  {
   1.109 +    MonitorAutoLock lock(mMonitor);
   1.110 +    for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) {
   1.111 +      if (mStreamUpdates[i].mStream == aStream) {
   1.112 +        mStreamUpdates[i].mStream = nullptr;
   1.113 +      }
   1.114 +    }
   1.115 +  }
   1.116 +
   1.117 +  // Ensure that mMixer is updated when necessary.
   1.118 +  SetStreamOrderDirty();
   1.119 +
   1.120 +  // This unrefs the stream, probably destroying it
   1.121 +  mStreams.RemoveElement(aStream);
   1.122 +
   1.123 +  STREAM_LOG(PR_LOG_DEBUG, ("Removing media stream %p from the graph", aStream));
   1.124 +}
   1.125 +
   1.126 +void
   1.127 +MediaStreamGraphImpl::UpdateConsumptionState(SourceMediaStream* aStream)
   1.128 +{
   1.129 +  MediaStreamListener::Consumption state =
   1.130 +      aStream->mIsConsumed ? MediaStreamListener::CONSUMED
   1.131 +      : MediaStreamListener::NOT_CONSUMED;
   1.132 +  if (state != aStream->mLastConsumptionState) {
   1.133 +    aStream->mLastConsumptionState = state;
   1.134 +    for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
   1.135 +      MediaStreamListener* l = aStream->mListeners[j];
   1.136 +      l->NotifyConsumptionChanged(this, state);
   1.137 +    }
   1.138 +  }
   1.139 +}
   1.140 +
   1.141 +void
   1.142 +MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream,
   1.143 +                                          GraphTime aDesiredUpToTime,
   1.144 +                                          bool* aEnsureNextIteration)
   1.145 +{
   1.146 +  bool finished;
   1.147 +  {
   1.148 +    MutexAutoLock lock(aStream->mMutex);
   1.149 +    if (aStream->mPullEnabled && !aStream->mFinished &&
   1.150 +        !aStream->mListeners.IsEmpty()) {
    1.151 +      // Compute how much stream time we'll need assuming we don't block
    1.152 +      // the stream at all between mStateComputedTime and
    1.153 +      // aDesiredUpToTime.
   1.154 +      StreamTime t =
   1.155 +        GraphTimeToStreamTime(aStream, mStateComputedTime) +
   1.156 +        (aDesiredUpToTime - mStateComputedTime);
   1.157 +      STREAM_LOG(PR_LOG_DEBUG+1, ("Calling NotifyPull aStream=%p t=%f current end=%f", aStream,
   1.158 +                                  MediaTimeToSeconds(t),
   1.159 +                                  MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
   1.160 +      if (t > aStream->mBuffer.GetEnd()) {
   1.161 +        *aEnsureNextIteration = true;
   1.162 +#ifdef DEBUG
   1.163 +        if (aStream->mListeners.Length() == 0) {
   1.164 +          STREAM_LOG(PR_LOG_ERROR, ("No listeners in NotifyPull aStream=%p desired=%f current end=%f",
   1.165 +                                    aStream, MediaTimeToSeconds(t),
   1.166 +                                    MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
   1.167 +          aStream->DumpTrackInfo();
   1.168 +        }
   1.169 +#endif
   1.170 +        for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
   1.171 +          MediaStreamListener* l = aStream->mListeners[j];
   1.172 +          {
   1.173 +            MutexAutoUnlock unlock(aStream->mMutex);
   1.174 +            l->NotifyPull(this, t);
   1.175 +          }
   1.176 +        }
   1.177 +      }
   1.178 +    }
   1.179 +    finished = aStream->mUpdateFinished;
   1.180 +    for (int32_t i = aStream->mUpdateTracks.Length() - 1; i >= 0; --i) {
   1.181 +      SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
   1.182 +      aStream->ApplyTrackDisabling(data->mID, data->mData);
   1.183 +      for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
   1.184 +        MediaStreamListener* l = aStream->mListeners[j];
   1.185 +        TrackTicks offset = (data->mCommands & SourceMediaStream::TRACK_CREATE)
   1.186 +            ? data->mStart : aStream->mBuffer.FindTrack(data->mID)->GetSegment()->GetDuration();
   1.187 +        l->NotifyQueuedTrackChanges(this, data->mID, data->mOutputRate,
   1.188 +                                    offset, data->mCommands, *data->mData);
   1.189 +      }
   1.190 +      if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
   1.191 +        MediaSegment* segment = data->mData.forget();
   1.192 +        STREAM_LOG(PR_LOG_DEBUG, ("SourceMediaStream %p creating track %d, rate %d, start %lld, initial end %lld",
   1.193 +                                  aStream, data->mID, data->mOutputRate, int64_t(data->mStart),
   1.194 +                                  int64_t(segment->GetDuration())));
   1.195 +
   1.196 +        aStream->mBuffer.AddTrack(data->mID, data->mOutputRate, data->mStart, segment);
   1.197 +        // The track has taken ownership of data->mData, so let's replace
   1.198 +        // data->mData with an empty clone.
   1.199 +        data->mData = segment->CreateEmptyClone();
   1.200 +        data->mCommands &= ~SourceMediaStream::TRACK_CREATE;
   1.201 +      } else if (data->mData->GetDuration() > 0) {
   1.202 +        MediaSegment* dest = aStream->mBuffer.FindTrack(data->mID)->GetSegment();
   1.203 +        STREAM_LOG(PR_LOG_DEBUG+1, ("SourceMediaStream %p track %d, advancing end from %lld to %lld",
   1.204 +                                    aStream, data->mID,
   1.205 +                                    int64_t(dest->GetDuration()),
   1.206 +                                    int64_t(dest->GetDuration() + data->mData->GetDuration())));
   1.207 +        dest->AppendFrom(data->mData);
   1.208 +      }
   1.209 +      if (data->mCommands & SourceMediaStream::TRACK_END) {
   1.210 +        aStream->mBuffer.FindTrack(data->mID)->SetEnded();
   1.211 +        aStream->mUpdateTracks.RemoveElementAt(i);
   1.212 +      }
   1.213 +    }
   1.214 +    if (!aStream->mFinished) {
   1.215 +      aStream->mBuffer.AdvanceKnownTracksTime(aStream->mUpdateKnownTracksTime);
   1.216 +    }
   1.217 +  }
   1.218 +  if (aStream->mBuffer.GetEnd() > 0) {
   1.219 +    aStream->mHasCurrentData = true;
   1.220 +  }
   1.221 +  if (finished) {
   1.222 +    FinishStream(aStream);
   1.223 +  }
   1.224 +}
   1.225 +
   1.226 +void
   1.227 +MediaStreamGraphImpl::UpdateBufferSufficiencyState(SourceMediaStream* aStream)
   1.228 +{
   1.229 +  StreamTime desiredEnd = GetDesiredBufferEnd(aStream);
   1.230 +  nsTArray<SourceMediaStream::ThreadAndRunnable> runnables;
   1.231 +
   1.232 +  {
   1.233 +    MutexAutoLock lock(aStream->mMutex);
   1.234 +    for (uint32_t i = 0; i < aStream->mUpdateTracks.Length(); ++i) {
   1.235 +      SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
   1.236 +      if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
   1.237 +        // This track hasn't been created yet, so we have no sufficiency
   1.238 +        // data. The track will be created in the next iteration of the
   1.239 +        // control loop and then we'll fire insufficiency notifications
   1.240 +        // if necessary.
   1.241 +        continue;
   1.242 +      }
   1.243 +      if (data->mCommands & SourceMediaStream::TRACK_END) {
   1.244 +        // This track will end, so no point in firing not-enough-data
   1.245 +        // callbacks.
   1.246 +        continue;
   1.247 +      }
   1.248 +      StreamBuffer::Track* track = aStream->mBuffer.FindTrack(data->mID);
   1.249 +      // Note that track->IsEnded() must be false, otherwise we would have
   1.250 +      // removed the track from mUpdateTracks already.
   1.251 +      NS_ASSERTION(!track->IsEnded(), "What is this track doing here?");
   1.252 +      data->mHaveEnough = track->GetEndTimeRoundDown() >= desiredEnd;
   1.253 +      if (!data->mHaveEnough) {
   1.254 +        runnables.MoveElementsFrom(data->mDispatchWhenNotEnough);
   1.255 +      }
   1.256 +    }
   1.257 +  }
   1.258 +
   1.259 +  for (uint32_t i = 0; i < runnables.Length(); ++i) {
   1.260 +    runnables[i].mTarget->Dispatch(runnables[i].mRunnable, 0);
   1.261 +  }
   1.262 +}
   1.263 +
   1.264 +StreamTime
   1.265 +MediaStreamGraphImpl::GraphTimeToStreamTime(MediaStream* aStream,
   1.266 +                                            GraphTime aTime)
   1.267 +{
   1.268 +  NS_ASSERTION(aTime <= mStateComputedTime,
   1.269 +               "Don't ask about times where we haven't made blocking decisions yet");
   1.270 +  if (aTime <= mCurrentTime) {
   1.271 +    return std::max<StreamTime>(0, aTime - aStream->mBufferStartTime);
   1.272 +  }
   1.273 +  GraphTime t = mCurrentTime;
   1.274 +  StreamTime s = t - aStream->mBufferStartTime;
   1.275 +  while (t < aTime) {
   1.276 +    GraphTime end;
   1.277 +    if (!aStream->mBlocked.GetAt(t, &end)) {
   1.278 +      s += std::min(aTime, end) - t;
   1.279 +    }
   1.280 +    t = end;
   1.281 +  }
   1.282 +  return std::max<StreamTime>(0, s);
   1.283 +}
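
A minimal sketch of the conversion above, with the stream's blocking status
modelled as a sorted list of half-open graph-time intervals instead of the
real time-varying structure (hypothetical simplified types, not the Gecko ones):

#include <algorithm>
#include <cstdint>
#include <vector>

typedef int64_t GraphTime;
typedef int64_t StreamTime;

struct BlockedInterval { GraphTime start, end; bool blocked; };

// Stream time advances only over the unblocked portions of graph time
// between the stream's buffer start and aTime.
StreamTime GraphTimeToStreamTime(const std::vector<BlockedInterval>& aIntervals,
                                 GraphTime aBufferStartTime, GraphTime aTime)
{
  StreamTime s = 0;
  for (const BlockedInterval& i : aIntervals) {
    GraphTime start = std::max(i.start, aBufferStartTime);
    GraphTime end = std::min(i.end, aTime);
    if (start < end && !i.blocked) {
      s += end - start;   // only unblocked time counts as stream time
    }
  }
  return std::max<StreamTime>(0, s);
}

int main() {
  std::vector<BlockedInterval> intervals = {
    {0, 100, false}, {100, 150, true}, {150, 300, false}};
  return GraphTimeToStreamTime(intervals, 0, 200) == 150 ? 0 : 1;
}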
   1.284 +
   1.285 +StreamTime
   1.286 +MediaStreamGraphImpl::GraphTimeToStreamTimeOptimistic(MediaStream* aStream,
   1.287 +                                                      GraphTime aTime)
   1.288 +{
   1.289 +  GraphTime computedUpToTime = std::min(mStateComputedTime, aTime);
   1.290 +  StreamTime s = GraphTimeToStreamTime(aStream, computedUpToTime);
   1.291 +  return s + (aTime - computedUpToTime);
   1.292 +}
   1.293 +
   1.294 +GraphTime
   1.295 +MediaStreamGraphImpl::StreamTimeToGraphTime(MediaStream* aStream,
   1.296 +                                            StreamTime aTime, uint32_t aFlags)
   1.297 +{
   1.298 +  if (aTime >= STREAM_TIME_MAX) {
   1.299 +    return GRAPH_TIME_MAX;
   1.300 +  }
   1.301 +  MediaTime bufferElapsedToCurrentTime = mCurrentTime - aStream->mBufferStartTime;
   1.302 +  if (aTime < bufferElapsedToCurrentTime ||
   1.303 +      (aTime == bufferElapsedToCurrentTime && !(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL))) {
   1.304 +    return aTime + aStream->mBufferStartTime;
   1.305 +  }
   1.306 +
   1.307 +  MediaTime streamAmount = aTime - bufferElapsedToCurrentTime;
   1.308 +  NS_ASSERTION(streamAmount >= 0, "Can't answer queries before current time");
   1.309 +
   1.310 +  GraphTime t = mCurrentTime;
   1.311 +  while (t < GRAPH_TIME_MAX) {
   1.312 +    if (!(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL) && streamAmount == 0) {
   1.313 +      return t;
   1.314 +    }
   1.315 +    bool blocked;
   1.316 +    GraphTime end;
   1.317 +    if (t < mStateComputedTime) {
   1.318 +      blocked = aStream->mBlocked.GetAt(t, &end);
   1.319 +      end = std::min(end, mStateComputedTime);
   1.320 +    } else {
   1.321 +      blocked = false;
   1.322 +      end = GRAPH_TIME_MAX;
   1.323 +    }
   1.324 +    if (blocked) {
   1.325 +      t = end;
   1.326 +    } else {
   1.327 +      if (streamAmount == 0) {
   1.328 +        // No more stream time to consume at time t, so we're done.
   1.329 +        break;
   1.330 +      }
   1.331 +      MediaTime consume = std::min(end - t, streamAmount);
   1.332 +      streamAmount -= consume;
   1.333 +      t += consume;
   1.334 +    }
   1.335 +  }
   1.336 +  return t;
   1.337 +}
   1.338 +
   1.339 +GraphTime
   1.340 +MediaStreamGraphImpl::GetAudioPosition(MediaStream* aStream)
   1.341 +{
   1.342 +  if (aStream->mAudioOutputStreams.IsEmpty()) {
   1.343 +    return mCurrentTime;
   1.344 +  }
   1.345 +  int64_t positionInFrames = aStream->mAudioOutputStreams[0].mStream->GetPositionInFrames();
   1.346 +  if (positionInFrames < 0) {
   1.347 +    return mCurrentTime;
   1.348 +  }
   1.349 +  return aStream->mAudioOutputStreams[0].mAudioPlaybackStartTime +
   1.350 +      TicksToTimeRoundDown(mSampleRate,
   1.351 +                           positionInFrames);
   1.352 +}
   1.353 +
   1.354 +void
   1.355 +MediaStreamGraphImpl::UpdateCurrentTime()
   1.356 +{
   1.357 +  GraphTime prevCurrentTime, nextCurrentTime;
   1.358 +  if (mRealtime) {
   1.359 +    TimeStamp now = TimeStamp::Now();
   1.360 +    prevCurrentTime = mCurrentTime;
   1.361 +    nextCurrentTime =
   1.362 +      SecondsToMediaTime((now - mCurrentTimeStamp).ToSeconds()) + mCurrentTime;
   1.363 +
   1.364 +    mCurrentTimeStamp = now;
   1.365 +    STREAM_LOG(PR_LOG_DEBUG+1, ("Updating current time to %f (real %f, mStateComputedTime %f)",
   1.366 +               MediaTimeToSeconds(nextCurrentTime),
   1.367 +               (now - mInitialTimeStamp).ToSeconds(),
   1.368 +               MediaTimeToSeconds(mStateComputedTime)));
   1.369 +  } else {
   1.370 +    prevCurrentTime = mCurrentTime;
   1.371 +    nextCurrentTime = mCurrentTime + MillisecondsToMediaTime(MEDIA_GRAPH_TARGET_PERIOD_MS);
   1.372 +    STREAM_LOG(PR_LOG_DEBUG+1, ("Updating offline current time to %f (mStateComputedTime %f)",
   1.373 +               MediaTimeToSeconds(nextCurrentTime),
   1.374 +               MediaTimeToSeconds(mStateComputedTime)));
   1.375 +  }
   1.376 +
   1.377 +  if (mStateComputedTime < nextCurrentTime) {
   1.378 +    STREAM_LOG(PR_LOG_WARNING, ("Media graph global underrun detected"));
   1.379 +    nextCurrentTime = mStateComputedTime;
   1.380 +  }
   1.381 +
   1.382 +  if (prevCurrentTime >= nextCurrentTime) {
   1.383 +    NS_ASSERTION(prevCurrentTime == nextCurrentTime, "Time can't go backwards!");
   1.384 +    // This could happen due to low clock resolution, maybe?
   1.385 +    STREAM_LOG(PR_LOG_DEBUG, ("Time did not advance"));
   1.386 +    // There's not much left to do here, but the code below that notifies
   1.387 +    // listeners that streams have ended still needs to run.
   1.388 +  }
   1.389 +
   1.390 +  nsTArray<MediaStream*> streamsReadyToFinish;
   1.391 +  nsAutoTArray<bool,800> streamHasOutput;
   1.392 +  streamHasOutput.SetLength(mStreams.Length());
   1.393 +  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
   1.394 +    MediaStream* stream = mStreams[i];
   1.395 +
   1.396 +    // Calculate blocked time and fire Blocked/Unblocked events
   1.397 +    GraphTime blockedTime = 0;
   1.398 +    GraphTime t = prevCurrentTime;
   1.399 +    // include |nextCurrentTime| to ensure NotifyBlockingChanged() is called
   1.400 +    // before NotifyFinished() when |nextCurrentTime == stream end time|
   1.401 +    while (t <= nextCurrentTime) {
   1.402 +      GraphTime end;
   1.403 +      bool blocked = stream->mBlocked.GetAt(t, &end);
   1.404 +      if (blocked) {
   1.405 +        blockedTime += std::min(end, nextCurrentTime) - t;
   1.406 +      }
   1.407 +      if (blocked != stream->mNotifiedBlocked) {
   1.408 +        for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
   1.409 +          MediaStreamListener* l = stream->mListeners[j];
   1.410 +          l->NotifyBlockingChanged(this,
   1.411 +              blocked ? MediaStreamListener::BLOCKED : MediaStreamListener::UNBLOCKED);
   1.412 +        }
   1.413 +        stream->mNotifiedBlocked = blocked;
   1.414 +      }
   1.415 +      t = end;
   1.416 +    }
   1.417 +
   1.418 +    stream->AdvanceTimeVaryingValuesToCurrentTime(nextCurrentTime, blockedTime);
   1.419 +    // Advance mBlocked last so that implementations of
   1.420 +    // AdvanceTimeVaryingValuesToCurrentTime can rely on the value of mBlocked.
   1.421 +    stream->mBlocked.AdvanceCurrentTime(nextCurrentTime);
   1.422 +
   1.423 +    streamHasOutput[i] = blockedTime < nextCurrentTime - prevCurrentTime;
   1.424 +    // Make this an assertion when bug 957832 is fixed.
   1.425 +    NS_WARN_IF_FALSE(!streamHasOutput[i] || !stream->mNotifiedFinished,
   1.426 +      "Shouldn't have already notified of finish *and* have output!");
   1.427 +
   1.428 +    if (stream->mFinished && !stream->mNotifiedFinished) {
   1.429 +      streamsReadyToFinish.AppendElement(stream);
   1.430 +    }
   1.431 +    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p bufferStartTime=%f blockedTime=%f",
   1.432 +                                stream, MediaTimeToSeconds(stream->mBufferStartTime),
   1.433 +                                MediaTimeToSeconds(blockedTime)));
   1.434 +  }
   1.435 +
   1.436 +  mCurrentTime = nextCurrentTime;
   1.437 +
   1.438 +  // Do these after setting mCurrentTime so that StreamTimeToGraphTime works properly.
   1.439 +  for (uint32_t i = 0; i < streamHasOutput.Length(); ++i) {
   1.440 +    if (!streamHasOutput[i]) {
   1.441 +      continue;
   1.442 +    }
   1.443 +    MediaStream* stream = mStreams[i];
   1.444 +    for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
   1.445 +      MediaStreamListener* l = stream->mListeners[j];
   1.446 +      l->NotifyOutput(this, mCurrentTime);
   1.447 +    }
   1.448 +  }
   1.449 +
   1.450 +  for (uint32_t i = 0; i < streamsReadyToFinish.Length(); ++i) {
   1.451 +    MediaStream* stream = streamsReadyToFinish[i];
   1.452 +    // The stream is fully finished when all of its track data has been played
   1.453 +    // out.
   1.454 +    if (mCurrentTime >=
   1.455 +          stream->StreamTimeToGraphTime(stream->GetStreamBuffer().GetAllTracksEnd()))  {
   1.456 +      NS_WARN_IF_FALSE(stream->mNotifiedBlocked,
   1.457 +        "Should've notified blocked=true for a fully finished stream");
   1.458 +      stream->mNotifiedFinished = true;
   1.459 +      stream->mLastPlayedVideoFrame.SetNull();
   1.460 +      SetStreamOrderDirty();
   1.461 +      for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
   1.462 +        MediaStreamListener* l = stream->mListeners[j];
   1.463 +        l->NotifyFinished(this);
   1.464 +      }
   1.465 +    }
   1.466 +  }
   1.467 +}
   1.468 +
   1.469 +bool
   1.470 +MediaStreamGraphImpl::WillUnderrun(MediaStream* aStream, GraphTime aTime,
   1.471 +                                   GraphTime aEndBlockingDecisions, GraphTime* aEnd)
   1.472 +{
   1.473 +  // Finished streams can't underrun. ProcessedMediaStreams also can't cause
   1.474 +  // underrun currently, since we'll always be able to produce data for them
   1.475 +  // unless they block on some other stream.
   1.476 +  if (aStream->mFinished || aStream->AsProcessedStream()) {
   1.477 +    return false;
   1.478 +  }
   1.479 +  GraphTime bufferEnd =
   1.480 +    StreamTimeToGraphTime(aStream, aStream->GetBufferEnd(),
   1.481 +                          INCLUDE_TRAILING_BLOCKED_INTERVAL);
   1.482 +#ifdef DEBUG
   1.483 +  if (bufferEnd < mCurrentTime) {
   1.484 +    STREAM_LOG(PR_LOG_ERROR, ("MediaStream %p underrun, "
   1.485 +                              "bufferEnd %f < mCurrentTime %f (%lld < %lld), Streamtime %lld",
   1.486 +                              aStream, MediaTimeToSeconds(bufferEnd), MediaTimeToSeconds(mCurrentTime),
   1.487 +                              bufferEnd, mCurrentTime, aStream->GetBufferEnd()));
   1.488 +    aStream->DumpTrackInfo();
   1.489 +    NS_ASSERTION(bufferEnd >= mCurrentTime, "Buffer underran");
   1.490 +  }
   1.491 +#endif
   1.492 +  // We should block after bufferEnd.
   1.493 +  if (bufferEnd <= aTime) {
   1.494 +    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to data underrun, "
   1.495 +                                "bufferEnd %f",
   1.496 +                                aStream, MediaTimeToSeconds(bufferEnd)));
   1.497 +    return true;
   1.498 +  }
     1.499 +  // We should keep blocking if we're currently blocked and we don't have
     1.500 +  // data all the way through to aEndBlockingDecisions. If we're not
     1.501 +  // currently blocked, we'll block soon anyway once the data runs out, but
     1.502 +  // we might as well remain unblocked and play the data we've got while
     1.503 +  // we can.
   1.504 +  if (bufferEnd <= aEndBlockingDecisions && aStream->mBlocked.GetBefore(aTime)) {
   1.505 +    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to speculative data underrun, "
   1.506 +                                "bufferEnd %f",
   1.507 +                                aStream, MediaTimeToSeconds(bufferEnd)));
   1.508 +    return true;
   1.509 +  }
   1.510 +  // Reconsider decisions at bufferEnd
   1.511 +  *aEnd = std::min(*aEnd, bufferEnd);
   1.512 +  return false;
   1.513 +}
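
The same decision, reduced to plain integers (a hypothetical helper, not part
of the graph API):

#include <algorithm>
#include <cstdint>

typedef int64_t GraphTime;

// Returns true if the stream should block at aTime. Otherwise trims *aEnd so
// the blocking decision is revisited when the buffered data runs out.
static bool WillUnderrun(GraphTime aBufferEnd, GraphTime aTime,
                         GraphTime aEndBlockingDecisions,
                         bool aCurrentlyBlocked, GraphTime* aEnd)
{
  if (aBufferEnd <= aTime) {
    return true;   // already out of data: block now
  }
  if (aBufferEnd <= aEndBlockingDecisions && aCurrentlyBlocked) {
    return true;   // will run dry before the decision horizon and already blocked
  }
  *aEnd = std::min(*aEnd, aBufferEnd);   // reconsider when the buffer empties
  return false;
}

int main() {
  GraphTime end = 1000;
  return WillUnderrun(/*bufferEnd*/ 800, /*time*/ 500, /*horizon*/ 900,
                      /*currentlyBlocked*/ false, &end) ? 1 : (end == 800 ? 0 : 1);
}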
   1.514 +
   1.515 +void
   1.516 +MediaStreamGraphImpl::MarkConsumed(MediaStream* aStream)
   1.517 +{
   1.518 +  if (aStream->mIsConsumed) {
   1.519 +    return;
   1.520 +  }
   1.521 +  aStream->mIsConsumed = true;
   1.522 +
   1.523 +  ProcessedMediaStream* ps = aStream->AsProcessedStream();
   1.524 +  if (!ps) {
   1.525 +    return;
   1.526 +  }
   1.527 +  // Mark all the inputs to this stream as consumed
   1.528 +  for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
   1.529 +    MarkConsumed(ps->mInputs[i]->mSource);
   1.530 +  }
   1.531 +}
   1.532 +
   1.533 +void
   1.534 +MediaStreamGraphImpl::UpdateStreamOrderForStream(mozilla::LinkedList<MediaStream>* aStack,
   1.535 +                                                 already_AddRefed<MediaStream> aStream)
   1.536 +{
   1.537 +  nsRefPtr<MediaStream> stream = aStream;
   1.538 +  NS_ASSERTION(!stream->mHasBeenOrdered, "stream should not have already been ordered");
   1.539 +  if (stream->mIsOnOrderingStack) {
   1.540 +    MediaStream* iter = aStack->getLast();
   1.541 +    AudioNodeStream* ns = stream->AsAudioNodeStream();
   1.542 +    bool delayNodePresent = ns ? ns->Engine()->AsDelayNodeEngine() != nullptr : false;
   1.543 +    bool cycleFound = false;
   1.544 +    if (iter) {
   1.545 +      do {
   1.546 +        cycleFound = true;
   1.547 +        iter->AsProcessedStream()->mInCycle = true;
   1.548 +        AudioNodeStream* ns = iter->AsAudioNodeStream();
   1.549 +        if (ns && ns->Engine()->AsDelayNodeEngine()) {
   1.550 +          delayNodePresent = true;
   1.551 +        }
   1.552 +        iter = iter->getPrevious();
   1.553 +      } while (iter && iter != stream);
   1.554 +    }
   1.555 +    if (cycleFound && !delayNodePresent) {
     1.556 +      // If we have detected a cycle, the previous loop exited with iter ==
     1.557 +      // stream, or with iter null, meaning the node is connected to itself.
     1.558 +      // Go back in the cycle and mute all nodes we find, or just mute the node itself.
   1.559 +      if (!iter) {
   1.560 +        // The node is connected to itself.
   1.561 +        // There can't be a non-AudioNodeStream here, because only AudioNodes
   1.562 +        // can be self-connected.
   1.563 +        iter = aStack->getLast();
   1.564 +        MOZ_ASSERT(iter->AsAudioNodeStream());
   1.565 +        iter->AsAudioNodeStream()->Mute();
   1.566 +      } else {
   1.567 +        MOZ_ASSERT(iter);
   1.568 +        do {
   1.569 +          AudioNodeStream* nodeStream = iter->AsAudioNodeStream();
   1.570 +          if (nodeStream) {
   1.571 +            nodeStream->Mute();
   1.572 +          }
   1.573 +        } while((iter = iter->getNext()));
   1.574 +      }
   1.575 +    }
   1.576 +    return;
   1.577 +  }
   1.578 +  ProcessedMediaStream* ps = stream->AsProcessedStream();
   1.579 +  if (ps) {
   1.580 +    aStack->insertBack(stream);
   1.581 +    stream->mIsOnOrderingStack = true;
   1.582 +    for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
   1.583 +      MediaStream* source = ps->mInputs[i]->mSource;
   1.584 +      if (!source->mHasBeenOrdered) {
   1.585 +        nsRefPtr<MediaStream> s = source;
   1.586 +        UpdateStreamOrderForStream(aStack, s.forget());
   1.587 +      }
   1.588 +    }
   1.589 +    aStack->popLast();
   1.590 +    stream->mIsOnOrderingStack = false;
   1.591 +  }
   1.592 +
   1.593 +  stream->mHasBeenOrdered = true;
   1.594 +  *mStreams.AppendElement() = stream.forget();
   1.595 +}
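
A standalone sketch of the ordering pass above: a depth-first walk that emits
streams in dependency order (inputs before consumers) and flags nodes reached
through a back edge as being in a cycle. Plain indices stand in for MediaStream
pointers, and the cycle marking is simplified relative to the code above:

#include <cstddef>
#include <cstdio>
#include <vector>

struct Node {
  std::vector<size_t> inputs;   // indices of upstream nodes
  bool ordered = false;
  bool onStack = false;
  bool inCycle = false;
};

static void Order(std::vector<Node>& aNodes, size_t aIndex,
                  std::vector<size_t>& aOut)
{
  Node& n = aNodes[aIndex];
  if (n.onStack) {
    n.inCycle = true;           // back edge: this node participates in a cycle
    return;
  }
  if (n.ordered) {
    return;
  }
  n.onStack = true;
  for (size_t input : n.inputs) {
    Order(aNodes, input, aOut);
  }
  n.onStack = false;
  n.ordered = true;
  aOut.push_back(aIndex);       // all of this node's inputs were emitted already
}

int main() {
  // Node 2 consumes 0 and 1; node 1 consumes 0; node 0 consumes 2 (a cycle).
  std::vector<Node> nodes(3);
  nodes[2].inputs = {0, 1};
  nodes[1].inputs = {0};
  nodes[0].inputs = {2};
  std::vector<size_t> order;
  for (size_t i = 0; i < nodes.size(); ++i) {
    if (!nodes[i].ordered) {
      Order(nodes, i, order);
    }
  }
  for (size_t i : order) {
    std::printf("%zu%s\n", i, nodes[i].inCycle ? " (in cycle)" : "");
  }
  return 0;
}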
   1.596 +
   1.597 +static void AudioMixerCallback(AudioDataValue* aMixedBuffer,
   1.598 +                               AudioSampleFormat aFormat,
   1.599 +                               uint32_t aChannels,
   1.600 +                               uint32_t aFrames,
   1.601 +                               uint32_t aSampleRate)
   1.602 +{
   1.603 +  // Need an api to register mixer callbacks, bug 989921
   1.604 +#ifdef MOZ_WEBRTC
   1.605 +  if (aFrames > 0 && aChannels > 0) {
   1.606 +    // XXX need Observer base class and registration API
   1.607 +    if (gFarendObserver) {
   1.608 +      gFarendObserver->InsertFarEnd(aMixedBuffer, aFrames, false,
   1.609 +                                    aSampleRate, aChannels, aFormat);
   1.610 +    }
   1.611 +  }
   1.612 +#endif
   1.613 +}
   1.614 +
   1.615 +void
   1.616 +MediaStreamGraphImpl::UpdateStreamOrder()
   1.617 +{
   1.618 +  mOldStreams.SwapElements(mStreams);
   1.619 +  mStreams.ClearAndRetainStorage();
   1.620 +  bool shouldMix = false;
   1.621 +  for (uint32_t i = 0; i < mOldStreams.Length(); ++i) {
   1.622 +    MediaStream* stream = mOldStreams[i];
   1.623 +    stream->mHasBeenOrdered = false;
   1.624 +    stream->mIsConsumed = false;
   1.625 +    stream->mIsOnOrderingStack = false;
   1.626 +    stream->mInBlockingSet = false;
   1.627 +    if (stream->AsSourceStream() &&
   1.628 +        stream->AsSourceStream()->NeedsMixing()) {
   1.629 +      shouldMix = true;
   1.630 +    }
   1.631 +    ProcessedMediaStream* ps = stream->AsProcessedStream();
   1.632 +    if (ps) {
   1.633 +      ps->mInCycle = false;
   1.634 +      AudioNodeStream* ns = ps->AsAudioNodeStream();
   1.635 +      if (ns) {
   1.636 +        ns->Unmute();
   1.637 +      }
   1.638 +    }
   1.639 +  }
   1.640 +
   1.641 +  if (!mMixer && shouldMix) {
   1.642 +    mMixer = new AudioMixer(AudioMixerCallback);
   1.643 +  } else if (mMixer && !shouldMix) {
   1.644 +    mMixer = nullptr;
   1.645 +  }
   1.646 +
   1.647 +  mozilla::LinkedList<MediaStream> stack;
   1.648 +  for (uint32_t i = 0; i < mOldStreams.Length(); ++i) {
   1.649 +    nsRefPtr<MediaStream>& s = mOldStreams[i];
   1.650 +    if (s->IsIntrinsicallyConsumed()) {
   1.651 +      MarkConsumed(s);
   1.652 +    }
   1.653 +    if (!s->mHasBeenOrdered) {
   1.654 +      UpdateStreamOrderForStream(&stack, s.forget());
   1.655 +    }
   1.656 +  }
   1.657 +}
   1.658 +
   1.659 +void
   1.660 +MediaStreamGraphImpl::RecomputeBlocking(GraphTime aEndBlockingDecisions)
   1.661 +{
   1.662 +  bool blockingDecisionsWillChange = false;
   1.663 +
   1.664 +  STREAM_LOG(PR_LOG_DEBUG+1, ("Media graph %p computing blocking for time %f",
   1.665 +                              this, MediaTimeToSeconds(mStateComputedTime)));
   1.666 +  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
   1.667 +    MediaStream* stream = mStreams[i];
   1.668 +    if (!stream->mInBlockingSet) {
   1.669 +      // Compute a partition of the streams containing 'stream' such that we can
   1.670 +      // compute the blocking status of each subset independently.
   1.671 +      nsAutoTArray<MediaStream*,10> streamSet;
   1.672 +      AddBlockingRelatedStreamsToSet(&streamSet, stream);
   1.673 +
   1.674 +      GraphTime end;
   1.675 +      for (GraphTime t = mStateComputedTime;
   1.676 +           t < aEndBlockingDecisions; t = end) {
   1.677 +        end = GRAPH_TIME_MAX;
   1.678 +        RecomputeBlockingAt(streamSet, t, aEndBlockingDecisions, &end);
   1.679 +        if (end < GRAPH_TIME_MAX) {
   1.680 +          blockingDecisionsWillChange = true;
   1.681 +        }
   1.682 +      }
   1.683 +    }
   1.684 +
   1.685 +    GraphTime end;
   1.686 +    stream->mBlocked.GetAt(mCurrentTime, &end);
   1.687 +    if (end < GRAPH_TIME_MAX) {
   1.688 +      blockingDecisionsWillChange = true;
   1.689 +    }
   1.690 +  }
   1.691 +  STREAM_LOG(PR_LOG_DEBUG+1, ("Media graph %p computed blocking for interval %f to %f",
   1.692 +                              this, MediaTimeToSeconds(mStateComputedTime),
   1.693 +                              MediaTimeToSeconds(aEndBlockingDecisions)));
   1.694 +  mStateComputedTime = aEndBlockingDecisions;
   1.695 +
   1.696 +  if (blockingDecisionsWillChange) {
   1.697 +    // Make sure we wake up to notify listeners about these changes.
   1.698 +    EnsureNextIteration();
   1.699 +  }
   1.700 +}
   1.701 +
   1.702 +void
   1.703 +MediaStreamGraphImpl::AddBlockingRelatedStreamsToSet(nsTArray<MediaStream*>* aStreams,
   1.704 +                                                     MediaStream* aStream)
   1.705 +{
   1.706 +  if (aStream->mInBlockingSet)
   1.707 +    return;
   1.708 +  aStream->mInBlockingSet = true;
   1.709 +  aStreams->AppendElement(aStream);
   1.710 +  for (uint32_t i = 0; i < aStream->mConsumers.Length(); ++i) {
   1.711 +    MediaInputPort* port = aStream->mConsumers[i];
   1.712 +    if (port->mFlags & (MediaInputPort::FLAG_BLOCK_INPUT | MediaInputPort::FLAG_BLOCK_OUTPUT)) {
   1.713 +      AddBlockingRelatedStreamsToSet(aStreams, port->mDest);
   1.714 +    }
   1.715 +  }
   1.716 +  ProcessedMediaStream* ps = aStream->AsProcessedStream();
   1.717 +  if (ps) {
   1.718 +    for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
   1.719 +      MediaInputPort* port = ps->mInputs[i];
   1.720 +      if (port->mFlags & (MediaInputPort::FLAG_BLOCK_INPUT | MediaInputPort::FLAG_BLOCK_OUTPUT)) {
   1.721 +        AddBlockingRelatedStreamsToSet(aStreams, port->mSource);
   1.722 +      }
   1.723 +    }
   1.724 +  }
   1.725 +}
   1.726 +
   1.727 +void
   1.728 +MediaStreamGraphImpl::MarkStreamBlocking(MediaStream* aStream)
   1.729 +{
   1.730 +  if (aStream->mBlockInThisPhase)
   1.731 +    return;
   1.732 +  aStream->mBlockInThisPhase = true;
   1.733 +  for (uint32_t i = 0; i < aStream->mConsumers.Length(); ++i) {
   1.734 +    MediaInputPort* port = aStream->mConsumers[i];
   1.735 +    if (port->mFlags & MediaInputPort::FLAG_BLOCK_OUTPUT) {
   1.736 +      MarkStreamBlocking(port->mDest);
   1.737 +    }
   1.738 +  }
   1.739 +  ProcessedMediaStream* ps = aStream->AsProcessedStream();
   1.740 +  if (ps) {
   1.741 +    for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
   1.742 +      MediaInputPort* port = ps->mInputs[i];
   1.743 +      if (port->mFlags & MediaInputPort::FLAG_BLOCK_INPUT) {
   1.744 +        MarkStreamBlocking(port->mSource);
   1.745 +      }
   1.746 +    }
   1.747 +  }
   1.748 +}
   1.749 +
   1.750 +void
   1.751 +MediaStreamGraphImpl::RecomputeBlockingAt(const nsTArray<MediaStream*>& aStreams,
   1.752 +                                          GraphTime aTime,
   1.753 +                                          GraphTime aEndBlockingDecisions,
   1.754 +                                          GraphTime* aEnd)
   1.755 +{
   1.756 +  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
   1.757 +    MediaStream* stream = aStreams[i];
   1.758 +    stream->mBlockInThisPhase = false;
   1.759 +  }
   1.760 +
   1.761 +  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
   1.762 +    MediaStream* stream = aStreams[i];
   1.763 +
   1.764 +    if (stream->mFinished) {
   1.765 +      GraphTime endTime = StreamTimeToGraphTime(stream,
   1.766 +         stream->GetStreamBuffer().GetAllTracksEnd());
   1.767 +      if (endTime <= aTime) {
   1.768 +        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is blocked due to being finished", stream));
   1.769 +        // We'll block indefinitely
   1.770 +        MarkStreamBlocking(stream);
   1.771 +        *aEnd = std::min(*aEnd, aEndBlockingDecisions);
   1.772 +        continue;
   1.773 +      } else {
   1.774 +        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is finished, but not blocked yet (end at %f, with blocking at %f)",
   1.775 +                                    stream, MediaTimeToSeconds(stream->GetBufferEnd()),
   1.776 +                                    MediaTimeToSeconds(endTime)));
   1.777 +        *aEnd = std::min(*aEnd, endTime);
   1.778 +      }
   1.779 +    }
   1.780 +
   1.781 +    GraphTime end;
   1.782 +    bool explicitBlock = stream->mExplicitBlockerCount.GetAt(aTime, &end) > 0;
   1.783 +    *aEnd = std::min(*aEnd, end);
   1.784 +    if (explicitBlock) {
   1.785 +      STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is blocked due to explicit blocker", stream));
   1.786 +      MarkStreamBlocking(stream);
   1.787 +      continue;
   1.788 +    }
   1.789 +
   1.790 +    bool underrun = WillUnderrun(stream, aTime, aEndBlockingDecisions, aEnd);
   1.791 +    if (underrun) {
   1.792 +      // We'll block indefinitely
   1.793 +      MarkStreamBlocking(stream);
   1.794 +      *aEnd = std::min(*aEnd, aEndBlockingDecisions);
   1.795 +      continue;
   1.796 +    }
   1.797 +  }
   1.798 +  NS_ASSERTION(*aEnd > aTime, "Failed to advance!");
   1.799 +
   1.800 +  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
   1.801 +    MediaStream* stream = aStreams[i];
   1.802 +    stream->mBlocked.SetAtAndAfter(aTime, stream->mBlockInThisPhase);
   1.803 +  }
   1.804 +}
   1.805 +
   1.806 +void
   1.807 +MediaStreamGraphImpl::NotifyHasCurrentData(MediaStream* aStream)
   1.808 +{
   1.809 +  if (!aStream->mNotifiedHasCurrentData && aStream->mHasCurrentData) {
   1.810 +    for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
   1.811 +      MediaStreamListener* l = aStream->mListeners[j];
   1.812 +      l->NotifyHasCurrentData(this);
   1.813 +    }
   1.814 +    aStream->mNotifiedHasCurrentData = true;
   1.815 +  }
   1.816 +}
   1.817 +
   1.818 +void
   1.819 +MediaStreamGraphImpl::CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTime,
   1.820 +                                                  MediaStream* aStream)
   1.821 +{
   1.822 +  MOZ_ASSERT(mRealtime, "Should only attempt to create audio streams in real-time mode");
   1.823 +
   1.824 +  nsAutoTArray<bool,2> audioOutputStreamsFound;
   1.825 +  for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
   1.826 +    audioOutputStreamsFound.AppendElement(false);
   1.827 +  }
   1.828 +
   1.829 +  if (!aStream->mAudioOutputs.IsEmpty()) {
   1.830 +    for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::AUDIO);
   1.831 +         !tracks.IsEnded(); tracks.Next()) {
   1.832 +      uint32_t i;
   1.833 +      for (i = 0; i < audioOutputStreamsFound.Length(); ++i) {
   1.834 +        if (aStream->mAudioOutputStreams[i].mTrackID == tracks->GetID()) {
   1.835 +          break;
   1.836 +        }
   1.837 +      }
   1.838 +      if (i < audioOutputStreamsFound.Length()) {
   1.839 +        audioOutputStreamsFound[i] = true;
   1.840 +      } else {
   1.841 +        // No output stream created for this track yet. Check if it's time to
   1.842 +        // create one.
   1.843 +        GraphTime startTime =
   1.844 +          StreamTimeToGraphTime(aStream, tracks->GetStartTimeRoundDown(),
   1.845 +                                INCLUDE_TRAILING_BLOCKED_INTERVAL);
   1.846 +        if (startTime >= mStateComputedTime) {
     1.847 +          // The stream wants to play audio, but nothing will play for the foreseeable
   1.848 +          // future, so don't create the stream.
   1.849 +          continue;
   1.850 +        }
   1.851 +
     1.852 +        // Allocating an AudioStream would be slow, so we finish the Init asynchronously.
   1.853 +        MediaStream::AudioOutputStream* audioOutputStream =
   1.854 +          aStream->mAudioOutputStreams.AppendElement();
   1.855 +        audioOutputStream->mAudioPlaybackStartTime = aAudioOutputStartTime;
   1.856 +        audioOutputStream->mBlockedAudioTime = 0;
   1.857 +        audioOutputStream->mLastTickWritten = 0;
   1.858 +        audioOutputStream->mStream = new AudioStream();
   1.859 +        // XXX for now, allocate stereo output. But we need to fix this to
   1.860 +        // match the system's ideal channel configuration.
   1.861 +        // NOTE: we presume this is either fast or async-under-the-covers
   1.862 +        audioOutputStream->mStream->Init(2, mSampleRate,
   1.863 +                                         aStream->mAudioChannelType,
   1.864 +                                         AudioStream::LowLatency);
   1.865 +        audioOutputStream->mTrackID = tracks->GetID();
   1.866 +
   1.867 +        LogLatency(AsyncLatencyLogger::AudioStreamCreate,
   1.868 +                   reinterpret_cast<uint64_t>(aStream),
   1.869 +                   reinterpret_cast<int64_t>(audioOutputStream->mStream.get()));
   1.870 +      }
   1.871 +    }
   1.872 +  }
   1.873 +
   1.874 +  for (int32_t i = audioOutputStreamsFound.Length() - 1; i >= 0; --i) {
   1.875 +    if (!audioOutputStreamsFound[i]) {
   1.876 +      aStream->mAudioOutputStreams[i].mStream->Shutdown();
   1.877 +      aStream->mAudioOutputStreams.RemoveElementAt(i);
   1.878 +    }
   1.879 +  }
   1.880 +}
   1.881 +
   1.882 +TrackTicks
   1.883 +MediaStreamGraphImpl::PlayAudio(MediaStream* aStream,
   1.884 +                                GraphTime aFrom, GraphTime aTo)
   1.885 +{
   1.886 +  MOZ_ASSERT(mRealtime, "Should only attempt to play audio in realtime mode");
   1.887 +
   1.888 +  TrackTicks ticksWritten = 0;
     1.889 +  // We compute the number of needed ticks from a difference of graph times
     1.890 +  // rather than by subtracting two converted stream times, to ensure that
     1.891 +  // the rounding between {Graph,Stream}Time and track ticks is not dependent
     1.892 +  // on the absolute value of the {Graph,Stream}Time, and so that the number of
     1.893 +  // ticks to play is the same for each cycle.
   1.894 +  TrackTicks ticksNeeded = TimeToTicksRoundDown(mSampleRate, aTo) - TimeToTicksRoundDown(mSampleRate, aFrom);
   1.895 +
   1.896 +  if (aStream->mAudioOutputStreams.IsEmpty()) {
   1.897 +    return 0;
   1.898 +  }
   1.899 +
   1.900 +  // When we're playing multiple copies of this stream at the same time, they're
   1.901 +  // perfectly correlated so adding volumes is the right thing to do.
   1.902 +  float volume = 0.0f;
   1.903 +  for (uint32_t i = 0; i < aStream->mAudioOutputs.Length(); ++i) {
   1.904 +    volume += aStream->mAudioOutputs[i].mVolume;
   1.905 +  }
   1.906 +
   1.907 +  for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
   1.908 +    MediaStream::AudioOutputStream& audioOutput = aStream->mAudioOutputStreams[i];
   1.909 +    StreamBuffer::Track* track = aStream->mBuffer.FindTrack(audioOutput.mTrackID);
   1.910 +    AudioSegment* audio = track->Get<AudioSegment>();
   1.911 +    AudioSegment output;
   1.912 +    MOZ_ASSERT(track->GetRate() == mSampleRate);
   1.913 +
   1.914 +    // offset and audioOutput.mLastTickWritten can differ by at most one sample,
   1.915 +    // because of the rounding issue. We track that to ensure we don't skip a
   1.916 +    // sample. One sample may be played twice, but this should not happen
   1.917 +    // again during an unblocked sequence of track samples.
   1.918 +    TrackTicks offset = track->TimeToTicksRoundDown(GraphTimeToStreamTime(aStream, aFrom));
   1.919 +    if (audioOutput.mLastTickWritten &&
   1.920 +        audioOutput.mLastTickWritten != offset) {
   1.921 +      // If there is a global underrun of the MSG, this property won't hold, and
   1.922 +      // we reset the sample count tracking.
   1.923 +      if (offset - audioOutput.mLastTickWritten == 1) {
   1.924 +        offset = audioOutput.mLastTickWritten;
   1.925 +      }
   1.926 +    }
   1.927 +
   1.928 +    // We don't update aStream->mBufferStartTime here to account for
   1.929 +    // time spent blocked. Instead, we'll update it in UpdateCurrentTime after the
   1.930 +    // blocked period has completed. But we do need to make sure we play from the
   1.931 +    // right offsets in the stream buffer, even if we've already written silence for
   1.932 +    // some amount of blocked time after the current time.
   1.933 +    GraphTime t = aFrom;
   1.934 +    while (ticksNeeded) {
   1.935 +      GraphTime end;
   1.936 +      bool blocked = aStream->mBlocked.GetAt(t, &end);
   1.937 +      end = std::min(end, aTo);
   1.938 +
   1.939 +      // Check how many ticks of sound we can provide if we are blocked some
   1.940 +      // time in the middle of this cycle.
   1.941 +      TrackTicks toWrite = 0;
   1.942 +      if (end >= aTo) {
   1.943 +        toWrite = ticksNeeded;
   1.944 +      } else {
   1.945 +        toWrite = TimeToTicksRoundDown(mSampleRate, end - aFrom);
   1.946 +      }
   1.947 +      ticksNeeded -= toWrite;
   1.948 +
   1.949 +      if (blocked) {
   1.950 +        output.InsertNullDataAtStart(toWrite);
   1.951 +        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld blocking-silence samples for %f to %f (%ld to %ld)\n",
   1.952 +                                    aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
   1.953 +                                    offset, offset + toWrite));
   1.954 +      } else {
   1.955 +        TrackTicks endTicksNeeded = offset + toWrite;
   1.956 +        TrackTicks endTicksAvailable = audio->GetDuration();
   1.957 +        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld samples for %f to %f (samples %ld to %ld)\n",
   1.958 +                                     aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
   1.959 +                                     offset, endTicksNeeded));
   1.960 +
   1.961 +        if (endTicksNeeded <= endTicksAvailable) {
   1.962 +          output.AppendSlice(*audio, offset, endTicksNeeded);
   1.963 +          offset = endTicksNeeded;
   1.964 +        } else {
   1.965 +          MOZ_ASSERT(track->IsEnded(), "Not enough data, and track not ended.");
   1.966 +          // If we are at the end of the track, maybe write the remaining
   1.967 +          // samples, and pad with/output silence.
   1.968 +          if (endTicksNeeded > endTicksAvailable &&
   1.969 +              offset < endTicksAvailable) {
   1.970 +            output.AppendSlice(*audio, offset, endTicksAvailable);
   1.971 +            toWrite -= endTicksAvailable - offset;
   1.972 +            offset = endTicksAvailable;
   1.973 +          }
   1.974 +          output.AppendNullData(toWrite);
   1.975 +        }
   1.976 +        output.ApplyVolume(volume);
   1.977 +      }
   1.978 +      t = end;
   1.979 +    }
   1.980 +    audioOutput.mLastTickWritten = offset;
   1.981 +
   1.982 +    // Need unique id for stream & track - and we want it to match the inserter
   1.983 +    output.WriteTo(LATENCY_STREAM_ID(aStream, track->GetID()),
   1.984 +                   audioOutput.mStream, mMixer);
   1.985 +  }
   1.986 +  return ticksWritten;
   1.987 +}
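
A small sketch of the rounding point in the comment above: summing tick counts
computed from converted graph-time endpoints telescopes across iterations and
never drifts, whereas converting each iteration's length separately can lose up
to a tick per iteration. The fixed-point representation (20 fractional bits) is
an assumption:

#include <cstdint>
#include <cstdio>

typedef int64_t GraphTime;
typedef int64_t TrackTicks;
static const int kFracBits = 20;   // assumed MEDIA_TIME_FRAC_BITS

static TrackTicks TimeToTicksRoundDown(int64_t aRate, GraphTime aTime) {
  return (aTime * aRate) >> kFracBits;
}

int main() {
  const int64_t rate = 44100;
  const GraphTime step = 10700;          // an arbitrary iteration length
  GraphTime t = 0;
  TrackTicks byEndpoints = 0, byLength = 0;
  for (int i = 0; i < 1000; ++i) {
    byEndpoints += TimeToTicksRoundDown(rate, t + step) -
                   TimeToTicksRoundDown(rate, t);     // telescopes: no drift
    byLength += TimeToTicksRoundDown(rate, step);     // drops a fraction each time
    t += step;
  }
  std::printf("endpoints=%lld length=%lld\n",
              (long long)byEndpoints, (long long)byLength);
  return 0;
}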
   1.988 +
   1.989 +static void
   1.990 +SetImageToBlackPixel(PlanarYCbCrImage* aImage)
   1.991 +{
   1.992 +  uint8_t blackPixel[] = { 0x10, 0x80, 0x80 };
   1.993 +
   1.994 +  PlanarYCbCrData data;
   1.995 +  data.mYChannel = blackPixel;
   1.996 +  data.mCbChannel = blackPixel + 1;
   1.997 +  data.mCrChannel = blackPixel + 2;
   1.998 +  data.mYStride = data.mCbCrStride = 1;
   1.999 +  data.mPicSize = data.mYSize = data.mCbCrSize = IntSize(1, 1);
  1.1000 +  aImage->SetData(data);
  1.1001 +}
  1.1002 +
  1.1003 +void
  1.1004 +MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)
  1.1005 +{
  1.1006 +  MOZ_ASSERT(mRealtime, "Should only attempt to play video in realtime mode");
  1.1007 +
  1.1008 +  if (aStream->mVideoOutputs.IsEmpty())
  1.1009 +    return;
  1.1010 +
  1.1011 +  // Display the next frame a bit early. This is better than letting the current
  1.1012 +  // frame be displayed for too long.
  1.1013 +  GraphTime framePosition = mCurrentTime + MEDIA_GRAPH_TARGET_PERIOD_MS;
  1.1014 +  NS_ASSERTION(framePosition >= aStream->mBufferStartTime, "frame position before buffer?");
  1.1015 +  StreamTime frameBufferTime = GraphTimeToStreamTime(aStream, framePosition);
  1.1016 +
  1.1017 +  TrackTicks start;
  1.1018 +  const VideoFrame* frame = nullptr;
  1.1019 +  StreamBuffer::Track* track;
  1.1020 +  for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::VIDEO);
  1.1021 +       !tracks.IsEnded(); tracks.Next()) {
  1.1022 +    VideoSegment* segment = tracks->Get<VideoSegment>();
  1.1023 +    TrackTicks thisStart;
  1.1024 +    const VideoFrame* thisFrame =
  1.1025 +      segment->GetFrameAt(tracks->TimeToTicksRoundDown(frameBufferTime), &thisStart);
  1.1026 +    if (thisFrame && thisFrame->GetImage()) {
  1.1027 +      start = thisStart;
  1.1028 +      frame = thisFrame;
  1.1029 +      track = tracks.get();
  1.1030 +    }
  1.1031 +  }
  1.1032 +  if (!frame || *frame == aStream->mLastPlayedVideoFrame)
  1.1033 +    return;
  1.1034 +
  1.1035 +  STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing video frame %p (%dx%d)",
  1.1036 +                              aStream, frame->GetImage(), frame->GetIntrinsicSize().width,
  1.1037 +                              frame->GetIntrinsicSize().height));
  1.1038 +  GraphTime startTime = StreamTimeToGraphTime(aStream,
  1.1039 +      track->TicksToTimeRoundDown(start), INCLUDE_TRAILING_BLOCKED_INTERVAL);
  1.1040 +  TimeStamp targetTime = mCurrentTimeStamp +
  1.1041 +      TimeDuration::FromMilliseconds(double(startTime - mCurrentTime));
  1.1042 +  for (uint32_t i = 0; i < aStream->mVideoOutputs.Length(); ++i) {
  1.1043 +    VideoFrameContainer* output = aStream->mVideoOutputs[i];
  1.1044 +
  1.1045 +    if (frame->GetForceBlack()) {
  1.1046 +      nsRefPtr<Image> image =
  1.1047 +        output->GetImageContainer()->CreateImage(ImageFormat::PLANAR_YCBCR);
  1.1048 +      if (image) {
  1.1049 +        // Sets the image to a single black pixel, which will be scaled to fill
  1.1050 +        // the rendered size.
  1.1051 +        SetImageToBlackPixel(static_cast<PlanarYCbCrImage*>(image.get()));
  1.1052 +      }
  1.1053 +      output->SetCurrentFrame(frame->GetIntrinsicSize(), image,
  1.1054 +                              targetTime);
  1.1055 +    } else {
  1.1056 +      output->SetCurrentFrame(frame->GetIntrinsicSize(), frame->GetImage(),
  1.1057 +                              targetTime);
  1.1058 +    }
  1.1059 +
  1.1060 +    nsCOMPtr<nsIRunnable> event =
  1.1061 +      NS_NewRunnableMethod(output, &VideoFrameContainer::Invalidate);
  1.1062 +    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
  1.1063 +  }
  1.1064 +  if (!aStream->mNotifiedFinished) {
  1.1065 +    aStream->mLastPlayedVideoFrame = *frame;
  1.1066 +  }
  1.1067 +}
  1.1068 +
  1.1069 +bool
  1.1070 +MediaStreamGraphImpl::ShouldUpdateMainThread()
  1.1071 +{
  1.1072 +  if (mRealtime) {
  1.1073 +    return true;
  1.1074 +  }
  1.1075 +
  1.1076 +  TimeStamp now = TimeStamp::Now();
  1.1077 +  if ((now - mLastMainThreadUpdate).ToMilliseconds() > MEDIA_GRAPH_TARGET_PERIOD_MS) {
  1.1078 +    mLastMainThreadUpdate = now;
  1.1079 +    return true;
  1.1080 +  }
  1.1081 +  return false;
  1.1082 +}
  1.1083 +
  1.1084 +void
  1.1085 +MediaStreamGraphImpl::PrepareUpdatesToMainThreadState(bool aFinalUpdate)
  1.1086 +{
  1.1087 +  mMonitor.AssertCurrentThreadOwns();
  1.1088 +
   1.1089 +  // We don't want to send frequent timing updates to the main thread when
   1.1090 +  // we are not running in realtime.
  1.1091 +  if (aFinalUpdate || ShouldUpdateMainThread()) {
  1.1092 +    mStreamUpdates.SetCapacity(mStreamUpdates.Length() + mStreams.Length());
  1.1093 +    for (uint32_t i = 0; i < mStreams.Length(); ++i) {
  1.1094 +      MediaStream* stream = mStreams[i];
  1.1095 +      if (!stream->MainThreadNeedsUpdates()) {
  1.1096 +        continue;
  1.1097 +      }
  1.1098 +      StreamUpdate* update = mStreamUpdates.AppendElement();
  1.1099 +      update->mGraphUpdateIndex = stream->mGraphUpdateIndices.GetAt(mCurrentTime);
  1.1100 +      update->mStream = stream;
  1.1101 +      update->mNextMainThreadCurrentTime =
  1.1102 +        GraphTimeToStreamTime(stream, mCurrentTime);
  1.1103 +      update->mNextMainThreadFinished = stream->mNotifiedFinished;
  1.1104 +    }
  1.1105 +    if (!mPendingUpdateRunnables.IsEmpty()) {
  1.1106 +      mUpdateRunnables.MoveElementsFrom(mPendingUpdateRunnables);
  1.1107 +    }
  1.1108 +  }
  1.1109 +
  1.1110 +  // Don't send the message to the main thread if it's not going to have
  1.1111 +  // any work to do.
  1.1112 +  if (aFinalUpdate ||
  1.1113 +      !mUpdateRunnables.IsEmpty() ||
  1.1114 +      !mStreamUpdates.IsEmpty()) {
  1.1115 +    EnsureStableStateEventPosted();
  1.1116 +  }
  1.1117 +}
  1.1118 +
  1.1119 +void
  1.1120 +MediaStreamGraphImpl::EnsureImmediateWakeUpLocked(MonitorAutoLock& aLock)
  1.1121 +{
  1.1122 +  if (mWaitState == WAITSTATE_WAITING_FOR_NEXT_ITERATION ||
  1.1123 +      mWaitState == WAITSTATE_WAITING_INDEFINITELY) {
  1.1124 +    mWaitState = WAITSTATE_WAKING_UP;
  1.1125 +    aLock.Notify();
  1.1126 +  }
  1.1127 +}
  1.1128 +
  1.1129 +void
  1.1130 +MediaStreamGraphImpl::EnsureNextIteration()
  1.1131 +{
  1.1132 +  MonitorAutoLock lock(mMonitor);
  1.1133 +  EnsureNextIterationLocked(lock);
  1.1134 +}
  1.1135 +
  1.1136 +void
  1.1137 +MediaStreamGraphImpl::EnsureNextIterationLocked(MonitorAutoLock& aLock)
  1.1138 +{
  1.1139 +  if (mNeedAnotherIteration)
  1.1140 +    return;
  1.1141 +  mNeedAnotherIteration = true;
  1.1142 +  if (mWaitState == WAITSTATE_WAITING_INDEFINITELY) {
  1.1143 +    mWaitState = WAITSTATE_WAKING_UP;
  1.1144 +    aLock.Notify();
  1.1145 +  }
  1.1146 +}
  1.1147 +
  1.1148 +/**
  1.1149 + * Returns smallest value of t such that
  1.1150 + * TimeToTicksRoundUp(aSampleRate, t) is a multiple of WEBAUDIO_BLOCK_SIZE
  1.1151 + * and floor(TimeToTicksRoundUp(aSampleRate, t)/WEBAUDIO_BLOCK_SIZE) >
  1.1152 + * floor(TimeToTicksRoundUp(aSampleRate, aTime)/WEBAUDIO_BLOCK_SIZE).
  1.1153 + */
  1.1154 +static GraphTime
  1.1155 +RoundUpToNextAudioBlock(TrackRate aSampleRate, GraphTime aTime)
  1.1156 +{
  1.1157 +  TrackTicks ticks = TimeToTicksRoundUp(aSampleRate, aTime);
  1.1158 +  uint64_t block = ticks >> WEBAUDIO_BLOCK_SIZE_BITS;
  1.1159 +  uint64_t nextBlock = block + 1;
  1.1160 +  TrackTicks nextTicks = nextBlock << WEBAUDIO_BLOCK_SIZE_BITS;
  1.1161 +  // Find the smallest time t such that TimeToTicksRoundUp(aSampleRate,t) == nextTicks
  1.1162 +  // That's the smallest integer t such that
  1.1163 +  //   t*aSampleRate > ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS)
  1.1164 +  // Both sides are integers, so this is equivalent to
  1.1165 +  //   t*aSampleRate >= ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1
  1.1166 +  //   t >= (((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1)/aSampleRate
  1.1167 +  //   t = ceil((((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1)/aSampleRate)
  1.1168 +  // Using integer division, that's
  1.1169 +  //   t = (((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1 + aSampleRate - 1)/aSampleRate
  1.1170 +  //     = ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS)/aSampleRate + 1
  1.1171 +  return ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS)/aSampleRate + 1;
  1.1172 +}
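
A standalone check of the formula derived above, assuming MEDIA_TIME_FRAC_BITS
is 20 and WEBAUDIO_BLOCK_SIZE is 128, and assuming TimeToTicksRoundUp is the
usual fixed-point ceiling conversion (none of these are defined in this file):

#include <cassert>
#include <cstdint>

typedef int64_t GraphTime;
typedef int64_t TrackTicks;
static const int kFracBits = 20;    // assumed MEDIA_TIME_FRAC_BITS
static const int kBlockBits = 7;    // WEBAUDIO_BLOCK_SIZE_BITS (blocks of 128)

static TrackTicks TimeToTicksRoundUp(int64_t aRate, GraphTime aTime) {
  return (aTime * aRate + (1 << kFracBits) - 1) >> kFracBits;
}

static GraphTime RoundUpToNextAudioBlock(int64_t aRate, GraphTime aTime) {
  TrackTicks ticks = TimeToTicksRoundUp(aRate, aTime);
  TrackTicks nextTicks = ((ticks >> kBlockBits) + 1) << kBlockBits;
  return ((nextTicks - 1) << kFracBits) / aRate + 1;
}

int main() {
  const int64_t rate = 44100;
  for (GraphTime time = 0; time < (GraphTime(5) << kFracBits); time += 12345) {
    GraphTime t = RoundUpToNextAudioBlock(rate, time);
    TrackTicks ticks = TimeToTicksRoundUp(rate, t);
    // t lands exactly on the next block boundary...
    assert(ticks % (1 << kBlockBits) == 0);
    assert(ticks >> kBlockBits == (TimeToTicksRoundUp(rate, time) >> kBlockBits) + 1);
    // ...and no smaller t does.
    assert(TimeToTicksRoundUp(rate, t - 1) < ticks);
  }
  return 0;
}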
  1.1173 +
  1.1174 +void
  1.1175 +MediaStreamGraphImpl::ProduceDataForStreamsBlockByBlock(uint32_t aStreamIndex,
  1.1176 +                                                        TrackRate aSampleRate,
  1.1177 +                                                        GraphTime aFrom,
  1.1178 +                                                        GraphTime aTo)
  1.1179 +{
  1.1180 +  GraphTime t = aFrom;
  1.1181 +  while (t < aTo) {
  1.1182 +    GraphTime next = RoundUpToNextAudioBlock(aSampleRate, t);
  1.1183 +    for (uint32_t i = aStreamIndex; i < mStreams.Length(); ++i) {
  1.1184 +      ProcessedMediaStream* ps = mStreams[i]->AsProcessedStream();
  1.1185 +      if (ps) {
  1.1186 +        ps->ProcessInput(t, next, (next == aTo) ? ProcessedMediaStream::ALLOW_FINISH : 0);
  1.1187 +      }
  1.1188 +    }
  1.1189 +    t = next;
  1.1190 +  }
  1.1191 +  NS_ASSERTION(t == aTo, "Something went wrong with rounding to block boundaries");
  1.1192 +}
  1.1193 +
  1.1194 +bool
  1.1195 +MediaStreamGraphImpl::AllFinishedStreamsNotified()
  1.1196 +{
  1.1197 +  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
  1.1198 +    MediaStream* s = mStreams[i];
  1.1199 +    if (s->mFinished && !s->mNotifiedFinished) {
  1.1200 +      return false;
  1.1201 +    }
  1.1202 +  }
  1.1203 +  return true;
  1.1204 +}
  1.1205 +
  1.1206 +void
  1.1207 +MediaStreamGraphImpl::PauseAllAudioOutputs()
  1.1208 +{
  1.1209 +  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
  1.1210 +    MediaStream* s = mStreams[i];
  1.1211 +    for (uint32_t j = 0; j < s->mAudioOutputStreams.Length(); ++j) {
  1.1212 +      s->mAudioOutputStreams[j].mStream->Pause();
  1.1213 +    }
  1.1214 +  }
  1.1215 +}
  1.1216 +
  1.1217 +void
  1.1218 +MediaStreamGraphImpl::ResumeAllAudioOutputs()
  1.1219 +{
  1.1220 +  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
  1.1221 +    MediaStream* s = mStreams[i];
  1.1222 +    for (uint32_t j = 0; j < s->mAudioOutputStreams.Length(); ++j) {
  1.1223 +      s->mAudioOutputStreams[j].mStream->Resume();
  1.1224 +    }
  1.1225 +  }
  1.1226 +}
  1.1227 +
  1.1228 +struct AutoProfilerUnregisterThread
  1.1229 +{
  1.1230 +  // The empty ctor is used to silence a pre-4.8.0 GCC unused variable warning.
  1.1231 +  AutoProfilerUnregisterThread()
  1.1232 +  {
  1.1233 +  }
  1.1234 +
  1.1235 +  ~AutoProfilerUnregisterThread()
  1.1236 +  {
  1.1237 +    profiler_unregister_thread();
  1.1238 +  }
  1.1239 +};
  1.1240 +
  1.1241 +void
  1.1242 +MediaStreamGraphImpl::RunThread()
  1.1243 +{
  1.1244 +  nsTArray<MessageBlock> messageQueue;
  1.1245 +  {
  1.1246 +    MonitorAutoLock lock(mMonitor);
  1.1247 +    messageQueue.SwapElements(mMessageQueue);
  1.1248 +  }
  1.1249 +  NS_ASSERTION(!messageQueue.IsEmpty(),
  1.1250 +               "Shouldn't have started a graph with empty message queue!");
  1.1251 +
  1.1252 +  uint32_t ticksProcessed = 0;
  1.1253 +  AutoProfilerUnregisterThread autoUnregister;
  1.1254 +
  1.1255 +  for (;;) {
  1.1256 +    // Check if a memory report has been requested.
  1.1257 +    {
  1.1258 +      MonitorAutoLock lock(mMemoryReportMonitor);
  1.1259 +      if (mNeedsMemoryReport) {
  1.1260 +        mNeedsMemoryReport = false;
  1.1261 +
  1.1262 +        for (uint32_t i = 0; i < mStreams.Length(); ++i) {
  1.1263 +          AudioNodeStream* stream = mStreams[i]->AsAudioNodeStream();
  1.1264 +          if (stream) {
  1.1265 +            AudioNodeSizes usage;
  1.1266 +            stream->SizeOfAudioNodesIncludingThis(MallocSizeOf, usage);
  1.1267 +            mAudioStreamSizes.AppendElement(usage);
  1.1268 +          }
  1.1269 +        }
  1.1270 +
  1.1271 +        lock.Notify();
  1.1272 +      }
  1.1273 +    }
  1.1274 +
   1.1275 +    // Update mCurrentTime to the min of the playing audio times, or advance
   1.1276 +    // it by the wall-clock time change if no audio is playing.
  1.1277 +    UpdateCurrentTime();
  1.1278 +
  1.1279 +    // Calculate independent action times for each batch of messages (each
  1.1280 +    // batch corresponding to an event loop task). This isolates the performance
  1.1281 +    // of different scripts to some extent.
  1.1282 +    for (uint32_t i = 0; i < messageQueue.Length(); ++i) {
  1.1283 +      mProcessingGraphUpdateIndex = messageQueue[i].mGraphUpdateIndex;
  1.1284 +      nsTArray<nsAutoPtr<ControlMessage> >& messages = messageQueue[i].mMessages;
  1.1285 +
  1.1286 +      for (uint32_t j = 0; j < messages.Length(); ++j) {
  1.1287 +        messages[j]->Run();
  1.1288 +      }
  1.1289 +    }
  1.1290 +    messageQueue.Clear();
  1.1291 +
  1.1292 +    if (mStreamOrderDirty) {
  1.1293 +      UpdateStreamOrder();
  1.1294 +    }
  1.1295 +
  1.1296 +    GraphTime endBlockingDecisions =
  1.1297 +      RoundUpToNextAudioBlock(mSampleRate, mCurrentTime + MillisecondsToMediaTime(AUDIO_TARGET_MS));
  1.1298 +    bool ensureNextIteration = false;
  1.1299 +
  1.1300 +    // Grab pending stream input.
  1.1301 +    for (uint32_t i = 0; i < mStreams.Length(); ++i) {
  1.1302 +      SourceMediaStream* is = mStreams[i]->AsSourceStream();
  1.1303 +      if (is) {
  1.1304 +        UpdateConsumptionState(is);
  1.1305 +        ExtractPendingInput(is, endBlockingDecisions, &ensureNextIteration);
  1.1306 +      }
  1.1307 +    }
  1.1308 +
   1.1309 +    // If the loop is woken up so soon that mCurrentTime barely advances, we
   1.1310 +    // can end up with endBlockingDecisions == mStateComputedTime. Since stream
   1.1311 +    // blocking is computed over the interval
   1.1312 +    // [mStateComputedTime, endBlockingDecisions), it would not be computed at
   1.1313 +    // all. Ensure another iteration so that pending blocking changes are
   1.1314 +    // computed in the next loop.
  1.1315 +    if (endBlockingDecisions == mStateComputedTime) {
  1.1316 +      ensureNextIteration = true;
  1.1317 +    }
  1.1318 +
  1.1319 +    // Figure out which streams are blocked and when.
  1.1320 +    GraphTime prevComputedTime = mStateComputedTime;
  1.1321 +    RecomputeBlocking(endBlockingDecisions);
  1.1322 +
  1.1323 +    // Play stream contents.
  1.1324 +    bool allBlockedForever = true;
  1.1325 +    // True when we've done ProcessInput for all processed streams.
  1.1326 +    bool doneAllProducing = false;
   1.1327 +    // This is the number of frames that are written to the AudioStreams for
   1.1328 +    // this cycle.
  1.1329 +    TrackTicks ticksPlayed = 0;
  1.1330 +    // Figure out what each stream wants to do
  1.1331 +    for (uint32_t i = 0; i < mStreams.Length(); ++i) {
  1.1332 +      MediaStream* stream = mStreams[i];
  1.1333 +      if (!doneAllProducing) {
  1.1334 +        ProcessedMediaStream* ps = stream->AsProcessedStream();
  1.1335 +        if (ps) {
  1.1336 +          AudioNodeStream* n = stream->AsAudioNodeStream();
  1.1337 +          if (n) {
  1.1338 +#ifdef DEBUG
  1.1339 +            // Verify that the sampling rate for all of the following streams is the same
  1.1340 +            for (uint32_t j = i + 1; j < mStreams.Length(); ++j) {
  1.1341 +              AudioNodeStream* nextStream = mStreams[j]->AsAudioNodeStream();
  1.1342 +              if (nextStream) {
  1.1343 +                MOZ_ASSERT(n->SampleRate() == nextStream->SampleRate(),
  1.1344 +                           "All AudioNodeStreams in the graph must have the same sampling rate");
  1.1345 +              }
  1.1346 +            }
  1.1347 +#endif
  1.1348 +            // Since an AudioNodeStream is present, go ahead and
  1.1349 +            // produce audio block by block for all the rest of the streams.
  1.1350 +            ProduceDataForStreamsBlockByBlock(i, n->SampleRate(), prevComputedTime, mStateComputedTime);
  1.1351 +            ticksProcessed += TimeToTicksRoundDown(n->SampleRate(), mStateComputedTime - prevComputedTime);
  1.1352 +            doneAllProducing = true;
  1.1353 +          } else {
  1.1354 +            ps->ProcessInput(prevComputedTime, mStateComputedTime,
  1.1355 +                             ProcessedMediaStream::ALLOW_FINISH);
  1.1356 +            NS_WARN_IF_FALSE(stream->mBuffer.GetEnd() >=
  1.1357 +                             GraphTimeToStreamTime(stream, mStateComputedTime),
  1.1358 +                             "Stream did not produce enough data");
  1.1359 +          }
  1.1360 +        }
  1.1361 +      }
  1.1362 +      NotifyHasCurrentData(stream);
  1.1363 +      if (mRealtime) {
   1.1364 +        // Only play back audio and video in real-time mode
  1.1365 +        CreateOrDestroyAudioStreams(prevComputedTime, stream);
  1.1366 +        TrackTicks ticksPlayedForThisStream = PlayAudio(stream, prevComputedTime, mStateComputedTime);
  1.1367 +        if (!ticksPlayed) {
  1.1368 +          ticksPlayed = ticksPlayedForThisStream;
  1.1369 +        } else {
  1.1370 +          MOZ_ASSERT(!ticksPlayedForThisStream || ticksPlayedForThisStream == ticksPlayed,
  1.1371 +              "Each stream should have the same number of frame.");
  1.1372 +        }
  1.1373 +        PlayVideo(stream);
  1.1374 +      }
  1.1375 +      SourceMediaStream* is = stream->AsSourceStream();
  1.1376 +      if (is) {
  1.1377 +        UpdateBufferSufficiencyState(is);
  1.1378 +      }
  1.1379 +      GraphTime end;
  1.1380 +      if (!stream->mBlocked.GetAt(mCurrentTime, &end) || end < GRAPH_TIME_MAX) {
  1.1381 +        allBlockedForever = false;
  1.1382 +      }
  1.1383 +    }
  1.1384 +
  1.1385 +    if (mMixer) {
  1.1386 +      mMixer->FinishMixing();
  1.1387 +    }
  1.1388 +
  1.1389 +    if (ensureNextIteration || !allBlockedForever) {
  1.1390 +      EnsureNextIteration();
  1.1391 +    }
  1.1392 +
  1.1393 +    // Send updates to the main thread and wait for the next control loop
  1.1394 +    // iteration.
  1.1395 +    {
  1.1396 +      MonitorAutoLock lock(mMonitor);
  1.1397 +      bool finalUpdate = mForceShutDown ||
  1.1398 +        (mCurrentTime >= mEndTime && AllFinishedStreamsNotified()) ||
  1.1399 +        (IsEmpty() && mMessageQueue.IsEmpty());
  1.1400 +      PrepareUpdatesToMainThreadState(finalUpdate);
  1.1401 +      if (finalUpdate) {
  1.1402 +        // Enter shutdown mode. The stable-state handler will detect this
  1.1403 +        // and complete shutdown. Destroy any streams immediately.
  1.1404 +        STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p waiting for main thread cleanup", this));
  1.1405 +        // We'll shut down this graph object if it does not get restarted.
  1.1406 +        mLifecycleState = LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP;
  1.1407 +        // No need to Destroy streams here. The main-thread owner of each
  1.1408 +        // stream is responsible for calling Destroy on them.
  1.1409 +        return;
  1.1410 +      }
  1.1411 +
  1.1412 +      // No need to wait in non-realtime mode, just churn through the input as soon
  1.1413 +      // as possible.
  1.1414 +      if (mRealtime) {
  1.1415 +        PRIntervalTime timeout = PR_INTERVAL_NO_TIMEOUT;
  1.1416 +        TimeStamp now = TimeStamp::Now();
  1.1417 +        bool pausedOutputs = false;
  1.1418 +        if (mNeedAnotherIteration) {
  1.1419 +          int64_t timeoutMS = MEDIA_GRAPH_TARGET_PERIOD_MS -
  1.1420 +            int64_t((now - mCurrentTimeStamp).ToMilliseconds());
  1.1421 +          // Make sure timeoutMS doesn't overflow 32 bits by waking up at
  1.1422 +          // least once a minute, if we need to wake up at all
  1.1423 +          timeoutMS = std::max<int64_t>(0, std::min<int64_t>(timeoutMS, 60*1000));
  1.1424 +          timeout = PR_MillisecondsToInterval(uint32_t(timeoutMS));
  1.1425 +          STREAM_LOG(PR_LOG_DEBUG+1, ("Waiting for next iteration; at %f, timeout=%f",
  1.1426 +                                     (now - mInitialTimeStamp).ToSeconds(), timeoutMS/1000.0));
  1.1427 +          mWaitState = WAITSTATE_WAITING_FOR_NEXT_ITERATION;
  1.1428 +        } else {
  1.1429 +          mWaitState = WAITSTATE_WAITING_INDEFINITELY;
  1.1430 +          PauseAllAudioOutputs();
  1.1431 +          pausedOutputs = true;
  1.1432 +        }
  1.1433 +        if (timeout > 0) {
  1.1434 +          mMonitor.Wait(timeout);
  1.1435 +          STREAM_LOG(PR_LOG_DEBUG+1, ("Resuming after timeout; at %f, elapsed=%f",
  1.1436 +                                     (TimeStamp::Now() - mInitialTimeStamp).ToSeconds(),
  1.1437 +                                     (TimeStamp::Now() - now).ToSeconds()));
  1.1438 +        }
  1.1439 +        if (pausedOutputs) {
  1.1440 +          ResumeAllAudioOutputs();
  1.1441 +        }
  1.1442 +      }
  1.1443 +      mWaitState = WAITSTATE_RUNNING;
  1.1444 +      mNeedAnotherIteration = false;
  1.1445 +      messageQueue.SwapElements(mMessageQueue);
  1.1446 +    }
  1.1447 +  }
  1.1448 +}
  1.1449 +
  1.1450 +void
  1.1451 +MediaStreamGraphImpl::ApplyStreamUpdate(StreamUpdate* aUpdate)
  1.1452 +{
  1.1453 +  mMonitor.AssertCurrentThreadOwns();
  1.1454 +
  1.1455 +  MediaStream* stream = aUpdate->mStream;
  1.1456 +  if (!stream)
  1.1457 +    return;
  1.1458 +  stream->mMainThreadCurrentTime = aUpdate->mNextMainThreadCurrentTime;
  1.1459 +  stream->mMainThreadFinished = aUpdate->mNextMainThreadFinished;
  1.1460 +
  1.1461 +  if (stream->mWrapper) {
  1.1462 +    stream->mWrapper->NotifyStreamStateChanged();
  1.1463 +  }
  1.1464 +  for (int32_t i = stream->mMainThreadListeners.Length() - 1; i >= 0; --i) {
  1.1465 +    stream->mMainThreadListeners[i]->NotifyMainThreadStateChanged();
  1.1466 +  }
  1.1467 +}
  1.1468 +
  1.1469 +void
  1.1470 +MediaStreamGraphImpl::ShutdownThreads()
  1.1471 +{
  1.1472 +  NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
  1.1473 +  // mGraph's thread is not running so it's OK to do whatever here
  1.1474 +  STREAM_LOG(PR_LOG_DEBUG, ("Stopping threads for MediaStreamGraph %p", this));
  1.1475 +
  1.1476 +  if (mThread) {
  1.1477 +    mThread->Shutdown();
  1.1478 +    mThread = nullptr;
  1.1479 +  }
  1.1480 +}
  1.1481 +
  1.1482 +void
  1.1483 +MediaStreamGraphImpl::ForceShutDown()
  1.1484 +{
  1.1485 +  NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
  1.1486 +  STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p ForceShutdown", this));
  1.1487 +  {
  1.1488 +    MonitorAutoLock lock(mMonitor);
  1.1489 +    mForceShutDown = true;
  1.1490 +    EnsureImmediateWakeUpLocked(lock);
  1.1491 +  }
  1.1492 +}
  1.1493 +
  1.1494 +namespace {
  1.1495 +
  1.1496 +class MediaStreamGraphInitThreadRunnable : public nsRunnable {
  1.1497 +public:
  1.1498 +  explicit MediaStreamGraphInitThreadRunnable(MediaStreamGraphImpl* aGraph)
  1.1499 +    : mGraph(aGraph)
  1.1500 +  {
  1.1501 +  }
  1.1502 +  NS_IMETHOD Run()
  1.1503 +  {
  1.1504 +    char aLocal;
  1.1505 +    profiler_register_thread("MediaStreamGraph", &aLocal);
  1.1506 +    mGraph->RunThread();
  1.1507 +    return NS_OK;
  1.1508 +  }
  1.1509 +private:
  1.1510 +  MediaStreamGraphImpl* mGraph;
  1.1511 +};
  1.1512 +
  1.1513 +class MediaStreamGraphThreadRunnable : public nsRunnable {
  1.1514 +public:
  1.1515 +  explicit MediaStreamGraphThreadRunnable(MediaStreamGraphImpl* aGraph)
  1.1516 +    : mGraph(aGraph)
  1.1517 +  {
  1.1518 +  }
  1.1519 +  NS_IMETHOD Run()
  1.1520 +  {
  1.1521 +    mGraph->RunThread();
  1.1522 +    return NS_OK;
  1.1523 +  }
  1.1524 +private:
  1.1525 +  MediaStreamGraphImpl* mGraph;
  1.1526 +};
  1.1527 +
  1.1528 +class MediaStreamGraphShutDownRunnable : public nsRunnable {
  1.1529 +public:
  1.1530 +  MediaStreamGraphShutDownRunnable(MediaStreamGraphImpl* aGraph) : mGraph(aGraph) {}
  1.1531 +  NS_IMETHOD Run()
  1.1532 +  {
  1.1533 +    NS_ASSERTION(mGraph->mDetectedNotRunning,
  1.1534 +                 "We should know the graph thread control loop isn't running!");
  1.1535 +
  1.1536 +    mGraph->ShutdownThreads();
  1.1537 +
  1.1538 +    // mGraph's thread is not running so it's OK to do whatever here
  1.1539 +    if (mGraph->IsEmpty()) {
  1.1540 +      // mGraph is no longer needed, so delete it.
  1.1541 +      mGraph->Destroy();
  1.1542 +    } else {
  1.1543 +      // The graph is not empty.  We must be in a forced shutdown, or a
  1.1544 +      // non-realtime graph that has finished processing.  Some later
   1.1545 +      // AppendMessage will detect that the graph has been emptied, and
  1.1546 +      // delete it.
  1.1547 +      NS_ASSERTION(mGraph->mForceShutDown || !mGraph->mRealtime,
  1.1548 +                   "Not in forced shutdown?");
  1.1549 +      for (uint32_t i = 0; i < mGraph->mStreams.Length(); ++i) {
  1.1550 +        DOMMediaStream* s = mGraph->mStreams[i]->GetWrapper();
  1.1551 +        if (s) {
  1.1552 +          s->NotifyMediaStreamGraphShutdown();
  1.1553 +        }
  1.1554 +      }
  1.1555 +
  1.1556 +      mGraph->mLifecycleState =
  1.1557 +        MediaStreamGraphImpl::LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION;
  1.1558 +    }
  1.1559 +    return NS_OK;
  1.1560 +  }
  1.1561 +private:
  1.1562 +  MediaStreamGraphImpl* mGraph;
  1.1563 +};
  1.1564 +
  1.1565 +class MediaStreamGraphStableStateRunnable : public nsRunnable {
  1.1566 +public:
  1.1567 +  explicit MediaStreamGraphStableStateRunnable(MediaStreamGraphImpl* aGraph)
  1.1568 +    : mGraph(aGraph)
  1.1569 +  {
  1.1570 +  }
  1.1571 +  NS_IMETHOD Run()
  1.1572 +  {
  1.1573 +    if (mGraph) {
  1.1574 +      mGraph->RunInStableState();
  1.1575 +    }
  1.1576 +    return NS_OK;
  1.1577 +  }
  1.1578 +private:
  1.1579 +  MediaStreamGraphImpl* mGraph;
  1.1580 +};
  1.1581 +
  1.1582 +/*
  1.1583 + * Control messages forwarded from main thread to graph manager thread
  1.1584 + */
  1.1585 +class CreateMessage : public ControlMessage {
  1.1586 +public:
  1.1587 +  CreateMessage(MediaStream* aStream) : ControlMessage(aStream) {}
  1.1588 +  virtual void Run() MOZ_OVERRIDE
  1.1589 +  {
  1.1590 +    mStream->GraphImpl()->AddStream(mStream);
  1.1591 +    mStream->Init();
  1.1592 +  }
  1.1593 +  virtual void RunDuringShutdown() MOZ_OVERRIDE
  1.1594 +  {
   1.1595 +    // Run this message during shutdown too, so that the number of streams
   1.1596 +    // registered with the graph stays balanced as they're destroyed during
   1.1597 +    // shutdown.
  1.1598 +    Run();
  1.1599 +  }
  1.1600 +};
  1.1601 +
  1.1602 +class MediaStreamGraphShutdownObserver MOZ_FINAL : public nsIObserver
  1.1603 +{
  1.1604 +public:
  1.1605 +  NS_DECL_ISUPPORTS
  1.1606 +  NS_DECL_NSIOBSERVER
  1.1607 +};
  1.1608 +
  1.1609 +}
  1.1610 +
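          +// Main-thread stable-state handler: applies queued StreamUpdates, starts the
          +// graph thread if needed, forwards batched control messages to the graph,
          +// revives or begins shutting down the graph according to its lifecycle state,
          +// and finally runs update runnables and shutdown control messages outside the
          +// monitor.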
  1.1611 +void
  1.1612 +MediaStreamGraphImpl::RunInStableState()
  1.1613 +{
  1.1614 +  NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
  1.1615 +
  1.1616 +  nsTArray<nsCOMPtr<nsIRunnable> > runnables;
  1.1617 +  // When we're doing a forced shutdown, pending control messages may be
  1.1618 +  // run on the main thread via RunDuringShutdown. Those messages must
  1.1619 +  // run without the graph monitor being held. So, we collect them here.
  1.1620 +  nsTArray<nsAutoPtr<ControlMessage> > controlMessagesToRunDuringShutdown;
  1.1621 +
  1.1622 +  {
  1.1623 +    MonitorAutoLock lock(mMonitor);
  1.1624 +    mPostedRunInStableStateEvent = false;
  1.1625 +
  1.1626 +    runnables.SwapElements(mUpdateRunnables);
  1.1627 +    for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) {
  1.1628 +      StreamUpdate* update = &mStreamUpdates[i];
  1.1629 +      if (update->mStream) {
  1.1630 +        ApplyStreamUpdate(update);
  1.1631 +      }
  1.1632 +    }
  1.1633 +    mStreamUpdates.Clear();
  1.1634 +
  1.1635 +    // Don't start the thread for a non-realtime graph until it has been
  1.1636 +    // explicitly started by StartNonRealtimeProcessing.
  1.1637 +    if (mLifecycleState == LIFECYCLE_THREAD_NOT_STARTED &&
  1.1638 +        (mRealtime || mNonRealtimeProcessing)) {
  1.1639 +      mLifecycleState = LIFECYCLE_RUNNING;
  1.1640 +      // Start the thread now. We couldn't start it earlier because
  1.1641 +      // the graph might exit immediately on finding it has no streams. The
  1.1642 +      // first message for a new graph must create a stream.
  1.1643 +      nsCOMPtr<nsIRunnable> event = new MediaStreamGraphInitThreadRunnable(this);
  1.1644 +      NS_NewNamedThread("MediaStreamGrph", getter_AddRefs(mThread), event);
  1.1645 +    }
  1.1646 +
  1.1647 +    if (mCurrentTaskMessageQueue.IsEmpty()) {
  1.1648 +      if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP && IsEmpty()) {
  1.1649 +        // Complete shutdown. First, ensure that this graph is no longer used.
   1.1650 +        // A new graph will be created if one is needed.
  1.1651 +        STREAM_LOG(PR_LOG_DEBUG, ("Disconnecting MediaStreamGraph %p", this));
  1.1652 +        if (this == gGraph) {
  1.1653 +          // null out gGraph if that's the graph being shut down
  1.1654 +          gGraph = nullptr;
  1.1655 +        }
  1.1656 +        // Asynchronously clean up old graph. We don't want to do this
  1.1657 +        // synchronously because it spins the event loop waiting for threads
  1.1658 +        // to shut down, and we don't want to do that in a stable state handler.
  1.1659 +        mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
  1.1660 +        nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
  1.1661 +        NS_DispatchToMainThread(event);
  1.1662 +      }
  1.1663 +    } else {
  1.1664 +      if (mLifecycleState <= LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
  1.1665 +        MessageBlock* block = mMessageQueue.AppendElement();
  1.1666 +        block->mMessages.SwapElements(mCurrentTaskMessageQueue);
  1.1667 +        block->mGraphUpdateIndex = mNextGraphUpdateIndex;
  1.1668 +        ++mNextGraphUpdateIndex;
  1.1669 +        EnsureNextIterationLocked(lock);
  1.1670 +      }
  1.1671 +
  1.1672 +      // If the MediaStreamGraph has more messages going to it, try to revive
  1.1673 +      // it to process those messages. Don't do this if we're in a forced
  1.1674 +      // shutdown or it's a non-realtime graph that has already terminated
  1.1675 +      // processing.
  1.1676 +      if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP &&
  1.1677 +          mRealtime && !mForceShutDown) {
  1.1678 +        mLifecycleState = LIFECYCLE_RUNNING;
  1.1679 +        // Revive the MediaStreamGraph since we have more messages going to it.
  1.1680 +        // Note that we need to put messages into its queue before reviving it,
  1.1681 +        // or it might exit immediately.
  1.1682 +        nsCOMPtr<nsIRunnable> event = new MediaStreamGraphThreadRunnable(this);
  1.1683 +        mThread->Dispatch(event, 0);
  1.1684 +      }
  1.1685 +    }
  1.1686 +
  1.1687 +    if ((mForceShutDown || !mRealtime) &&
  1.1688 +        mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
  1.1689 +      // Defer calls to RunDuringShutdown() to happen while mMonitor is not held.
  1.1690 +      for (uint32_t i = 0; i < mMessageQueue.Length(); ++i) {
  1.1691 +        MessageBlock& mb = mMessageQueue[i];
  1.1692 +        controlMessagesToRunDuringShutdown.MoveElementsFrom(mb.mMessages);
  1.1693 +      }
  1.1694 +      mMessageQueue.Clear();
  1.1695 +      MOZ_ASSERT(mCurrentTaskMessageQueue.IsEmpty());
  1.1696 +      // Stop MediaStreamGraph threads. Do not clear gGraph since
  1.1697 +      // we have outstanding DOM objects that may need it.
  1.1698 +      mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
  1.1699 +      nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
  1.1700 +      NS_DispatchToMainThread(event);
  1.1701 +    }
  1.1702 +
  1.1703 +    mDetectedNotRunning = mLifecycleState > LIFECYCLE_RUNNING;
  1.1704 +  }
  1.1705 +
  1.1706 +  // Make sure we get a new current time in the next event loop task
  1.1707 +  mPostedRunInStableState = false;
  1.1708 +
  1.1709 +  for (uint32_t i = 0; i < runnables.Length(); ++i) {
  1.1710 +    runnables[i]->Run();
  1.1711 +  }
  1.1712 +  for (uint32_t i = 0; i < controlMessagesToRunDuringShutdown.Length(); ++i) {
  1.1713 +    controlMessagesToRunDuringShutdown[i]->RunDuringShutdown();
  1.1714 +  }
  1.1715 +
  1.1716 +#ifdef DEBUG
  1.1717 +  mCanRunMessagesSynchronously = mDetectedNotRunning &&
  1.1718 +    mLifecycleState >= LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
  1.1719 +#endif
  1.1720 +}
  1.1721 +
  1.1722 +static NS_DEFINE_CID(kAppShellCID, NS_APPSHELL_CID);
  1.1723 +
  1.1724 +void
  1.1725 +MediaStreamGraphImpl::EnsureRunInStableState()
  1.1726 +{
  1.1727 +  NS_ASSERTION(NS_IsMainThread(), "main thread only");
  1.1728 +
  1.1729 +  if (mPostedRunInStableState)
  1.1730 +    return;
  1.1731 +  mPostedRunInStableState = true;
  1.1732 +  nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this);
  1.1733 +  nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID);
  1.1734 +  if (appShell) {
  1.1735 +    appShell->RunInStableState(event);
  1.1736 +  } else {
  1.1737 +    NS_ERROR("Appshell already destroyed?");
  1.1738 +  }
  1.1739 +}
  1.1740 +
  1.1741 +void
  1.1742 +MediaStreamGraphImpl::EnsureStableStateEventPosted()
  1.1743 +{
  1.1744 +  mMonitor.AssertCurrentThreadOwns();
  1.1745 +
  1.1746 +  if (mPostedRunInStableStateEvent)
  1.1747 +    return;
  1.1748 +  mPostedRunInStableStateEvent = true;
  1.1749 +  nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this);
  1.1750 +  NS_DispatchToMainThread(event);
  1.1751 +}
  1.1752 +
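          +// Queues a control message for the graph thread; it is forwarded in the next
          +// stable state. If the graph thread has already stopped and main-thread
          +// cleanup has happened, the message is instead run synchronously via
          +// RunDuringShutdown() and deleted.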
  1.1753 +void
  1.1754 +MediaStreamGraphImpl::AppendMessage(ControlMessage* aMessage)
  1.1755 +{
  1.1756 +  NS_ASSERTION(NS_IsMainThread(), "main thread only");
  1.1757 +  NS_ASSERTION(!aMessage->GetStream() ||
  1.1758 +               !aMessage->GetStream()->IsDestroyed(),
  1.1759 +               "Stream already destroyed");
  1.1760 +
  1.1761 +  if (mDetectedNotRunning &&
  1.1762 +      mLifecycleState > LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
  1.1763 +    // The graph control loop is not running and main thread cleanup has
  1.1764 +    // happened. From now on we can't append messages to mCurrentTaskMessageQueue,
  1.1765 +    // because that will never be processed again, so just RunDuringShutdown
  1.1766 +    // this message.
  1.1767 +    // This should only happen during forced shutdown, or after a non-realtime
  1.1768 +    // graph has finished processing.
  1.1769 +#ifdef DEBUG
  1.1770 +    MOZ_ASSERT(mCanRunMessagesSynchronously);
  1.1771 +    mCanRunMessagesSynchronously = false;
  1.1772 +#endif
  1.1773 +    aMessage->RunDuringShutdown();
  1.1774 +#ifdef DEBUG
  1.1775 +    mCanRunMessagesSynchronously = true;
  1.1776 +#endif
  1.1777 +    delete aMessage;
  1.1778 +    if (IsEmpty() &&
  1.1779 +        mLifecycleState >= LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION) {
  1.1780 +      if (gGraph == this) {
  1.1781 +        gGraph = nullptr;
  1.1782 +      }
  1.1783 +      Destroy();
  1.1784 +    }
  1.1785 +    return;
  1.1786 +  }
  1.1787 +
  1.1788 +  mCurrentTaskMessageQueue.AppendElement(aMessage);
  1.1789 +  EnsureRunInStableState();
  1.1790 +}
  1.1791 +
  1.1792 +MediaStream::MediaStream(DOMMediaStream* aWrapper)
  1.1793 +  : mBufferStartTime(0)
  1.1794 +  , mExplicitBlockerCount(0)
  1.1795 +  , mBlocked(false)
  1.1796 +  , mGraphUpdateIndices(0)
  1.1797 +  , mFinished(false)
  1.1798 +  , mNotifiedFinished(false)
  1.1799 +  , mNotifiedBlocked(false)
  1.1800 +  , mHasCurrentData(false)
  1.1801 +  , mNotifiedHasCurrentData(false)
  1.1802 +  , mWrapper(aWrapper)
  1.1803 +  , mMainThreadCurrentTime(0)
  1.1804 +  , mMainThreadFinished(false)
  1.1805 +  , mMainThreadDestroyed(false)
  1.1806 +  , mGraph(nullptr)
  1.1807 +  , mAudioChannelType(dom::AudioChannel::Normal)
  1.1808 +{
  1.1809 +  MOZ_COUNT_CTOR(MediaStream);
  1.1810 +  // aWrapper should not already be connected to a MediaStream! It needs
  1.1811 +  // to be hooked up to this stream, and since this stream is only just
  1.1812 +  // being created now, aWrapper must not be connected to anything.
  1.1813 +  NS_ASSERTION(!aWrapper || !aWrapper->GetStream(),
  1.1814 +               "Wrapper already has another media stream hooked up to it!");
  1.1815 +}
  1.1816 +
  1.1817 +size_t
  1.1818 +MediaStream::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
  1.1819 +{
  1.1820 +  size_t amount = 0;
  1.1821 +
  1.1822 +  // Not owned:
  1.1823 +  // - mGraph - Not reported here
  1.1824 +  // - mConsumers - elements
  1.1825 +  // Future:
  1.1826 +  // - mWrapper
  1.1827 +  // - mVideoOutputs - elements
  1.1828 +  // - mLastPlayedVideoFrame
  1.1829 +  // - mListeners - elements
  1.1830 +  // - mAudioOutputStreams - elements
  1.1831 +
  1.1832 +  amount += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
  1.1833 +  amount += mAudioOutputs.SizeOfExcludingThis(aMallocSizeOf);
  1.1834 +  amount += mVideoOutputs.SizeOfExcludingThis(aMallocSizeOf);
  1.1835 +  amount += mExplicitBlockerCount.SizeOfExcludingThis(aMallocSizeOf);
  1.1836 +  amount += mListeners.SizeOfExcludingThis(aMallocSizeOf);
  1.1837 +  amount += mMainThreadListeners.SizeOfExcludingThis(aMallocSizeOf);
  1.1838 +  amount += mDisabledTrackIDs.SizeOfExcludingThis(aMallocSizeOf);
  1.1839 +  amount += mBlocked.SizeOfExcludingThis(aMallocSizeOf);
  1.1840 +  amount += mGraphUpdateIndices.SizeOfExcludingThis(aMallocSizeOf);
  1.1841 +  amount += mConsumers.SizeOfExcludingThis(aMallocSizeOf);
  1.1842 +  amount += mAudioOutputStreams.SizeOfExcludingThis(aMallocSizeOf);
  1.1843 +  for (size_t i = 0; i < mAudioOutputStreams.Length(); i++) {
  1.1844 +    amount += mAudioOutputStreams[i].SizeOfExcludingThis(aMallocSizeOf);
  1.1845 +  }
  1.1846 +
  1.1847 +  return amount;
  1.1848 +}
  1.1849 +
  1.1850 +size_t
  1.1851 +MediaStream::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
  1.1852 +{
  1.1853 +  return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  1.1854 +}
  1.1855 +
  1.1856 +void
  1.1857 +MediaStream::Init()
  1.1858 +{
  1.1859 +  MediaStreamGraphImpl* graph = GraphImpl();
  1.1860 +  mBlocked.SetAtAndAfter(graph->mCurrentTime, true);
  1.1861 +  mExplicitBlockerCount.SetAtAndAfter(graph->mCurrentTime, true);
  1.1862 +  mExplicitBlockerCount.SetAtAndAfter(graph->mStateComputedTime, false);
  1.1863 +}
  1.1864 +
  1.1865 +MediaStreamGraphImpl*
  1.1866 +MediaStream::GraphImpl()
  1.1867 +{
  1.1868 +  return mGraph;
  1.1869 +}
  1.1870 +
  1.1871 +MediaStreamGraph*
  1.1872 +MediaStream::Graph()
  1.1873 +{
  1.1874 +  return mGraph;
  1.1875 +}
  1.1876 +
  1.1877 +void
  1.1878 +MediaStream::SetGraphImpl(MediaStreamGraphImpl* aGraph)
  1.1879 +{
  1.1880 +  MOZ_ASSERT(!mGraph, "Should only be called once");
  1.1881 +  mGraph = aGraph;
  1.1882 +}
  1.1883 +
  1.1884 +void
  1.1885 +MediaStream::SetGraphImpl(MediaStreamGraph* aGraph)
  1.1886 +{
  1.1887 +  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph);
  1.1888 +  SetGraphImpl(graph);
  1.1889 +}
  1.1890 +
  1.1891 +StreamTime
  1.1892 +MediaStream::GraphTimeToStreamTime(GraphTime aTime)
  1.1893 +{
  1.1894 +  return GraphImpl()->GraphTimeToStreamTime(this, aTime);
  1.1895 +}
  1.1896 +
  1.1897 +StreamTime
  1.1898 +MediaStream::GraphTimeToStreamTimeOptimistic(GraphTime aTime)
  1.1899 +{
  1.1900 +  return GraphImpl()->GraphTimeToStreamTimeOptimistic(this, aTime);
  1.1901 +}
  1.1902 +
  1.1903 +GraphTime
  1.1904 +MediaStream::StreamTimeToGraphTime(StreamTime aTime)
  1.1905 +{
  1.1906 +  return GraphImpl()->StreamTimeToGraphTime(this, aTime, 0);
  1.1907 +}
  1.1908 +
  1.1909 +void
  1.1910 +MediaStream::FinishOnGraphThread()
  1.1911 +{
  1.1912 +  GraphImpl()->FinishStream(this);
  1.1913 +}
  1.1914 +
  1.1915 +int64_t
  1.1916 +MediaStream::GetProcessingGraphUpdateIndex()
  1.1917 +{
  1.1918 +  return GraphImpl()->GetProcessingGraphUpdateIndex();
  1.1919 +}
  1.1920 +
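          +// Returns the track with aTrackId, creating it if necessary. When a track is
          +// created, every listener is notified with TRACK_EVENT_CREATED and an empty
          +// AudioSegment before the track is added to the buffer.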
  1.1921 +StreamBuffer::Track*
  1.1922 +MediaStream::EnsureTrack(TrackID aTrackId, TrackRate aSampleRate)
  1.1923 +{
  1.1924 +  StreamBuffer::Track* track = mBuffer.FindTrack(aTrackId);
  1.1925 +  if (!track) {
  1.1926 +    nsAutoPtr<MediaSegment> segment(new AudioSegment());
  1.1927 +    for (uint32_t j = 0; j < mListeners.Length(); ++j) {
  1.1928 +      MediaStreamListener* l = mListeners[j];
  1.1929 +      l->NotifyQueuedTrackChanges(Graph(), aTrackId,
  1.1930 +                                  GraphImpl()->AudioSampleRate(), 0,
  1.1931 +                                  MediaStreamListener::TRACK_EVENT_CREATED,
  1.1932 +                                  *segment);
  1.1933 +    }
  1.1934 +    track = &mBuffer.AddTrack(aTrackId, aSampleRate, 0, segment.forget());
  1.1935 +  }
  1.1936 +  return track;
  1.1937 +}
  1.1938 +
  1.1939 +void
  1.1940 +MediaStream::RemoveAllListenersImpl()
  1.1941 +{
  1.1942 +  for (int32_t i = mListeners.Length() - 1; i >= 0; --i) {
  1.1943 +    nsRefPtr<MediaStreamListener> listener = mListeners[i].forget();
  1.1944 +    listener->NotifyRemoved(GraphImpl());
  1.1945 +  }
  1.1946 +  mListeners.Clear();
  1.1947 +}
  1.1948 +
  1.1949 +void
  1.1950 +MediaStream::DestroyImpl()
  1.1951 +{
  1.1952 +  for (int32_t i = mConsumers.Length() - 1; i >= 0; --i) {
  1.1953 +    mConsumers[i]->Disconnect();
  1.1954 +  }
  1.1955 +  for (uint32_t i = 0; i < mAudioOutputStreams.Length(); ++i) {
  1.1956 +    mAudioOutputStreams[i].mStream->Shutdown();
  1.1957 +  }
  1.1958 +  mAudioOutputStreams.Clear();
  1.1959 +  mGraph = nullptr;
  1.1960 +}
  1.1961 +
  1.1962 +void
  1.1963 +MediaStream::Destroy()
  1.1964 +{
  1.1965 +  // Keep this stream alive until we leave this method
  1.1966 +  nsRefPtr<MediaStream> kungFuDeathGrip = this;
  1.1967 +
  1.1968 +  class Message : public ControlMessage {
  1.1969 +  public:
  1.1970 +    Message(MediaStream* aStream) : ControlMessage(aStream) {}
  1.1971 +    virtual void Run()
  1.1972 +    {
  1.1973 +      mStream->RemoveAllListenersImpl();
  1.1974 +      auto graph = mStream->GraphImpl();
  1.1975 +      mStream->DestroyImpl();
  1.1976 +      graph->RemoveStream(mStream);
  1.1977 +    }
  1.1978 +    virtual void RunDuringShutdown()
  1.1979 +    { Run(); }
  1.1980 +  };
  1.1981 +  mWrapper = nullptr;
  1.1982 +  GraphImpl()->AppendMessage(new Message(this));
  1.1983 +  // Message::RunDuringShutdown may have removed this stream from the graph,
  1.1984 +  // but our kungFuDeathGrip above will have kept this stream alive if
  1.1985 +  // necessary.
  1.1986 +  mMainThreadDestroyed = true;
  1.1987 +}
  1.1988 +
  1.1989 +void
  1.1990 +MediaStream::AddAudioOutput(void* aKey)
  1.1991 +{
  1.1992 +  class Message : public ControlMessage {
  1.1993 +  public:
  1.1994 +    Message(MediaStream* aStream, void* aKey) : ControlMessage(aStream), mKey(aKey) {}
  1.1995 +    virtual void Run()
  1.1996 +    {
  1.1997 +      mStream->AddAudioOutputImpl(mKey);
  1.1998 +    }
  1.1999 +    void* mKey;
  1.2000 +  };
  1.2001 +  GraphImpl()->AppendMessage(new Message(this, aKey));
  1.2002 +}
  1.2003 +
  1.2004 +void
  1.2005 +MediaStream::SetAudioOutputVolumeImpl(void* aKey, float aVolume)
  1.2006 +{
  1.2007 +  for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) {
  1.2008 +    if (mAudioOutputs[i].mKey == aKey) {
  1.2009 +      mAudioOutputs[i].mVolume = aVolume;
  1.2010 +      return;
  1.2011 +    }
  1.2012 +  }
  1.2013 +  NS_ERROR("Audio output key not found");
  1.2014 +}
  1.2015 +
  1.2016 +void
  1.2017 +MediaStream::SetAudioOutputVolume(void* aKey, float aVolume)
  1.2018 +{
  1.2019 +  class Message : public ControlMessage {
  1.2020 +  public:
  1.2021 +    Message(MediaStream* aStream, void* aKey, float aVolume) :
  1.2022 +      ControlMessage(aStream), mKey(aKey), mVolume(aVolume) {}
  1.2023 +    virtual void Run()
  1.2024 +    {
  1.2025 +      mStream->SetAudioOutputVolumeImpl(mKey, mVolume);
  1.2026 +    }
  1.2027 +    void* mKey;
  1.2028 +    float mVolume;
  1.2029 +  };
  1.2030 +  GraphImpl()->AppendMessage(new Message(this, aKey, aVolume));
  1.2031 +}
  1.2032 +
  1.2033 +void
  1.2034 +MediaStream::RemoveAudioOutputImpl(void* aKey)
  1.2035 +{
  1.2036 +  for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) {
  1.2037 +    if (mAudioOutputs[i].mKey == aKey) {
  1.2038 +      mAudioOutputs.RemoveElementAt(i);
  1.2039 +      return;
  1.2040 +    }
  1.2041 +  }
  1.2042 +  NS_ERROR("Audio output key not found");
  1.2043 +}
  1.2044 +
  1.2045 +void
  1.2046 +MediaStream::RemoveAudioOutput(void* aKey)
  1.2047 +{
  1.2048 +  class Message : public ControlMessage {
  1.2049 +  public:
  1.2050 +    Message(MediaStream* aStream, void* aKey) :
  1.2051 +      ControlMessage(aStream), mKey(aKey) {}
  1.2052 +    virtual void Run()
  1.2053 +    {
  1.2054 +      mStream->RemoveAudioOutputImpl(mKey);
  1.2055 +    }
  1.2056 +    void* mKey;
  1.2057 +  };
  1.2058 +  GraphImpl()->AppendMessage(new Message(this, aKey));
  1.2059 +}
  1.2060 +
  1.2061 +void
  1.2062 +MediaStream::AddVideoOutput(VideoFrameContainer* aContainer)
  1.2063 +{
  1.2064 +  class Message : public ControlMessage {
  1.2065 +  public:
  1.2066 +    Message(MediaStream* aStream, VideoFrameContainer* aContainer) :
  1.2067 +      ControlMessage(aStream), mContainer(aContainer) {}
  1.2068 +    virtual void Run()
  1.2069 +    {
  1.2070 +      mStream->AddVideoOutputImpl(mContainer.forget());
  1.2071 +    }
  1.2072 +    nsRefPtr<VideoFrameContainer> mContainer;
  1.2073 +  };
  1.2074 +  GraphImpl()->AppendMessage(new Message(this, aContainer));
  1.2075 +}
  1.2076 +
  1.2077 +void
  1.2078 +MediaStream::RemoveVideoOutput(VideoFrameContainer* aContainer)
  1.2079 +{
  1.2080 +  class Message : public ControlMessage {
  1.2081 +  public:
  1.2082 +    Message(MediaStream* aStream, VideoFrameContainer* aContainer) :
  1.2083 +      ControlMessage(aStream), mContainer(aContainer) {}
  1.2084 +    virtual void Run()
  1.2085 +    {
  1.2086 +      mStream->RemoveVideoOutputImpl(mContainer);
  1.2087 +    }
  1.2088 +    nsRefPtr<VideoFrameContainer> mContainer;
  1.2089 +  };
  1.2090 +  GraphImpl()->AppendMessage(new Message(this, aContainer));
  1.2091 +}
  1.2092 +
  1.2093 +void
  1.2094 +MediaStream::ChangeExplicitBlockerCount(int32_t aDelta)
  1.2095 +{
  1.2096 +  class Message : public ControlMessage {
  1.2097 +  public:
  1.2098 +    Message(MediaStream* aStream, int32_t aDelta) :
  1.2099 +      ControlMessage(aStream), mDelta(aDelta) {}
  1.2100 +    virtual void Run()
  1.2101 +    {
  1.2102 +      mStream->ChangeExplicitBlockerCountImpl(
  1.2103 +          mStream->GraphImpl()->mStateComputedTime, mDelta);
  1.2104 +    }
  1.2105 +    int32_t mDelta;
  1.2106 +  };
  1.2107 +
  1.2108 +  // This can happen if this method has been called asynchronously, and the
  1.2109 +  // stream has been destroyed since then.
  1.2110 +  if (mMainThreadDestroyed) {
  1.2111 +    return;
  1.2112 +  }
  1.2113 +  GraphImpl()->AppendMessage(new Message(this, aDelta));
  1.2114 +}
  1.2115 +
  1.2116 +void
  1.2117 +MediaStream::AddListenerImpl(already_AddRefed<MediaStreamListener> aListener)
  1.2118 +{
  1.2119 +  MediaStreamListener* listener = *mListeners.AppendElement() = aListener;
  1.2120 +  listener->NotifyBlockingChanged(GraphImpl(),
  1.2121 +    mNotifiedBlocked ? MediaStreamListener::BLOCKED : MediaStreamListener::UNBLOCKED);
  1.2122 +  if (mNotifiedFinished) {
  1.2123 +    listener->NotifyFinished(GraphImpl());
  1.2124 +  }
  1.2125 +  if (mNotifiedHasCurrentData) {
  1.2126 +    listener->NotifyHasCurrentData(GraphImpl());
  1.2127 +  }
  1.2128 +}
  1.2129 +
  1.2130 +void
  1.2131 +MediaStream::AddListener(MediaStreamListener* aListener)
  1.2132 +{
  1.2133 +  class Message : public ControlMessage {
  1.2134 +  public:
  1.2135 +    Message(MediaStream* aStream, MediaStreamListener* aListener) :
  1.2136 +      ControlMessage(aStream), mListener(aListener) {}
  1.2137 +    virtual void Run()
  1.2138 +    {
  1.2139 +      mStream->AddListenerImpl(mListener.forget());
  1.2140 +    }
  1.2141 +    nsRefPtr<MediaStreamListener> mListener;
  1.2142 +  };
  1.2143 +  GraphImpl()->AppendMessage(new Message(this, aListener));
  1.2144 +}
  1.2145 +
  1.2146 +void
  1.2147 +MediaStream::RemoveListenerImpl(MediaStreamListener* aListener)
  1.2148 +{
  1.2149 +  // wouldn't need this if we could do it in the opposite order
  1.2150 +  nsRefPtr<MediaStreamListener> listener(aListener);
  1.2151 +  mListeners.RemoveElement(aListener);
  1.2152 +  listener->NotifyRemoved(GraphImpl());
  1.2153 +}
  1.2154 +
  1.2155 +void
  1.2156 +MediaStream::RemoveListener(MediaStreamListener* aListener)
  1.2157 +{
  1.2158 +  class Message : public ControlMessage {
  1.2159 +  public:
  1.2160 +    Message(MediaStream* aStream, MediaStreamListener* aListener) :
  1.2161 +      ControlMessage(aStream), mListener(aListener) {}
  1.2162 +    virtual void Run()
  1.2163 +    {
  1.2164 +      mStream->RemoveListenerImpl(mListener);
  1.2165 +    }
  1.2166 +    nsRefPtr<MediaStreamListener> mListener;
  1.2167 +  };
   1.2168 +  // If the stream is destroyed, its listeners have already been removed or
   1.2169 +  // soon will be.
  1.2170 +  if (!IsDestroyed()) {
  1.2171 +    GraphImpl()->AppendMessage(new Message(this, aListener));
  1.2172 +  }
  1.2173 +}
  1.2174 +
  1.2175 +void
  1.2176 +MediaStream::RunAfterPendingUpdates(nsRefPtr<nsIRunnable> aRunnable)
  1.2177 +{
  1.2178 +  MOZ_ASSERT(NS_IsMainThread());
  1.2179 +  MediaStreamGraphImpl* graph = GraphImpl();
  1.2180 +
  1.2181 +  // Special case when a non-realtime graph has not started, to ensure the
  1.2182 +  // runnable will run in finite time.
  1.2183 +  if (!(graph->mRealtime || graph->mNonRealtimeProcessing)) {
  1.2184 +    aRunnable->Run();
  1.2185 +  }
  1.2186 +
  1.2187 +  class Message : public ControlMessage {
  1.2188 +  public:
  1.2189 +    explicit Message(MediaStream* aStream,
  1.2190 +                     already_AddRefed<nsIRunnable> aRunnable)
  1.2191 +      : ControlMessage(aStream)
  1.2192 +      , mRunnable(aRunnable) {}
  1.2193 +    virtual void Run() MOZ_OVERRIDE
  1.2194 +    {
  1.2195 +      mStream->Graph()->
  1.2196 +        DispatchToMainThreadAfterStreamStateUpdate(mRunnable.forget());
  1.2197 +    }
  1.2198 +    virtual void RunDuringShutdown() MOZ_OVERRIDE
  1.2199 +    {
  1.2200 +      // Don't run mRunnable now as it may call AppendMessage() which would
  1.2201 +      // assume that there are no remaining controlMessagesToRunDuringShutdown.
  1.2202 +      MOZ_ASSERT(NS_IsMainThread());
  1.2203 +      NS_DispatchToCurrentThread(mRunnable);
  1.2204 +    }
  1.2205 +  private:
  1.2206 +    nsRefPtr<nsIRunnable> mRunnable;
  1.2207 +  };
  1.2208 +
  1.2209 +  graph->AppendMessage(new Message(this, aRunnable.forget()));
  1.2210 +}
  1.2211 +
  1.2212 +void
  1.2213 +MediaStream::SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled)
  1.2214 +{
  1.2215 +  if (aEnabled) {
  1.2216 +    mDisabledTrackIDs.RemoveElement(aTrackID);
  1.2217 +  } else {
  1.2218 +    if (!mDisabledTrackIDs.Contains(aTrackID)) {
  1.2219 +      mDisabledTrackIDs.AppendElement(aTrackID);
  1.2220 +    }
  1.2221 +  }
  1.2222 +}
  1.2223 +
  1.2224 +void
  1.2225 +MediaStream::SetTrackEnabled(TrackID aTrackID, bool aEnabled)
  1.2226 +{
  1.2227 +  class Message : public ControlMessage {
  1.2228 +  public:
  1.2229 +    Message(MediaStream* aStream, TrackID aTrackID, bool aEnabled) :
  1.2230 +      ControlMessage(aStream), mTrackID(aTrackID), mEnabled(aEnabled) {}
  1.2231 +    virtual void Run()
  1.2232 +    {
  1.2233 +      mStream->SetTrackEnabledImpl(mTrackID, mEnabled);
  1.2234 +    }
  1.2235 +    TrackID mTrackID;
  1.2236 +    bool mEnabled;
  1.2237 +  };
  1.2238 +  GraphImpl()->AppendMessage(new Message(this, aTrackID, aEnabled));
  1.2239 +}
  1.2240 +
  1.2241 +void
  1.2242 +MediaStream::ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment, MediaSegment* aRawSegment)
  1.2243 +{
  1.2244 +  // mMutex must be owned here if this is a SourceMediaStream
  1.2245 +  if (!mDisabledTrackIDs.Contains(aTrackID)) {
  1.2246 +    return;
  1.2247 +  }
  1.2248 +  aSegment->ReplaceWithDisabled();
  1.2249 +  if (aRawSegment) {
  1.2250 +    aRawSegment->ReplaceWithDisabled();
  1.2251 +  }
  1.2252 +}
  1.2253 +
  1.2254 +void
  1.2255 +SourceMediaStream::DestroyImpl()
  1.2256 +{
  1.2257 +  // Hold mMutex while mGraph is reset so that other threads holding mMutex
   1.2258 +  // can null-check mGraph and know that the graph will not be destroyed.
  1.2259 +  MutexAutoLock lock(mMutex);
  1.2260 +  MediaStream::DestroyImpl();
  1.2261 +}
  1.2262 +
  1.2263 +void
  1.2264 +SourceMediaStream::SetPullEnabled(bool aEnabled)
  1.2265 +{
  1.2266 +  MutexAutoLock lock(mMutex);
  1.2267 +  mPullEnabled = aEnabled;
  1.2268 +  if (mPullEnabled && GraphImpl()) {
  1.2269 +    GraphImpl()->EnsureNextIteration();
  1.2270 +  }
  1.2271 +}
  1.2272 +
  1.2273 +void
  1.2274 +SourceMediaStream::AddTrack(TrackID aID, TrackRate aRate, TrackTicks aStart,
  1.2275 +                            MediaSegment* aSegment)
  1.2276 +{
  1.2277 +  MutexAutoLock lock(mMutex);
  1.2278 +  TrackData* data = mUpdateTracks.AppendElement();
  1.2279 +  data->mID = aID;
  1.2280 +  data->mInputRate = aRate;
  1.2281 +  // We resample all audio input tracks to the sample rate of the audio mixer.
  1.2282 +  data->mOutputRate = aSegment->GetType() == MediaSegment::AUDIO ?
  1.2283 +                      GraphImpl()->AudioSampleRate() : aRate;
  1.2284 +  data->mStart = aStart;
  1.2285 +  data->mCommands = TRACK_CREATE;
  1.2286 +  data->mData = aSegment;
  1.2287 +  data->mHaveEnough = false;
  1.2288 +  if (auto graph = GraphImpl()) {
  1.2289 +    graph->EnsureNextIteration();
  1.2290 +  }
  1.2291 +}
  1.2292 +
  1.2293 +void
  1.2294 +SourceMediaStream::ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSegment* aSegment)
  1.2295 +{
  1.2296 +  if (aSegment->GetType() != MediaSegment::AUDIO ||
  1.2297 +      aTrackData->mInputRate == GraphImpl()->AudioSampleRate()) {
  1.2298 +    return;
  1.2299 +  }
  1.2300 +  AudioSegment* segment = static_cast<AudioSegment*>(aSegment);
  1.2301 +  if (!aTrackData->mResampler) {
  1.2302 +    int channels = segment->ChannelCount();
  1.2303 +
   1.2304 +    // If this segment is just silence, we delay instantiating the resampler.
  1.2305 +    if (channels) {
  1.2306 +      SpeexResamplerState* state = speex_resampler_init(channels,
  1.2307 +                                                        aTrackData->mInputRate,
  1.2308 +                                                        GraphImpl()->AudioSampleRate(),
  1.2309 +                                                        SPEEX_RESAMPLER_QUALITY_DEFAULT,
  1.2310 +                                                        nullptr);
  1.2311 +      if (!state) {
  1.2312 +        return;
  1.2313 +      }
  1.2314 +      aTrackData->mResampler.own(state);
  1.2315 +    }
  1.2316 +  }
  1.2317 +  segment->ResampleChunks(aTrackData->mResampler);
  1.2318 +}
  1.2319 +
  1.2320 +bool
  1.2321 +SourceMediaStream::AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegment *aRawSegment)
  1.2322 +{
  1.2323 +  MutexAutoLock lock(mMutex);
   1.2324 +  // ::EndAllTrackAndFinish() can end these before the sources notice
  1.2325 +  bool appended = false;
  1.2326 +  auto graph = GraphImpl();
  1.2327 +  if (!mFinished && graph) {
  1.2328 +    TrackData *track = FindDataForTrack(aID);
  1.2329 +    if (track) {
  1.2330 +      // Data goes into mData, and on the next iteration of the MSG moves
  1.2331 +      // into the track's segment after NotifyQueuedTrackChanges().  This adds
  1.2332 +      // 0-10ms of delay before data gets to direct listeners.
  1.2333 +      // Indirect listeners (via subsequent TrackUnion nodes) are synced to
  1.2334 +      // playout time, and so can be delayed by buffering.
  1.2335 +
  1.2336 +      // Apply track disabling before notifying any consumers directly
  1.2337 +      // or inserting into the graph
  1.2338 +      ApplyTrackDisabling(aID, aSegment, aRawSegment);
  1.2339 +
  1.2340 +      ResampleAudioToGraphSampleRate(track, aSegment);
  1.2341 +
  1.2342 +      // Must notify first, since AppendFrom() will empty out aSegment
  1.2343 +      NotifyDirectConsumers(track, aRawSegment ? aRawSegment : aSegment);
  1.2344 +      track->mData->AppendFrom(aSegment); // note: aSegment is now dead
  1.2345 +      appended = true;
  1.2346 +      graph->EnsureNextIteration();
  1.2347 +    } else {
  1.2348 +      aSegment->Clear();
  1.2349 +    }
  1.2350 +  }
  1.2351 +  return appended;
  1.2352 +}
  1.2353 +
  1.2354 +void
  1.2355 +SourceMediaStream::NotifyDirectConsumers(TrackData *aTrack,
  1.2356 +                                         MediaSegment *aSegment)
  1.2357 +{
  1.2358 +  // Call with mMutex locked
  1.2359 +  MOZ_ASSERT(aTrack);
  1.2360 +
  1.2361 +  for (uint32_t j = 0; j < mDirectListeners.Length(); ++j) {
  1.2362 +    MediaStreamDirectListener* l = mDirectListeners[j];
  1.2363 +    TrackTicks offset = 0; // FIX! need a separate TrackTicks.... or the end of the internal buffer
  1.2364 +    l->NotifyRealtimeData(static_cast<MediaStreamGraph*>(GraphImpl()), aTrack->mID, aTrack->mOutputRate,
  1.2365 +                          offset, aTrack->mCommands, *aSegment);
  1.2366 +  }
  1.2367 +}
  1.2368 +
  1.2369 +void
  1.2370 +SourceMediaStream::AddDirectListener(MediaStreamDirectListener* aListener)
  1.2371 +{
  1.2372 +  MutexAutoLock lock(mMutex);
  1.2373 +  mDirectListeners.AppendElement(aListener);
  1.2374 +}
  1.2375 +
  1.2376 +void
  1.2377 +SourceMediaStream::RemoveDirectListener(MediaStreamDirectListener* aListener)
  1.2378 +{
  1.2379 +  MutexAutoLock lock(mMutex);
  1.2380 +  mDirectListeners.RemoveElement(aListener);
  1.2381 +}
  1.2382 +
  1.2383 +bool
  1.2384 +SourceMediaStream::HaveEnoughBuffered(TrackID aID)
  1.2385 +{
  1.2386 +  MutexAutoLock lock(mMutex);
  1.2387 +  TrackData *track = FindDataForTrack(aID);
  1.2388 +  if (track) {
  1.2389 +    return track->mHaveEnough;
  1.2390 +  }
  1.2391 +  return false;
  1.2392 +}
  1.2393 +
  1.2394 +void
  1.2395 +SourceMediaStream::DispatchWhenNotEnoughBuffered(TrackID aID,
  1.2396 +    nsIEventTarget* aSignalThread, nsIRunnable* aSignalRunnable)
  1.2397 +{
  1.2398 +  MutexAutoLock lock(mMutex);
  1.2399 +  TrackData* data = FindDataForTrack(aID);
  1.2400 +  if (!data) {
  1.2401 +    aSignalThread->Dispatch(aSignalRunnable, 0);
  1.2402 +    return;
  1.2403 +  }
  1.2404 +
  1.2405 +  if (data->mHaveEnough) {
  1.2406 +    if (data->mDispatchWhenNotEnough.IsEmpty()) {
  1.2407 +      data->mDispatchWhenNotEnough.AppendElement()->Init(aSignalThread, aSignalRunnable);
  1.2408 +    }
  1.2409 +  } else {
  1.2410 +    aSignalThread->Dispatch(aSignalRunnable, 0);
  1.2411 +  }
  1.2412 +}
  1.2413 +
  1.2414 +void
  1.2415 +SourceMediaStream::EndTrack(TrackID aID)
  1.2416 +{
  1.2417 +  MutexAutoLock lock(mMutex);
   1.2418 +  // ::EndAllTrackAndFinish() can end these before the sources call this
  1.2419 +  if (!mFinished) {
  1.2420 +    TrackData *track = FindDataForTrack(aID);
  1.2421 +    if (track) {
  1.2422 +      track->mCommands |= TRACK_END;
  1.2423 +    }
  1.2424 +  }
  1.2425 +  if (auto graph = GraphImpl()) {
  1.2426 +    graph->EnsureNextIteration();
  1.2427 +  }
  1.2428 +}
  1.2429 +
  1.2430 +void
  1.2431 +SourceMediaStream::AdvanceKnownTracksTime(StreamTime aKnownTime)
  1.2432 +{
  1.2433 +  MutexAutoLock lock(mMutex);
  1.2434 +  MOZ_ASSERT(aKnownTime >= mUpdateKnownTracksTime);
  1.2435 +  mUpdateKnownTracksTime = aKnownTime;
  1.2436 +  if (auto graph = GraphImpl()) {
  1.2437 +    graph->EnsureNextIteration();
  1.2438 +  }
  1.2439 +}
  1.2440 +
  1.2441 +void
  1.2442 +SourceMediaStream::FinishWithLockHeld()
  1.2443 +{
  1.2444 +  mMutex.AssertCurrentThreadOwns();
  1.2445 +  mUpdateFinished = true;
  1.2446 +  if (auto graph = GraphImpl()) {
  1.2447 +    graph->EnsureNextIteration();
  1.2448 +  }
  1.2449 +}
  1.2450 +
  1.2451 +void
  1.2452 +SourceMediaStream::EndAllTrackAndFinish()
  1.2453 +{
  1.2454 +  MutexAutoLock lock(mMutex);
  1.2455 +  for (uint32_t i = 0; i < mUpdateTracks.Length(); ++i) {
  1.2456 +    SourceMediaStream::TrackData* data = &mUpdateTracks[i];
  1.2457 +    data->mCommands |= TRACK_END;
  1.2458 +  }
  1.2459 +  FinishWithLockHeld();
  1.2460 +  // we will call NotifyFinished() to let GetUserMedia know
  1.2461 +}
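          +// Typical producer flow (illustrative sketch only; kTrackId, inputRate and
          +// chunk are hypothetical names, not part of this file):
          +//   stream->AddTrack(kTrackId, inputRate, 0, new AudioSegment());
          +//   while (capturing) {
          +//     AudioSegment chunk;
          +//     /* fill chunk with captured frames */
          +//     stream->AppendToTrack(kTrackId, &chunk);
          +//   }
          +//   stream->EndTrack(kTrackId);   // or EndAllTrackAndFinish() to end
          +//                                 // every track and finish the stream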
  1.2462 +
  1.2463 +TrackTicks
  1.2464 +SourceMediaStream::GetBufferedTicks(TrackID aID)
  1.2465 +{
  1.2466 +  StreamBuffer::Track* track  = mBuffer.FindTrack(aID);
  1.2467 +  if (track) {
  1.2468 +    MediaSegment* segment = track->GetSegment();
  1.2469 +    if (segment) {
  1.2470 +      return segment->GetDuration() -
  1.2471 +        track->TimeToTicksRoundDown(
  1.2472 +          GraphTimeToStreamTime(GraphImpl()->mStateComputedTime));
  1.2473 +    }
  1.2474 +  }
  1.2475 +  return 0;
  1.2476 +}
  1.2477 +
  1.2478 +void
  1.2479 +SourceMediaStream::RegisterForAudioMixing()
  1.2480 +{
  1.2481 +  MutexAutoLock lock(mMutex);
  1.2482 +  mNeedsMixing = true;
  1.2483 +}
  1.2484 +
  1.2485 +bool
  1.2486 +SourceMediaStream::NeedsMixing()
  1.2487 +{
  1.2488 +  MutexAutoLock lock(mMutex);
  1.2489 +  return mNeedsMixing;
  1.2490 +}
  1.2491 +
  1.2492 +void
  1.2493 +MediaInputPort::Init()
  1.2494 +{
  1.2495 +  STREAM_LOG(PR_LOG_DEBUG, ("Adding MediaInputPort %p (from %p to %p) to the graph",
  1.2496 +             this, mSource, mDest));
  1.2497 +  mSource->AddConsumer(this);
  1.2498 +  mDest->AddInput(this);
  1.2499 +  // mPortCount decremented via MediaInputPort::Destroy's message
  1.2500 +  ++mDest->GraphImpl()->mPortCount;
  1.2501 +}
  1.2502 +
  1.2503 +void
  1.2504 +MediaInputPort::Disconnect()
  1.2505 +{
  1.2506 +  NS_ASSERTION(!mSource == !mDest,
  1.2507 +               "mSource must either both be null or both non-null");
  1.2508 +  if (!mSource)
  1.2509 +    return;
  1.2510 +
  1.2511 +  mSource->RemoveConsumer(this);
  1.2512 +  mSource = nullptr;
  1.2513 +  mDest->RemoveInput(this);
  1.2514 +  mDest = nullptr;
  1.2515 +
  1.2516 +  GraphImpl()->SetStreamOrderDirty();
  1.2517 +}
  1.2518 +
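          +// Returns the next interval, starting at or after aTime, during which the
          +// destination stream is not blocked. mStart/mEnd bound that interval (mEnd is
          +// also capped by the point where the source's blocking state changes), and
          +// mInputIsBlocked records whether the source is blocked at mStart. If the
          +// destination is blocked forever, both times are GRAPH_TIME_MAX.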
  1.2519 +MediaInputPort::InputInterval
  1.2520 +MediaInputPort::GetNextInputInterval(GraphTime aTime)
  1.2521 +{
  1.2522 +  InputInterval result = { GRAPH_TIME_MAX, GRAPH_TIME_MAX, false };
  1.2523 +  GraphTime t = aTime;
  1.2524 +  GraphTime end;
  1.2525 +  for (;;) {
  1.2526 +    if (!mDest->mBlocked.GetAt(t, &end))
  1.2527 +      break;
  1.2528 +    if (end == GRAPH_TIME_MAX)
  1.2529 +      return result;
  1.2530 +    t = end;
  1.2531 +  }
  1.2532 +  result.mStart = t;
  1.2533 +  GraphTime sourceEnd;
  1.2534 +  result.mInputIsBlocked = mSource->mBlocked.GetAt(t, &sourceEnd);
  1.2535 +  result.mEnd = std::min(end, sourceEnd);
  1.2536 +  return result;
  1.2537 +}
  1.2538 +
  1.2539 +void
  1.2540 +MediaInputPort::Destroy()
  1.2541 +{
  1.2542 +  class Message : public ControlMessage {
  1.2543 +  public:
  1.2544 +    Message(MediaInputPort* aPort)
  1.2545 +      : ControlMessage(nullptr), mPort(aPort) {}
  1.2546 +    virtual void Run()
  1.2547 +    {
  1.2548 +      mPort->Disconnect();
  1.2549 +      --mPort->GraphImpl()->mPortCount;
  1.2550 +      mPort->SetGraphImpl(nullptr);
  1.2551 +      NS_RELEASE(mPort);
  1.2552 +    }
  1.2553 +    virtual void RunDuringShutdown()
  1.2554 +    {
  1.2555 +      Run();
  1.2556 +    }
  1.2557 +    MediaInputPort* mPort;
  1.2558 +  };
  1.2559 +  GraphImpl()->AppendMessage(new Message(this));
  1.2560 +}
  1.2561 +
  1.2562 +MediaStreamGraphImpl*
  1.2563 +MediaInputPort::GraphImpl()
  1.2564 +{
  1.2565 +  return mGraph;
  1.2566 +}
  1.2567 +
  1.2568 +MediaStreamGraph*
  1.2569 +MediaInputPort::Graph()
  1.2570 +{
  1.2571 +  return mGraph;
  1.2572 +}
  1.2573 +
  1.2574 +void
  1.2575 +MediaInputPort::SetGraphImpl(MediaStreamGraphImpl* aGraph)
  1.2576 +{
  1.2577 +  MOZ_ASSERT(!mGraph || !aGraph, "Should only be set once");
  1.2578 +  mGraph = aGraph;
  1.2579 +}
  1.2580 +
  1.2581 +already_AddRefed<MediaInputPort>
  1.2582 +ProcessedMediaStream::AllocateInputPort(MediaStream* aStream, uint32_t aFlags,
  1.2583 +                                        uint16_t aInputNumber, uint16_t aOutputNumber)
  1.2584 +{
  1.2585 +  // This method creates two references to the MediaInputPort: one for
  1.2586 +  // the main thread, and one for the MediaStreamGraph.
  1.2587 +  class Message : public ControlMessage {
  1.2588 +  public:
  1.2589 +    Message(MediaInputPort* aPort)
  1.2590 +      : ControlMessage(aPort->GetDestination()),
  1.2591 +        mPort(aPort) {}
  1.2592 +    virtual void Run()
  1.2593 +    {
  1.2594 +      mPort->Init();
  1.2595 +      // The graph holds its reference implicitly
  1.2596 +      mPort->GraphImpl()->SetStreamOrderDirty();
  1.2597 +      unused << mPort.forget();
  1.2598 +    }
  1.2599 +    virtual void RunDuringShutdown()
  1.2600 +    {
  1.2601 +      Run();
  1.2602 +    }
  1.2603 +    nsRefPtr<MediaInputPort> mPort;
  1.2604 +  };
  1.2605 +  nsRefPtr<MediaInputPort> port = new MediaInputPort(aStream, this, aFlags,
  1.2606 +                                                     aInputNumber, aOutputNumber);
  1.2607 +  port->SetGraphImpl(GraphImpl());
  1.2608 +  GraphImpl()->AppendMessage(new Message(port));
  1.2609 +  return port.forget();
  1.2610 +}
  1.2611 +
  1.2612 +void
  1.2613 +ProcessedMediaStream::Finish()
  1.2614 +{
  1.2615 +  class Message : public ControlMessage {
  1.2616 +  public:
  1.2617 +    Message(ProcessedMediaStream* aStream)
  1.2618 +      : ControlMessage(aStream) {}
  1.2619 +    virtual void Run()
  1.2620 +    {
  1.2621 +      mStream->GraphImpl()->FinishStream(mStream);
  1.2622 +    }
  1.2623 +  };
  1.2624 +  GraphImpl()->AppendMessage(new Message(this));
  1.2625 +}
  1.2626 +
  1.2627 +void
  1.2628 +ProcessedMediaStream::SetAutofinish(bool aAutofinish)
  1.2629 +{
  1.2630 +  class Message : public ControlMessage {
  1.2631 +  public:
  1.2632 +    Message(ProcessedMediaStream* aStream, bool aAutofinish)
  1.2633 +      : ControlMessage(aStream), mAutofinish(aAutofinish) {}
  1.2634 +    virtual void Run()
  1.2635 +    {
  1.2636 +      static_cast<ProcessedMediaStream*>(mStream)->SetAutofinishImpl(mAutofinish);
  1.2637 +    }
  1.2638 +    bool mAutofinish;
  1.2639 +  };
  1.2640 +  GraphImpl()->AppendMessage(new Message(this, aAutofinish));
  1.2641 +}
  1.2642 +
  1.2643 +void
  1.2644 +ProcessedMediaStream::DestroyImpl()
  1.2645 +{
  1.2646 +  for (int32_t i = mInputs.Length() - 1; i >= 0; --i) {
  1.2647 +    mInputs[i]->Disconnect();
  1.2648 +  }
  1.2649 +  MediaStream::DestroyImpl();
  1.2650 +  // The stream order is only important if there are connections, in which
  1.2651 +  // case the MediaInputPort::Disconnect() calls above have already called
  1.2652 +  // SetStreamOrderDirty(). MediaStreamGraphImpl::RemoveStream() will also
  1.2653 +  // call SetStreamOrderDirty(), for other reasons.
  1.2654 +}
  1.2655 +
  1.2656 +/**
  1.2657 + * We make the initial mCurrentTime nonzero so that zero times can have
  1.2658 + * special meaning if necessary.
  1.2659 + */
  1.2660 +static const int32_t INITIAL_CURRENT_TIME = 1;
  1.2661 +
  1.2662 +MediaStreamGraphImpl::MediaStreamGraphImpl(bool aRealtime, TrackRate aSampleRate)
  1.2663 +  : mCurrentTime(INITIAL_CURRENT_TIME)
  1.2664 +  , mStateComputedTime(INITIAL_CURRENT_TIME)
  1.2665 +  , mProcessingGraphUpdateIndex(0)
  1.2666 +  , mPortCount(0)
  1.2667 +  , mMonitor("MediaStreamGraphImpl")
  1.2668 +  , mLifecycleState(LIFECYCLE_THREAD_NOT_STARTED)
  1.2669 +  , mWaitState(WAITSTATE_RUNNING)
  1.2670 +  , mEndTime(GRAPH_TIME_MAX)
  1.2671 +  , mSampleRate(aSampleRate)
  1.2672 +  , mNeedAnotherIteration(false)
  1.2673 +  , mForceShutDown(false)
  1.2674 +  , mPostedRunInStableStateEvent(false)
  1.2675 +  , mDetectedNotRunning(false)
  1.2676 +  , mPostedRunInStableState(false)
  1.2677 +  , mRealtime(aRealtime)
  1.2678 +  , mNonRealtimeProcessing(false)
  1.2679 +  , mStreamOrderDirty(false)
  1.2680 +  , mLatencyLog(AsyncLatencyLogger::Get())
  1.2681 +  , mMixer(nullptr)
  1.2682 +  , mMemoryReportMonitor("MSGIMemory")
  1.2683 +  , mSelfRef(MOZ_THIS_IN_INITIALIZER_LIST())
  1.2684 +  , mAudioStreamSizes()
  1.2685 +  , mNeedsMemoryReport(false)
  1.2686 +#ifdef DEBUG
  1.2687 +  , mCanRunMessagesSynchronously(false)
  1.2688 +#endif
  1.2689 +{
  1.2690 +#ifdef PR_LOGGING
  1.2691 +  if (!gMediaStreamGraphLog) {
  1.2692 +    gMediaStreamGraphLog = PR_NewLogModule("MediaStreamGraph");
  1.2693 +  }
  1.2694 +#endif
  1.2695 +
  1.2696 +  mCurrentTimeStamp = mInitialTimeStamp = mLastMainThreadUpdate = TimeStamp::Now();
  1.2697 +
  1.2698 +  RegisterWeakMemoryReporter(this);
  1.2699 +}
  1.2700 +
  1.2701 +void
  1.2702 +MediaStreamGraphImpl::Destroy()
  1.2703 +{
  1.2704 +  // First unregister from memory reporting.
  1.2705 +  UnregisterWeakMemoryReporter(this);
  1.2706 +
  1.2707 +  // Clear the self reference which will destroy this instance.
  1.2708 +  mSelfRef = nullptr;
  1.2709 +}
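// Lifetime note (editorial): the graph keeps itself alive through mSelfRef,
// taken in the constructor's initializer list above. Destroy() is intended to
// be called only once the graph has shut down, so clearing mSelfRef here can
// release the final reference and delete the instance.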
  1.2710 +
  1.2711 +NS_IMPL_ISUPPORTS(MediaStreamGraphShutdownObserver, nsIObserver)
  1.2712 +
  1.2713 +static bool gShutdownObserverRegistered = false;
  1.2714 +
  1.2715 +NS_IMETHODIMP
  1.2716 +MediaStreamGraphShutdownObserver::Observe(nsISupports *aSubject,
  1.2717 +                                          const char *aTopic,
  1.2718 +                                          const char16_t *aData)
  1.2719 +{
  1.2720 +  if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
  1.2721 +    if (gGraph) {
  1.2722 +      gGraph->ForceShutDown();
  1.2723 +    }
  1.2724 +    nsContentUtils::UnregisterShutdownObserver(this);
  1.2725 +    gShutdownObserverRegistered = false;
  1.2726 +  }
  1.2727 +  return NS_OK;
  1.2728 +}
  1.2729 +
  1.2730 +MediaStreamGraph*
  1.2731 +MediaStreamGraph::GetInstance()
  1.2732 +{
  1.2733 +  NS_ASSERTION(NS_IsMainThread(), "Main thread only");
  1.2734 +
  1.2735 +  if (!gGraph) {
  1.2736 +    if (!gShutdownObserverRegistered) {
  1.2737 +      gShutdownObserverRegistered = true;
  1.2738 +      nsContentUtils::RegisterShutdownObserver(new MediaStreamGraphShutdownObserver());
  1.2739 +    }
  1.2740 +
  1.2741 +    AudioStream::InitPreferredSampleRate();
  1.2742 +
  1.2743 +    gGraph = new MediaStreamGraphImpl(true, AudioStream::PreferredSampleRate());
  1.2744 +
  1.2745 +    STREAM_LOG(PR_LOG_DEBUG, ("Starting up MediaStreamGraph %p", gGraph));
  1.2746 +  }
  1.2747 +
  1.2748 +  return gGraph;
  1.2749 +}
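// Illustrative only: main-thread callers obtain the shared realtime graph
// lazily and create streams on it; the wrapper argument is a placeholder.
//
//   SourceMediaStream* source =
//     MediaStreamGraph::GetInstance()->CreateSourceStream(aDOMWrapper);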
  1.2750 +
  1.2751 +MediaStreamGraph*
  1.2752 +MediaStreamGraph::CreateNonRealtimeInstance(TrackRate aSampleRate)
  1.2753 +{
  1.2754 +  NS_ASSERTION(NS_IsMainThread(), "Main thread only");
  1.2755 +
  1.2756 +  MediaStreamGraphImpl* graph = new MediaStreamGraphImpl(false, aSampleRate);
  1.2757 +
  1.2758 +  return graph;
  1.2759 +}
  1.2760 +
  1.2761 +void
  1.2762 +MediaStreamGraph::DestroyNonRealtimeInstance(MediaStreamGraph* aGraph)
  1.2763 +{
  1.2764 +  NS_ASSERTION(NS_IsMainThread(), "Main thread only");
  1.2765 +  MOZ_ASSERT(aGraph->IsNonRealtime(), "Should not destroy the global graph here");
  1.2766 +
  1.2767 +  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph);
  1.2768 +  if (graph->mForceShutDown)
  1.2769 +    return; // already done
  1.2770 +
  1.2771 +  if (!graph->mNonRealtimeProcessing) {
  1.2772 +    // Start the graph, but don't produce anything
  1.2773 +    graph->StartNonRealtimeProcessing(1, 0);
  1.2774 +  }
  1.2775 +  graph->ForceShutDown();
  1.2776 +}
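// Illustrative lifecycle of an offline (non-realtime) graph; the rate and
// tick count below are placeholders:
//
//   MediaStreamGraph* g = MediaStreamGraph::CreateNonRealtimeInstance(44100);
//   // ... build streams and ports on g ...
//   g->StartNonRealtimeProcessing(44100, ticksToRender);
//   // ... once the bounded processing has completed ...
//   MediaStreamGraph::DestroyNonRealtimeInstance(g);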
  1.2777 +
  1.2778 +NS_IMPL_ISUPPORTS(MediaStreamGraphImpl, nsIMemoryReporter)
  1.2779 +
  1.2780 +struct ArrayClearer
  1.2781 +{
  1.2782 +  ArrayClearer(nsTArray<AudioNodeSizes>& aArray) : mArray(aArray) {}
  1.2783 +  ~ArrayClearer() { mArray.Clear(); }
  1.2784 +  nsTArray<AudioNodeSizes>& mArray;
  1.2785 +};
  1.2786 +
  1.2787 +NS_IMETHODIMP
  1.2788 +MediaStreamGraphImpl::CollectReports(nsIHandleReportCallback* aHandleReport,
  1.2789 +                                     nsISupports* aData)
  1.2790 +{
  1.2791 +  // Clears out the report array after we're done with it.
  1.2792 +  ArrayClearer reportCleanup(mAudioStreamSizes);
  1.2793 +
  1.2794 +  {
  1.2795 +    MonitorAutoLock memoryReportLock(mMemoryReportMonitor);
  1.2796 +    mNeedsMemoryReport = true;
  1.2797 +
  1.2798 +    {
  1.2799 +      // Wake up the MSG thread.
  1.2800 +      MonitorAutoLock monitorLock(mMonitor);
  1.2801 +      EnsureImmediateWakeUpLocked(monitorLock);
  1.2802 +    }
  1.2803 +
  1.2804 +    // Wait for the report to complete.
  1.2805 +    nsresult rv;
  1.2806 +    while ((rv = memoryReportLock.Wait()) != NS_OK) {
  1.2807 +      if (PR_GetError() != PR_PENDING_INTERRUPT_ERROR) {
  1.2808 +        return rv;
  1.2809 +      }
  1.2810 +    }
  1.2811 +  }
  1.2812 +
  1.2813 +#define REPORT(_path, _amount, _desc)                                       \
  1.2814 +  do {                                                                      \
  1.2815 +    nsresult rv;                                                            \
  1.2816 +    rv = aHandleReport->Callback(EmptyCString(), _path,                     \
  1.2817 +                                 KIND_HEAP, UNITS_BYTES, _amount,           \
  1.2818 +                                 NS_LITERAL_CSTRING(_desc), aData);         \
  1.2819 +    NS_ENSURE_SUCCESS(rv, rv);                                              \
  1.2820 +  } while (0)
  1.2821 +
  1.2822 +  for (size_t i = 0; i < mAudioStreamSizes.Length(); i++) {
  1.2823 +    const AudioNodeSizes& usage = mAudioStreamSizes[i];
  1.2824 +    const char* const nodeType = usage.mNodeType.get();
  1.2825 +
  1.2826 +    nsPrintfCString domNodePath("explicit/webaudio/audio-node/%s/dom-nodes",
  1.2827 +                                  nodeType);
  1.2828 +    REPORT(domNodePath, usage.mDomNode,
  1.2829 +           "Memory used by AudioNode DOM objects (Web Audio).");
  1.2830 +
  1.2831 +    nsPrintfCString enginePath("explicit/webaudio/audio-node/%s/engine-objects",
  1.2832 +                                nodeType);
  1.2833 +    REPORT(enginePath, usage.mEngine,
  1.2834 +           "Memory used by AudioNode engine objects (Web Audio).");
  1.2835 +
  1.2836 +    nsPrintfCString streamPath("explicit/webaudio/audio-node/%s/stream-objects",
  1.2837 +                                nodeType);
  1.2838 +    REPORT(streamPath, usage.mStream,
  1.2839 +           "Memory used by AudioNode stream objects (Web Audio).");
  1.2840 +
  1.2841 +  }
  1.2842 +
  1.2843 +#undef REPORT
  1.2844 +
  1.2845 +  return NS_OK;
  1.2846 +}
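// Editorial note on the handshake above: CollectReports() runs on the main
// thread. It sets mNeedsMemoryReport under mMemoryReportMonitor, wakes the
// graph thread, and then waits on the same monitor; the graph thread (earlier
// in this file) is expected to fill in mAudioStreamSizes and notify the
// monitor, after which the per-node-type rows are reported and the array is
// cleared by ArrayClearer.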
  1.2847 +
  1.2848 +SourceMediaStream*
  1.2849 +MediaStreamGraph::CreateSourceStream(DOMMediaStream* aWrapper)
  1.2850 +{
  1.2851 +  SourceMediaStream* stream = new SourceMediaStream(aWrapper);
  1.2852 +  NS_ADDREF(stream);
  1.2853 +  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
  1.2854 +  stream->SetGraphImpl(graph);
  1.2855 +  graph->AppendMessage(new CreateMessage(stream));
  1.2856 +  return stream;
  1.2857 +}
  1.2858 +
  1.2859 +ProcessedMediaStream*
  1.2860 +MediaStreamGraph::CreateTrackUnionStream(DOMMediaStream* aWrapper)
  1.2861 +{
  1.2862 +  TrackUnionStream* stream = new TrackUnionStream(aWrapper);
  1.2863 +  NS_ADDREF(stream);
  1.2864 +  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
  1.2865 +  stream->SetGraphImpl(graph);
  1.2866 +  graph->AppendMessage(new CreateMessage(stream));
  1.2867 +  return stream;
  1.2868 +}
  1.2869 +
  1.2870 +AudioNodeExternalInputStream*
  1.2871 +MediaStreamGraph::CreateAudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate)
  1.2872 +{
  1.2873 +  MOZ_ASSERT(NS_IsMainThread());
  1.2874 +  if (!aSampleRate) {
  1.2875 +    aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate();
  1.2876 +  }
  1.2877 +  AudioNodeExternalInputStream* stream = new AudioNodeExternalInputStream(aEngine, aSampleRate);
  1.2878 +  NS_ADDREF(stream);
  1.2879 +  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
  1.2880 +  stream->SetGraphImpl(graph);
  1.2881 +  graph->AppendMessage(new CreateMessage(stream));
  1.2882 +  return stream;
  1.2883 +}
  1.2884 +
  1.2885 +AudioNodeStream*
  1.2886 +MediaStreamGraph::CreateAudioNodeStream(AudioNodeEngine* aEngine,
  1.2887 +                                        AudioNodeStreamKind aKind,
  1.2888 +                                        TrackRate aSampleRate)
  1.2889 +{
  1.2890 +  MOZ_ASSERT(NS_IsMainThread());
  1.2891 +  if (!aSampleRate) {
  1.2892 +    aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate();
  1.2893 +  }
  1.2894 +  AudioNodeStream* stream = new AudioNodeStream(aEngine, aKind, aSampleRate);
  1.2895 +  NS_ADDREF(stream);
  1.2896 +  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
  1.2897 +  stream->SetGraphImpl(graph);
  1.2898 +  if (aEngine->HasNode()) {
  1.2899 +    stream->SetChannelMixingParametersImpl(aEngine->NodeMainThread()->ChannelCount(),
  1.2900 +                                           aEngine->NodeMainThread()->ChannelCountModeValue(),
  1.2901 +                                           aEngine->NodeMainThread()->ChannelInterpretationValue());
  1.2902 +  }
  1.2903 +  graph->AppendMessage(new CreateMessage(stream));
  1.2904 +  return stream;
  1.2905 +}
  1.2906 +
  1.2907 +bool
  1.2908 +MediaStreamGraph::IsNonRealtime() const
  1.2909 +{
  1.2910 +  return this != gGraph;
  1.2911 +}
  1.2912 +
  1.2913 +void
  1.2914 +MediaStreamGraph::StartNonRealtimeProcessing(TrackRate aRate, uint32_t aTicksToProcess)
  1.2915 +{
  1.2916 +  NS_ASSERTION(NS_IsMainThread(), "main thread only");
  1.2917 +
  1.2918 +  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
  1.2919 +  NS_ASSERTION(!graph->mRealtime, "non-realtime only");
  1.2920 +
  1.2921 +  if (graph->mNonRealtimeProcessing)
  1.2922 +    return;
  1.2923 +  graph->mEndTime = graph->mCurrentTime + TicksToTimeRoundUp(aRate, aTicksToProcess);
  1.2924 +  graph->mNonRealtimeProcessing = true;
  1.2925 +  graph->EnsureRunInStableState();
  1.2926 +}
  1.2927 +
  1.2928 +void
  1.2929 +ProcessedMediaStream::AddInput(MediaInputPort* aPort)
  1.2930 +{
  1.2931 +  mInputs.AppendElement(aPort);
  1.2932 +  GraphImpl()->SetStreamOrderDirty();
  1.2933 +}
  1.2934 +
  1.2935 +}
