|
1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/ |
|
2 /* This Source Code Form is subject to the terms of the Mozilla Public |
|
3 * License, v. 2.0. If a copy of the MPL was not distributed with this file, |
|
4 * You can obtain one at http://mozilla.org/MPL/2.0/. */ |
|
5 |
|
6 #include "MediaStreamGraphImpl.h" |
|
7 #include "mozilla/LinkedList.h" |
|
8 #include "mozilla/MathAlgorithms.h" |
|
9 #include "mozilla/unused.h" |
|
10 |
|
11 #include "AudioSegment.h" |
|
12 #include "VideoSegment.h" |
|
13 #include "nsContentUtils.h" |
|
14 #include "nsIAppShell.h" |
|
15 #include "nsIObserver.h" |
|
16 #include "nsPrintfCString.h" |
|
17 #include "nsServiceManagerUtils.h" |
|
18 #include "nsWidgetsCID.h" |
|
19 #include "prerror.h" |
|
20 #include "prlog.h" |
|
21 #include "mozilla/Attributes.h" |
|
22 #include "TrackUnionStream.h" |
|
23 #include "ImageContainer.h" |
|
24 #include "AudioChannelService.h" |
|
25 #include "AudioNodeEngine.h" |
|
26 #include "AudioNodeStream.h" |
|
27 #include "AudioNodeExternalInputStream.h" |
|
28 #include <algorithm> |
|
29 #include "DOMMediaStream.h" |
|
30 #include "GeckoProfiler.h" |
|
31 #include "mozilla/unused.h" |
|
32 #include "speex/speex_resampler.h" |
|
33 #ifdef MOZ_WEBRTC |
|
34 #include "AudioOutputObserver.h" |
|
35 #endif |
|
36 |
|
37 using namespace mozilla::layers; |
|
38 using namespace mozilla::dom; |
|
39 using namespace mozilla::gfx; |
|
40 |
|
41 namespace mozilla { |
|
42 |
|
43 #ifdef PR_LOGGING |
|
44 PRLogModuleInfo* gMediaStreamGraphLog; |
|
45 #define STREAM_LOG(type, msg) PR_LOG(gMediaStreamGraphLog, type, msg) |
|
46 #else |
|
47 #define STREAM_LOG(type, msg) |
|
48 #endif |
|
49 |
|
50 /** |
|
51 * The singleton graph instance. |
|
52 */ |
|
53 static MediaStreamGraphImpl* gGraph; |
|
54 |
|
MediaStreamGraphImpl::~MediaStreamGraphImpl()
{
  // By destruction time every stream must already have been removed via
  // messages from the main thread; a non-empty graph here means a leak or
  // a shutdown-ordering bug.
  NS_ASSERTION(IsEmpty(),
               "All streams should have been destroyed by messages from the main thread");
  STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p destroyed", this));
}
|
61 |
|
62 |
|
63 StreamTime |
|
64 MediaStreamGraphImpl::GetDesiredBufferEnd(MediaStream* aStream) |
|
65 { |
|
66 StreamTime current = mCurrentTime - aStream->mBufferStartTime; |
|
67 // When waking up media decoders, we need a longer safety margin, as it can |
|
68 // take more time to get new samples. A factor of two seem to work. |
|
69 return current + |
|
70 2 * MillisecondsToMediaTime(std::max(AUDIO_TARGET_MS, VIDEO_TARGET_MS)); |
|
71 } |
|
72 |
|
// Marks aStream as finished and queues the follow-up work needed for
// listeners to observe the end of the stream. Safe to call more than once;
// subsequent calls are no-ops.
void
MediaStreamGraphImpl::FinishStream(MediaStream* aStream)
{
  if (aStream->mFinished)
    return;
  STREAM_LOG(PR_LOG_DEBUG, ("MediaStream %p will finish", aStream));
  aStream->mFinished = true;
  // No further tracks can appear on a finished stream; close the buffer's
  // known-tracks window permanently.
  aStream->mBuffer.AdvanceKnownTracksTime(STREAM_TIME_MAX);
  // Force at least one more iteration of the control loop, since we rely
  // on UpdateCurrentTime to notify our listeners once the stream end
  // has been reached.
  EnsureNextIteration();

  SetStreamOrderDirty();
}
|
88 |
|
// Adds aStream to the graph, taking over the caller's reference.
void
MediaStreamGraphImpl::AddStream(MediaStream* aStream)
{
  // The stream's buffer starts "now": all of its stream times are measured
  // relative to this graph time.
  aStream->mBufferStartTime = mCurrentTime;
  // Adopt the caller's reference directly into mStreams (no extra AddRef).
  *mStreams.AppendElement() = already_AddRefed<MediaStream>(aStream);
  STREAM_LOG(PR_LOG_DEBUG, ("Adding media stream %p to the graph", aStream));

  SetStreamOrderDirty();
}
|
98 |
|
99 void |
|
100 MediaStreamGraphImpl::RemoveStream(MediaStream* aStream) |
|
101 { |
|
102 // Remove references in mStreamUpdates before we allow aStream to die. |
|
103 // Pending updates are not needed (since the main thread has already given |
|
104 // up the stream) so we will just drop them. |
|
105 { |
|
106 MonitorAutoLock lock(mMonitor); |
|
107 for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) { |
|
108 if (mStreamUpdates[i].mStream == aStream) { |
|
109 mStreamUpdates[i].mStream = nullptr; |
|
110 } |
|
111 } |
|
112 } |
|
113 |
|
114 // Ensure that mMixer is updated when necessary. |
|
115 SetStreamOrderDirty(); |
|
116 |
|
117 // This unrefs the stream, probably destroying it |
|
118 mStreams.RemoveElement(aStream); |
|
119 |
|
120 STREAM_LOG(PR_LOG_DEBUG, ("Removing media stream %p from the graph", aStream)); |
|
121 } |
|
122 |
|
123 void |
|
124 MediaStreamGraphImpl::UpdateConsumptionState(SourceMediaStream* aStream) |
|
125 { |
|
126 MediaStreamListener::Consumption state = |
|
127 aStream->mIsConsumed ? MediaStreamListener::CONSUMED |
|
128 : MediaStreamListener::NOT_CONSUMED; |
|
129 if (state != aStream->mLastConsumptionState) { |
|
130 aStream->mLastConsumptionState = state; |
|
131 for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) { |
|
132 MediaStreamListener* l = aStream->mListeners[j]; |
|
133 l->NotifyConsumptionChanged(this, state); |
|
134 } |
|
135 } |
|
136 } |
|
137 |
|
// Pulls pending data into aStream and applies its queued track updates
// (create / append / end). Runs with aStream->mMutex held, temporarily
// unlocking around each NotifyPull callback. Sets *aEnsureNextIteration
// when listeners were asked for more data than is currently buffered.
void
MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream,
                                          GraphTime aDesiredUpToTime,
                                          bool* aEnsureNextIteration)
{
  bool finished;
  {
    MutexAutoLock lock(aStream->mMutex);
    if (aStream->mPullEnabled && !aStream->mFinished &&
        !aStream->mListeners.IsEmpty()) {
      // Compute how much stream time we'll need assuming we don't block
      // the stream at all between mBlockingDecisionsMadeUntilTime and
      // aDesiredUpToTime.
      StreamTime t =
        GraphTimeToStreamTime(aStream, mStateComputedTime) +
        (aDesiredUpToTime - mStateComputedTime);
      STREAM_LOG(PR_LOG_DEBUG+1, ("Calling NotifyPull aStream=%p t=%f current end=%f", aStream,
                                  MediaTimeToSeconds(t),
                                  MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
      if (t > aStream->mBuffer.GetEnd()) {
        *aEnsureNextIteration = true;
#ifdef DEBUG
        // NOTE(review): this branch looks unreachable — the enclosing `if`
        // above already requires !aStream->mListeners.IsEmpty(). Confirm
        // whether it was meant to guard a different condition.
        if (aStream->mListeners.Length() == 0) {
          STREAM_LOG(PR_LOG_ERROR, ("No listeners in NotifyPull aStream=%p desired=%f current end=%f",
                                    aStream, MediaTimeToSeconds(t),
                                    MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
          aStream->DumpTrackInfo();
        }
#endif
        for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
          MediaStreamListener* l = aStream->mListeners[j];
          {
            // Release the stream's mutex while calling out to the listener,
            // then re-acquire it for the next iteration.
            MutexAutoUnlock unlock(aStream->mMutex);
            l->NotifyPull(this, t);
          }
        }
      }
    }
    finished = aStream->mUpdateFinished;
    // Iterate newest-first so RemoveElementAt(i) below doesn't disturb
    // indices we haven't visited yet.
    for (int32_t i = aStream->mUpdateTracks.Length() - 1; i >= 0; --i) {
      SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
      aStream->ApplyTrackDisabling(data->mID, data->mData);
      for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
        MediaStreamListener* l = aStream->mListeners[j];
        // For a brand-new track the offset is its start time; for an
        // existing track it is the current end of the track's segment.
        TrackTicks offset = (data->mCommands & SourceMediaStream::TRACK_CREATE)
            ? data->mStart : aStream->mBuffer.FindTrack(data->mID)->GetSegment()->GetDuration();
        l->NotifyQueuedTrackChanges(this, data->mID, data->mOutputRate,
                                    offset, data->mCommands, *data->mData);
      }
      if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
        MediaSegment* segment = data->mData.forget();
        STREAM_LOG(PR_LOG_DEBUG, ("SourceMediaStream %p creating track %d, rate %d, start %lld, initial end %lld",
                                  aStream, data->mID, data->mOutputRate, int64_t(data->mStart),
                                  int64_t(segment->GetDuration())));

        aStream->mBuffer.AddTrack(data->mID, data->mOutputRate, data->mStart, segment);
        // The track has taken ownership of data->mData, so let's replace
        // data->mData with an empty clone.
        data->mData = segment->CreateEmptyClone();
        data->mCommands &= ~SourceMediaStream::TRACK_CREATE;
      } else if (data->mData->GetDuration() > 0) {
        MediaSegment* dest = aStream->mBuffer.FindTrack(data->mID)->GetSegment();
        STREAM_LOG(PR_LOG_DEBUG+1, ("SourceMediaStream %p track %d, advancing end from %lld to %lld",
                                    aStream, data->mID,
                                    int64_t(dest->GetDuration()),
                                    int64_t(dest->GetDuration() + data->mData->GetDuration())));
        dest->AppendFrom(data->mData);
      }
      if (data->mCommands & SourceMediaStream::TRACK_END) {
        aStream->mBuffer.FindTrack(data->mID)->SetEnded();
        aStream->mUpdateTracks.RemoveElementAt(i);
      }
    }
    if (!aStream->mFinished) {
      aStream->mBuffer.AdvanceKnownTracksTime(aStream->mUpdateKnownTracksTime);
    }
  }
  if (aStream->mBuffer.GetEnd() > 0) {
    aStream->mHasCurrentData = true;
  }
  if (finished) {
    FinishStream(aStream);
  }
}
|
222 |
|
// Recomputes, for each pending track of aStream, whether enough data is
// buffered to reach the desired buffer end, and fires the runnables that
// were registered to run when a track does not have enough data.
void
MediaStreamGraphImpl::UpdateBufferSufficiencyState(SourceMediaStream* aStream)
{
  StreamTime desiredEnd = GetDesiredBufferEnd(aStream);
  nsTArray<SourceMediaStream::ThreadAndRunnable> runnables;

  {
    MutexAutoLock lock(aStream->mMutex);
    for (uint32_t i = 0; i < aStream->mUpdateTracks.Length(); ++i) {
      SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
      if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
        // This track hasn't been created yet, so we have no sufficiency
        // data. The track will be created in the next iteration of the
        // control loop and then we'll fire insufficiency notifications
        // if necessary.
        continue;
      }
      if (data->mCommands & SourceMediaStream::TRACK_END) {
        // This track will end, so no point in firing not-enough-data
        // callbacks.
        continue;
      }
      StreamBuffer::Track* track = aStream->mBuffer.FindTrack(data->mID);
      // Note that track->IsEnded() must be false, otherwise we would have
      // removed the track from mUpdateTracks already.
      NS_ASSERTION(!track->IsEnded(), "What is this track doing here?");
      data->mHaveEnough = track->GetEndTimeRoundDown() >= desiredEnd;
      if (!data->mHaveEnough) {
        // Collect the registered callbacks while the lock is held; they
        // are dispatched below, after the lock is released.
        runnables.MoveElementsFrom(data->mDispatchWhenNotEnough);
      }
    }
  }

  // Dispatch outside aStream's mutex scope.
  for (uint32_t i = 0; i < runnables.Length(); ++i) {
    runnables[i].mTarget->Dispatch(runnables[i].mRunnable, 0);
  }
}
|
260 |
|
// Converts a graph time to aStream's stream time. Intervals during which
// the stream was blocked do not advance stream time, so for future times
// this walks the stream's blocking intervals and accumulates only the
// unblocked portions. Only valid up to mStateComputedTime.
StreamTime
MediaStreamGraphImpl::GraphTimeToStreamTime(MediaStream* aStream,
                                            GraphTime aTime)
{
  NS_ASSERTION(aTime <= mStateComputedTime,
               "Don't ask about times where we haven't made blocking decisions yet");
  if (aTime <= mCurrentTime) {
    // For the past, mBufferStartTime already incorporates all blocked time.
    return std::max<StreamTime>(0, aTime - aStream->mBufferStartTime);
  }
  GraphTime t = mCurrentTime;
  StreamTime s = t - aStream->mBufferStartTime;
  while (t < aTime) {
    GraphTime end;
    if (!aStream->mBlocked.GetAt(t, &end)) {
      // Unblocked interval [t, min(aTime, end)) counts toward stream time.
      s += std::min(aTime, end) - t;
    }
    t = end;
  }
  return std::max<StreamTime>(0, s);
}
|
281 |
|
282 StreamTime |
|
283 MediaStreamGraphImpl::GraphTimeToStreamTimeOptimistic(MediaStream* aStream, |
|
284 GraphTime aTime) |
|
285 { |
|
286 GraphTime computedUpToTime = std::min(mStateComputedTime, aTime); |
|
287 StreamTime s = GraphTimeToStreamTime(aStream, computedUpToTime); |
|
288 return s + (aTime - computedUpToTime); |
|
289 } |
|
290 |
|
// Converts aStream's stream time to graph time: the graph time at which
// the stream will have consumed aTime of media data. Blocked intervals
// consume graph time but no stream time, so this walks forward from
// mCurrentTime, skipping blocked intervals. With
// INCLUDE_TRAILING_BLOCKED_INTERVAL the result extends through any blocked
// interval that immediately follows the target point.
GraphTime
MediaStreamGraphImpl::StreamTimeToGraphTime(MediaStream* aStream,
                                            StreamTime aTime, uint32_t aFlags)
{
  if (aTime >= STREAM_TIME_MAX) {
    return GRAPH_TIME_MAX;
  }
  MediaTime bufferElapsedToCurrentTime = mCurrentTime - aStream->mBufferStartTime;
  if (aTime < bufferElapsedToCurrentTime ||
      (aTime == bufferElapsedToCurrentTime && !(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL))) {
    // The requested point is already in the past; past blocked time is
    // folded into mBufferStartTime, so the conversion is a simple offset.
    return aTime + aStream->mBufferStartTime;
  }

  // Amount of stream time still to be consumed after mCurrentTime.
  MediaTime streamAmount = aTime - bufferElapsedToCurrentTime;
  NS_ASSERTION(streamAmount >= 0, "Can't answer queries before current time");

  GraphTime t = mCurrentTime;
  while (t < GRAPH_TIME_MAX) {
    if (!(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL) && streamAmount == 0) {
      return t;
    }
    bool blocked;
    GraphTime end;
    if (t < mStateComputedTime) {
      blocked = aStream->mBlocked.GetAt(t, &end);
      end = std::min(end, mStateComputedTime);
    } else {
      // Past the computed state, optimistically assume no blocking.
      blocked = false;
      end = GRAPH_TIME_MAX;
    }
    if (blocked) {
      // Blocked intervals advance graph time without consuming stream time.
      t = end;
    } else {
      if (streamAmount == 0) {
        // No more stream time to consume at time t, so we're done.
        break;
      }
      MediaTime consume = std::min(end - t, streamAmount);
      streamAmount -= consume;
      t += consume;
    }
  }
  return t;
}
|
335 |
|
336 GraphTime |
|
337 MediaStreamGraphImpl::GetAudioPosition(MediaStream* aStream) |
|
338 { |
|
339 if (aStream->mAudioOutputStreams.IsEmpty()) { |
|
340 return mCurrentTime; |
|
341 } |
|
342 int64_t positionInFrames = aStream->mAudioOutputStreams[0].mStream->GetPositionInFrames(); |
|
343 if (positionInFrames < 0) { |
|
344 return mCurrentTime; |
|
345 } |
|
346 return aStream->mAudioOutputStreams[0].mAudioPlaybackStartTime + |
|
347 TicksToTimeRoundDown(mSampleRate, |
|
348 positionInFrames); |
|
349 } |
|
350 |
|
// Advances the graph clock (wall-clock driven in realtime mode, fixed-step
// in offline mode), fires Blocked/Unblocked, NotifyOutput and
// NotifyFinished listener callbacks, and advances each stream's
// time-varying state to the new current time.
void
MediaStreamGraphImpl::UpdateCurrentTime()
{
  GraphTime prevCurrentTime, nextCurrentTime;
  if (mRealtime) {
    TimeStamp now = TimeStamp::Now();
    prevCurrentTime = mCurrentTime;
    nextCurrentTime =
      SecondsToMediaTime((now - mCurrentTimeStamp).ToSeconds()) + mCurrentTime;

    mCurrentTimeStamp = now;
    STREAM_LOG(PR_LOG_DEBUG+1, ("Updating current time to %f (real %f, mStateComputedTime %f)",
                                MediaTimeToSeconds(nextCurrentTime),
                                (now - mInitialTimeStamp).ToSeconds(),
                                MediaTimeToSeconds(mStateComputedTime)));
  } else {
    // Offline: step the clock by the fixed target period.
    prevCurrentTime = mCurrentTime;
    nextCurrentTime = mCurrentTime + MillisecondsToMediaTime(MEDIA_GRAPH_TARGET_PERIOD_MS);
    STREAM_LOG(PR_LOG_DEBUG+1, ("Updating offline current time to %f (mStateComputedTime %f)",
                                MediaTimeToSeconds(nextCurrentTime),
                                MediaTimeToSeconds(mStateComputedTime)));
  }

  // Never advance past the point blocking decisions have been computed for.
  if (mStateComputedTime < nextCurrentTime) {
    STREAM_LOG(PR_LOG_WARNING, ("Media graph global underrun detected"));
    nextCurrentTime = mStateComputedTime;
  }

  if (prevCurrentTime >= nextCurrentTime) {
    NS_ASSERTION(prevCurrentTime == nextCurrentTime, "Time can't go backwards!");
    // This could happen due to low clock resolution, maybe?
    STREAM_LOG(PR_LOG_DEBUG, ("Time did not advance"));
    // There's not much left to do here, but the code below that notifies
    // listeners that streams have ended still needs to run.
  }

  nsTArray<MediaStream*> streamsReadyToFinish;
  nsAutoTArray<bool,800> streamHasOutput;
  streamHasOutput.SetLength(mStreams.Length());
  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
    MediaStream* stream = mStreams[i];

    // Calculate blocked time and fire Blocked/Unblocked events
    GraphTime blockedTime = 0;
    GraphTime t = prevCurrentTime;
    // include |nextCurrentTime| to ensure NotifyBlockingChanged() is called
    // before NotifyFinished() when |nextCurrentTime == stream end time|
    while (t <= nextCurrentTime) {
      GraphTime end;
      bool blocked = stream->mBlocked.GetAt(t, &end);
      if (blocked) {
        blockedTime += std::min(end, nextCurrentTime) - t;
      }
      if (blocked != stream->mNotifiedBlocked) {
        for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
          MediaStreamListener* l = stream->mListeners[j];
          l->NotifyBlockingChanged(this,
              blocked ? MediaStreamListener::BLOCKED : MediaStreamListener::UNBLOCKED);
        }
        stream->mNotifiedBlocked = blocked;
      }
      t = end;
    }

    stream->AdvanceTimeVaryingValuesToCurrentTime(nextCurrentTime, blockedTime);
    // Advance mBlocked last so that implementations of
    // AdvanceTimeVaryingValuesToCurrentTime can rely on the value of mBlocked.
    stream->mBlocked.AdvanceCurrentTime(nextCurrentTime);

    // The stream produced output iff it was unblocked for some part of the
    // elapsed interval.
    streamHasOutput[i] = blockedTime < nextCurrentTime - prevCurrentTime;
    // Make this an assertion when bug 957832 is fixed.
    NS_WARN_IF_FALSE(!streamHasOutput[i] || !stream->mNotifiedFinished,
      "Shouldn't have already notified of finish *and* have output!");

    if (stream->mFinished && !stream->mNotifiedFinished) {
      streamsReadyToFinish.AppendElement(stream);
    }
    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p bufferStartTime=%f blockedTime=%f",
                                stream, MediaTimeToSeconds(stream->mBufferStartTime),
                                MediaTimeToSeconds(blockedTime)));
  }

  mCurrentTime = nextCurrentTime;

  // Do these after setting mCurrentTime so that StreamTimeToGraphTime works properly.
  for (uint32_t i = 0; i < streamHasOutput.Length(); ++i) {
    if (!streamHasOutput[i]) {
      continue;
    }
    MediaStream* stream = mStreams[i];
    for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
      MediaStreamListener* l = stream->mListeners[j];
      l->NotifyOutput(this, mCurrentTime);
    }
  }

  for (uint32_t i = 0; i < streamsReadyToFinish.Length(); ++i) {
    MediaStream* stream = streamsReadyToFinish[i];
    // The stream is fully finished when all of its track data has been played
    // out.
    if (mCurrentTime >=
        stream->StreamTimeToGraphTime(stream->GetStreamBuffer().GetAllTracksEnd())) {
      NS_WARN_IF_FALSE(stream->mNotifiedBlocked,
        "Should've notified blocked=true for a fully finished stream");
      stream->mNotifiedFinished = true;
      stream->mLastPlayedVideoFrame.SetNull();
      SetStreamOrderDirty();
      for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
        MediaStreamListener* l = stream->mListeners[j];
        l->NotifyFinished(this);
      }
    }
  }
}
|
465 |
|
// Decides whether aStream will run out of data and should therefore block
// at aTime. Returns true when the stream should block; otherwise narrows
// *aEnd so blocking is reconsidered when the buffered data runs out.
bool
MediaStreamGraphImpl::WillUnderrun(MediaStream* aStream, GraphTime aTime,
                                   GraphTime aEndBlockingDecisions, GraphTime* aEnd)
{
  // Finished streams can't underrun. ProcessedMediaStreams also can't cause
  // underrun currently, since we'll always be able to produce data for them
  // unless they block on some other stream.
  if (aStream->mFinished || aStream->AsProcessedStream()) {
    return false;
  }
  GraphTime bufferEnd =
    StreamTimeToGraphTime(aStream, aStream->GetBufferEnd(),
                          INCLUDE_TRAILING_BLOCKED_INTERVAL);
#ifdef DEBUG
  if (bufferEnd < mCurrentTime) {
    STREAM_LOG(PR_LOG_ERROR, ("MediaStream %p underrun, "
                              "bufferEnd %f < mCurrentTime %f (%lld < %lld), Streamtime %lld",
                              aStream, MediaTimeToSeconds(bufferEnd), MediaTimeToSeconds(mCurrentTime),
                              bufferEnd, mCurrentTime, aStream->GetBufferEnd()));
    aStream->DumpTrackInfo();
    NS_ASSERTION(bufferEnd >= mCurrentTime, "Buffer underran");
  }
#endif
  // We should block after bufferEnd.
  if (bufferEnd <= aTime) {
    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to data underrun, "
                                "bufferEnd %f",
                                aStream, MediaTimeToSeconds(bufferEnd)));
    return true;
  }
  // We should keep blocking if we're currently blocked and we don't have
  // data all the way through to aEndBlockingDecisions. If we don't have
  // data all the way through to aEndBlockingDecisions, we'll block soon,
  // but we might as well remain unblocked and play the data we've got while
  // we can.
  if (bufferEnd <= aEndBlockingDecisions && aStream->mBlocked.GetBefore(aTime)) {
    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to speculative data underrun, "
                                "bufferEnd %f",
                                aStream, MediaTimeToSeconds(bufferEnd)));
    return true;
  }
  // Reconsider decisions at bufferEnd
  *aEnd = std::min(*aEnd, bufferEnd);
  return false;
}
|
511 |
|
512 void |
|
513 MediaStreamGraphImpl::MarkConsumed(MediaStream* aStream) |
|
514 { |
|
515 if (aStream->mIsConsumed) { |
|
516 return; |
|
517 } |
|
518 aStream->mIsConsumed = true; |
|
519 |
|
520 ProcessedMediaStream* ps = aStream->AsProcessedStream(); |
|
521 if (!ps) { |
|
522 return; |
|
523 } |
|
524 // Mark all the inputs to this stream as consumed |
|
525 for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) { |
|
526 MarkConsumed(ps->mInputs[i]->mSource); |
|
527 } |
|
528 } |
|
529 |
|
// Depth-first ordering step: appends aStream (and, recursively, its inputs
// first) to mStreams. aStack holds the streams on the current DFS path;
// finding aStream already on the stack means we hit a cycle, in which case
// the cycle's streams are flagged mInCycle and — unless the cycle contains
// a DelayNode engine — the AudioNodeStreams in it are muted.
// Takes ownership of the aStream reference and transfers it into mStreams.
void
MediaStreamGraphImpl::UpdateStreamOrderForStream(mozilla::LinkedList<MediaStream>* aStack,
                                                 already_AddRefed<MediaStream> aStream)
{
  nsRefPtr<MediaStream> stream = aStream;
  NS_ASSERTION(!stream->mHasBeenOrdered, "stream should not have already been ordered");
  if (stream->mIsOnOrderingStack) {
    // Walk the DFS stack backwards from its top until we get back to
    // |stream|, marking every stream on the cycle.
    MediaStream* iter = aStack->getLast();
    AudioNodeStream* ns = stream->AsAudioNodeStream();
    bool delayNodePresent = ns ? ns->Engine()->AsDelayNodeEngine() != nullptr : false;
    bool cycleFound = false;
    if (iter) {
      do {
        cycleFound = true;
        // Streams on the ordering stack are processed streams (they were
        // pushed in the ps branch below).
        iter->AsProcessedStream()->mInCycle = true;
        AudioNodeStream* ns = iter->AsAudioNodeStream();
        if (ns && ns->Engine()->AsDelayNodeEngine()) {
          delayNodePresent = true;
        }
        iter = iter->getPrevious();
      } while (iter && iter != stream);
    }
    if (cycleFound && !delayNodePresent) {
      // If we have detected a cycle, the previous loop should exit with stream
      // == iter, or the node is connected to itself. Go back in the cycle and
      // mute all nodes we find, or just mute the node itself.
      if (!iter) {
        // The node is connected to itself.
        // There can't be a non-AudioNodeStream here, because only AudioNodes
        // can be self-connected.
        iter = aStack->getLast();
        MOZ_ASSERT(iter->AsAudioNodeStream());
        iter->AsAudioNodeStream()->Mute();
      } else {
        MOZ_ASSERT(iter);
        do {
          AudioNodeStream* nodeStream = iter->AsAudioNodeStream();
          if (nodeStream) {
            nodeStream->Mute();
          }
        } while((iter = iter->getNext()));
      }
    }
    return;
  }
  ProcessedMediaStream* ps = stream->AsProcessedStream();
  if (ps) {
    // Push onto the DFS stack while visiting this stream's inputs, so a
    // re-entrant visit can detect the cycle above.
    aStack->insertBack(stream);
    stream->mIsOnOrderingStack = true;
    for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
      MediaStream* source = ps->mInputs[i]->mSource;
      if (!source->mHasBeenOrdered) {
        nsRefPtr<MediaStream> s = source;
        UpdateStreamOrderForStream(aStack, s.forget());
      }
    }
    aStack->popLast();
    stream->mIsOnOrderingStack = false;
  }

  stream->mHasBeenOrdered = true;
  // Hand our reference over to mStreams.
  *mStreams.AppendElement() = stream.forget();
}
|
593 |
|
// Callback invoked by the AudioMixer with one iteration's worth of mixed
// audio. Currently it only forwards the mixed audio to gFarendObserver
// (declared in AudioOutputObserver.h), presumably as far-end input for
// echo cancellation — confirm against the observer's contract.
static void AudioMixerCallback(AudioDataValue* aMixedBuffer,
                               AudioSampleFormat aFormat,
                               uint32_t aChannels,
                               uint32_t aFrames,
                               uint32_t aSampleRate)
{
  // Need an api to register mixer callbacks, bug 989921
#ifdef MOZ_WEBRTC
  if (aFrames > 0 && aChannels > 0) {
    // XXX need Observer base class and registration API
    if (gFarendObserver) {
      gFarendObserver->InsertFarEnd(aMixedBuffer, aFrames, false,
                                    aSampleRate, aChannels, aFormat);
    }
  }
#endif
}
|
611 |
|
// Rebuilds mStreams in processing order (inputs before the streams that
// consume them), resetting per-iteration traversal state first, and
// creates or destroys mMixer depending on whether any source stream
// currently needs mixing.
void
MediaStreamGraphImpl::UpdateStreamOrder()
{
  // Move the current stream list aside; UpdateStreamOrderForStream will
  // repopulate mStreams in dependency order.
  mOldStreams.SwapElements(mStreams);
  mStreams.ClearAndRetainStorage();
  bool shouldMix = false;
  for (uint32_t i = 0; i < mOldStreams.Length(); ++i) {
    MediaStream* stream = mOldStreams[i];
    // Reset the flags used by the ordering DFS and blocking computation.
    stream->mHasBeenOrdered = false;
    stream->mIsConsumed = false;
    stream->mIsOnOrderingStack = false;
    stream->mInBlockingSet = false;
    if (stream->AsSourceStream() &&
        stream->AsSourceStream()->NeedsMixing()) {
      shouldMix = true;
    }
    ProcessedMediaStream* ps = stream->AsProcessedStream();
    if (ps) {
      ps->mInCycle = false;
      AudioNodeStream* ns = ps->AsAudioNodeStream();
      if (ns) {
        // Unmute now; UpdateStreamOrderForStream re-mutes streams that are
        // still in a cycle without a delay node.
        ns->Unmute();
      }
    }
  }

  // Create or tear down the mixer to match current need.
  if (!mMixer && shouldMix) {
    mMixer = new AudioMixer(AudioMixerCallback);
  } else if (mMixer && !shouldMix) {
    mMixer = nullptr;
  }

  mozilla::LinkedList<MediaStream> stack;
  for (uint32_t i = 0; i < mOldStreams.Length(); ++i) {
    nsRefPtr<MediaStream>& s = mOldStreams[i];
    if (s->IsIntrinsicallyConsumed()) {
      // Mark this stream and, transitively, all of its inputs as consumed.
      MarkConsumed(s);
    }
    if (!s->mHasBeenOrdered) {
      // s.forget() transfers the reference from mOldStreams into mStreams.
      UpdateStreamOrderForStream(&stack, s.forget());
    }
  }
}
|
655 |
|
// Extends blocking decisions for all streams from mStateComputedTime up to
// aEndBlockingDecisions. Streams are partitioned into sets whose blocking
// states depend on one another, and each set is computed independently.
// Schedules another iteration if any blocking decision will change.
void
MediaStreamGraphImpl::RecomputeBlocking(GraphTime aEndBlockingDecisions)
{
  bool blockingDecisionsWillChange = false;

  STREAM_LOG(PR_LOG_DEBUG+1, ("Media graph %p computing blocking for time %f",
                              this, MediaTimeToSeconds(mStateComputedTime)));
  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
    MediaStream* stream = mStreams[i];
    if (!stream->mInBlockingSet) {
      // Compute a partition of the streams containing 'stream' such that we can
      // compute the blocking status of each subset independently.
      nsAutoTArray<MediaStream*,10> streamSet;
      AddBlockingRelatedStreamsToSet(&streamSet, stream);

      // Advance interval by interval; RecomputeBlockingAt narrows |end| to
      // the next point at which a decision could differ.
      GraphTime end;
      for (GraphTime t = mStateComputedTime;
           t < aEndBlockingDecisions; t = end) {
        end = GRAPH_TIME_MAX;
        RecomputeBlockingAt(streamSet, t, aEndBlockingDecisions, &end);
        if (end < GRAPH_TIME_MAX) {
          blockingDecisionsWillChange = true;
        }
      }
    }

    GraphTime end;
    stream->mBlocked.GetAt(mCurrentTime, &end);
    if (end < GRAPH_TIME_MAX) {
      blockingDecisionsWillChange = true;
    }
  }
  STREAM_LOG(PR_LOG_DEBUG+1, ("Media graph %p computed blocking for interval %f to %f",
                              this, MediaTimeToSeconds(mStateComputedTime),
                              MediaTimeToSeconds(aEndBlockingDecisions)));
  mStateComputedTime = aEndBlockingDecisions;

  if (blockingDecisionsWillChange) {
    // Make sure we wake up to notify listeners about these changes.
    EnsureNextIteration();
  }
}
|
698 |
|
699 void |
|
700 MediaStreamGraphImpl::AddBlockingRelatedStreamsToSet(nsTArray<MediaStream*>* aStreams, |
|
701 MediaStream* aStream) |
|
702 { |
|
703 if (aStream->mInBlockingSet) |
|
704 return; |
|
705 aStream->mInBlockingSet = true; |
|
706 aStreams->AppendElement(aStream); |
|
707 for (uint32_t i = 0; i < aStream->mConsumers.Length(); ++i) { |
|
708 MediaInputPort* port = aStream->mConsumers[i]; |
|
709 if (port->mFlags & (MediaInputPort::FLAG_BLOCK_INPUT | MediaInputPort::FLAG_BLOCK_OUTPUT)) { |
|
710 AddBlockingRelatedStreamsToSet(aStreams, port->mDest); |
|
711 } |
|
712 } |
|
713 ProcessedMediaStream* ps = aStream->AsProcessedStream(); |
|
714 if (ps) { |
|
715 for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) { |
|
716 MediaInputPort* port = ps->mInputs[i]; |
|
717 if (port->mFlags & (MediaInputPort::FLAG_BLOCK_INPUT | MediaInputPort::FLAG_BLOCK_OUTPUT)) { |
|
718 AddBlockingRelatedStreamsToSet(aStreams, port->mSource); |
|
719 } |
|
720 } |
|
721 } |
|
722 } |
|
723 |
|
724 void |
|
725 MediaStreamGraphImpl::MarkStreamBlocking(MediaStream* aStream) |
|
726 { |
|
727 if (aStream->mBlockInThisPhase) |
|
728 return; |
|
729 aStream->mBlockInThisPhase = true; |
|
730 for (uint32_t i = 0; i < aStream->mConsumers.Length(); ++i) { |
|
731 MediaInputPort* port = aStream->mConsumers[i]; |
|
732 if (port->mFlags & MediaInputPort::FLAG_BLOCK_OUTPUT) { |
|
733 MarkStreamBlocking(port->mDest); |
|
734 } |
|
735 } |
|
736 ProcessedMediaStream* ps = aStream->AsProcessedStream(); |
|
737 if (ps) { |
|
738 for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) { |
|
739 MediaInputPort* port = ps->mInputs[i]; |
|
740 if (port->mFlags & MediaInputPort::FLAG_BLOCK_INPUT) { |
|
741 MarkStreamBlocking(port->mSource); |
|
742 } |
|
743 } |
|
744 } |
|
745 } |
|
746 |
|
// Computes the blocking state of every stream in aStreams at graph time
// aTime, writing the decision into each stream's mBlocked from aTime
// onward. *aEnd is narrowed to the earliest time at which any decision
// could change, so the caller knows when to recompute.
void
MediaStreamGraphImpl::RecomputeBlockingAt(const nsTArray<MediaStream*>& aStreams,
                                          GraphTime aTime,
                                          GraphTime aEndBlockingDecisions,
                                          GraphTime* aEnd)
{
  // Phase 1: reset, then mark blocking (MarkStreamBlocking propagates
  // through ports, so a stream may be marked by a neighbor's iteration).
  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
    MediaStream* stream = aStreams[i];
    stream->mBlockInThisPhase = false;
  }

  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
    MediaStream* stream = aStreams[i];

    if (stream->mFinished) {
      GraphTime endTime = StreamTimeToGraphTime(stream,
          stream->GetStreamBuffer().GetAllTracksEnd());
      if (endTime <= aTime) {
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is blocked due to being finished", stream));
        // We'll block indefinitely
        MarkStreamBlocking(stream);
        *aEnd = std::min(*aEnd, aEndBlockingDecisions);
        continue;
      } else {
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is finished, but not blocked yet (end at %f, with blocking at %f)",
                                    stream, MediaTimeToSeconds(stream->GetBufferEnd()),
                                    MediaTimeToSeconds(endTime)));
        // Still has data to play out; revisit when it runs out.
        *aEnd = std::min(*aEnd, endTime);
      }
    }

    GraphTime end;
    bool explicitBlock = stream->mExplicitBlockerCount.GetAt(aTime, &end) > 0;
    *aEnd = std::min(*aEnd, end);
    if (explicitBlock) {
      STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is blocked due to explicit blocker", stream));
      MarkStreamBlocking(stream);
      continue;
    }

    bool underrun = WillUnderrun(stream, aTime, aEndBlockingDecisions, aEnd);
    if (underrun) {
      // We'll block indefinitely
      MarkStreamBlocking(stream);
      *aEnd = std::min(*aEnd, aEndBlockingDecisions);
      continue;
    }
  }
  NS_ASSERTION(*aEnd > aTime, "Failed to advance!");

  // Phase 2: commit the decisions into each stream's blocking timeline.
  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
    MediaStream* stream = aStreams[i];
    stream->mBlocked.SetAtAndAfter(aTime, stream->mBlockInThisPhase);
  }
}
|
802 |
|
803 void |
|
804 MediaStreamGraphImpl::NotifyHasCurrentData(MediaStream* aStream) |
|
805 { |
|
806 if (!aStream->mNotifiedHasCurrentData && aStream->mHasCurrentData) { |
|
807 for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) { |
|
808 MediaStreamListener* l = aStream->mListeners[j]; |
|
809 l->NotifyHasCurrentData(this); |
|
810 } |
|
811 aStream->mNotifiedHasCurrentData = true; |
|
812 } |
|
813 } |
|
814 |
|
/**
 * Ensure aStream owns one AudioOutputStream per audio track that should be
 * playing: create output streams for tracks that start before
 * mStateComputedTime, and shut down output streams whose track was not seen
 * in this pass (including all of them when the stream has no audio outputs).
 */
void
MediaStreamGraphImpl::CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTime,
                                                  MediaStream* aStream)
{
  MOZ_ASSERT(mRealtime, "Should only attempt to create audio streams in real-time mode");

  // One flag per existing AudioOutputStream; entries still false at the end
  // mark output streams that no longer have a live track and get destroyed.
  nsAutoTArray<bool,2> audioOutputStreamsFound;
  for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
    audioOutputStreamsFound.AppendElement(false);
  }

  if (!aStream->mAudioOutputs.IsEmpty()) {
    for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::AUDIO);
         !tracks.IsEnded(); tracks.Next()) {
      // Look for an existing output stream bound to this track ID.
      uint32_t i;
      for (i = 0; i < audioOutputStreamsFound.Length(); ++i) {
        if (aStream->mAudioOutputStreams[i].mTrackID == tracks->GetID()) {
          break;
        }
      }
      if (i < audioOutputStreamsFound.Length()) {
        audioOutputStreamsFound[i] = true;
      } else {
        // No output stream created for this track yet. Check if it's time to
        // create one.
        GraphTime startTime =
          StreamTimeToGraphTime(aStream, tracks->GetStartTimeRoundDown(),
                                INCLUDE_TRAILING_BLOCKED_INTERVAL);
        if (startTime >= mStateComputedTime) {
          // The stream wants to play audio, but nothing will play for the forseeable
          // future, so don't create the stream.
          continue;
        }

        // Allocating a AudioStream would be slow, so we finish the Init async
        MediaStream::AudioOutputStream* audioOutputStream =
          aStream->mAudioOutputStreams.AppendElement();
        audioOutputStream->mAudioPlaybackStartTime = aAudioOutputStartTime;
        audioOutputStream->mBlockedAudioTime = 0;
        audioOutputStream->mLastTickWritten = 0;
        audioOutputStream->mStream = new AudioStream();
        // XXX for now, allocate stereo output. But we need to fix this to
        // match the system's ideal channel configuration.
        // NOTE: we presume this is either fast or async-under-the-covers
        audioOutputStream->mStream->Init(2, mSampleRate,
                                         aStream->mAudioChannelType,
                                         AudioStream::LowLatency);
        audioOutputStream->mTrackID = tracks->GetID();

        LogLatency(AsyncLatencyLogger::AudioStreamCreate,
                   reinterpret_cast<uint64_t>(aStream),
                   reinterpret_cast<int64_t>(audioOutputStream->mStream.get()));
      }
    }
  }

  // Iterate backwards so RemoveElementAt does not shift entries we have not
  // examined yet.
  for (int32_t i = audioOutputStreamsFound.Length() - 1; i >= 0; --i) {
    if (!audioOutputStreamsFound[i]) {
      aStream->mAudioOutputStreams[i].mStream->Shutdown();
      aStream->mAudioOutputStreams.RemoveElementAt(i);
    }
  }
}
|
878 |
|
879 TrackTicks |
|
880 MediaStreamGraphImpl::PlayAudio(MediaStream* aStream, |
|
881 GraphTime aFrom, GraphTime aTo) |
|
882 { |
|
883 MOZ_ASSERT(mRealtime, "Should only attempt to play audio in realtime mode"); |
|
884 |
|
885 TrackTicks ticksWritten = 0; |
|
886 // We compute the number of needed ticks by converting a difference of graph |
|
887 // time rather than by substracting two converted stream time to ensure that |
|
888 // the rounding between {Graph,Stream}Time and track ticks is not dependant |
|
889 // on the absolute value of the {Graph,Stream}Time, and so that number of |
|
890 // ticks to play is the same for each cycle. |
|
891 TrackTicks ticksNeeded = TimeToTicksRoundDown(mSampleRate, aTo) - TimeToTicksRoundDown(mSampleRate, aFrom); |
|
892 |
|
893 if (aStream->mAudioOutputStreams.IsEmpty()) { |
|
894 return 0; |
|
895 } |
|
896 |
|
897 // When we're playing multiple copies of this stream at the same time, they're |
|
898 // perfectly correlated so adding volumes is the right thing to do. |
|
899 float volume = 0.0f; |
|
900 for (uint32_t i = 0; i < aStream->mAudioOutputs.Length(); ++i) { |
|
901 volume += aStream->mAudioOutputs[i].mVolume; |
|
902 } |
|
903 |
|
904 for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) { |
|
905 MediaStream::AudioOutputStream& audioOutput = aStream->mAudioOutputStreams[i]; |
|
906 StreamBuffer::Track* track = aStream->mBuffer.FindTrack(audioOutput.mTrackID); |
|
907 AudioSegment* audio = track->Get<AudioSegment>(); |
|
908 AudioSegment output; |
|
909 MOZ_ASSERT(track->GetRate() == mSampleRate); |
|
910 |
|
911 // offset and audioOutput.mLastTickWritten can differ by at most one sample, |
|
912 // because of the rounding issue. We track that to ensure we don't skip a |
|
913 // sample. One sample may be played twice, but this should not happen |
|
914 // again during an unblocked sequence of track samples. |
|
915 TrackTicks offset = track->TimeToTicksRoundDown(GraphTimeToStreamTime(aStream, aFrom)); |
|
916 if (audioOutput.mLastTickWritten && |
|
917 audioOutput.mLastTickWritten != offset) { |
|
918 // If there is a global underrun of the MSG, this property won't hold, and |
|
919 // we reset the sample count tracking. |
|
920 if (offset - audioOutput.mLastTickWritten == 1) { |
|
921 offset = audioOutput.mLastTickWritten; |
|
922 } |
|
923 } |
|
924 |
|
925 // We don't update aStream->mBufferStartTime here to account for |
|
926 // time spent blocked. Instead, we'll update it in UpdateCurrentTime after the |
|
927 // blocked period has completed. But we do need to make sure we play from the |
|
928 // right offsets in the stream buffer, even if we've already written silence for |
|
929 // some amount of blocked time after the current time. |
|
930 GraphTime t = aFrom; |
|
931 while (ticksNeeded) { |
|
932 GraphTime end; |
|
933 bool blocked = aStream->mBlocked.GetAt(t, &end); |
|
934 end = std::min(end, aTo); |
|
935 |
|
936 // Check how many ticks of sound we can provide if we are blocked some |
|
937 // time in the middle of this cycle. |
|
938 TrackTicks toWrite = 0; |
|
939 if (end >= aTo) { |
|
940 toWrite = ticksNeeded; |
|
941 } else { |
|
942 toWrite = TimeToTicksRoundDown(mSampleRate, end - aFrom); |
|
943 } |
|
944 ticksNeeded -= toWrite; |
|
945 |
|
946 if (blocked) { |
|
947 output.InsertNullDataAtStart(toWrite); |
|
948 STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld blocking-silence samples for %f to %f (%ld to %ld)\n", |
|
949 aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end), |
|
950 offset, offset + toWrite)); |
|
951 } else { |
|
952 TrackTicks endTicksNeeded = offset + toWrite; |
|
953 TrackTicks endTicksAvailable = audio->GetDuration(); |
|
954 STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld samples for %f to %f (samples %ld to %ld)\n", |
|
955 aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end), |
|
956 offset, endTicksNeeded)); |
|
957 |
|
958 if (endTicksNeeded <= endTicksAvailable) { |
|
959 output.AppendSlice(*audio, offset, endTicksNeeded); |
|
960 offset = endTicksNeeded; |
|
961 } else { |
|
962 MOZ_ASSERT(track->IsEnded(), "Not enough data, and track not ended."); |
|
963 // If we are at the end of the track, maybe write the remaining |
|
964 // samples, and pad with/output silence. |
|
965 if (endTicksNeeded > endTicksAvailable && |
|
966 offset < endTicksAvailable) { |
|
967 output.AppendSlice(*audio, offset, endTicksAvailable); |
|
968 toWrite -= endTicksAvailable - offset; |
|
969 offset = endTicksAvailable; |
|
970 } |
|
971 output.AppendNullData(toWrite); |
|
972 } |
|
973 output.ApplyVolume(volume); |
|
974 } |
|
975 t = end; |
|
976 } |
|
977 audioOutput.mLastTickWritten = offset; |
|
978 |
|
979 // Need unique id for stream & track - and we want it to match the inserter |
|
980 output.WriteTo(LATENCY_STREAM_ID(aStream, track->GetID()), |
|
981 audioOutput.mStream, mMixer); |
|
982 } |
|
983 return ticksWritten; |
|
984 } |
|
985 |
|
986 static void |
|
987 SetImageToBlackPixel(PlanarYCbCrImage* aImage) |
|
988 { |
|
989 uint8_t blackPixel[] = { 0x10, 0x80, 0x80 }; |
|
990 |
|
991 PlanarYCbCrData data; |
|
992 data.mYChannel = blackPixel; |
|
993 data.mCbChannel = blackPixel + 1; |
|
994 data.mCrChannel = blackPixel + 2; |
|
995 data.mYStride = data.mCbCrStride = 1; |
|
996 data.mPicSize = data.mYSize = data.mCbCrSize = IntSize(1, 1); |
|
997 aImage->SetData(data); |
|
998 } |
|
999 |
|
/**
 * Push the current video frame of aStream (slightly ahead of mCurrentTime)
 * to every VideoFrameContainer output and schedule a main-thread repaint.
 * Does nothing if there are no video outputs or the frame was already played.
 */
void
MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)
{
  MOZ_ASSERT(mRealtime, "Should only attempt to play video in realtime mode");

  if (aStream->mVideoOutputs.IsEmpty())
    return;

  // Display the next frame a bit early. This is better than letting the current
  // frame be displayed for too long.
  // NOTE(review): MEDIA_GRAPH_TARGET_PERIOD_MS is a millisecond constant added
  // directly to a GraphTime here, while RunThread converts such constants with
  // MillisecondsToMediaTime — confirm the units are intentional.
  GraphTime framePosition = mCurrentTime + MEDIA_GRAPH_TARGET_PERIOD_MS;
  NS_ASSERTION(framePosition >= aStream->mBufferStartTime, "frame position before buffer?");
  StreamTime frameBufferTime = GraphTimeToStreamTime(aStream, framePosition);

  // The loop leaves 'frame'/'start'/'track' set from the *last* video track
  // that has a frame with an image at this time. 'start' and 'track' are
  // deliberately only read below when 'frame' is non-null.
  TrackTicks start;
  const VideoFrame* frame = nullptr;
  StreamBuffer::Track* track;
  for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::VIDEO);
       !tracks.IsEnded(); tracks.Next()) {
    VideoSegment* segment = tracks->Get<VideoSegment>();
    TrackTicks thisStart;
    const VideoFrame* thisFrame =
      segment->GetFrameAt(tracks->TimeToTicksRoundDown(frameBufferTime), &thisStart);
    if (thisFrame && thisFrame->GetImage()) {
      start = thisStart;
      frame = thisFrame;
      track = tracks.get();
    }
  }
  // Nothing to do if there is no frame, or it is the one we last displayed.
  if (!frame || *frame == aStream->mLastPlayedVideoFrame)
    return;

  STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing video frame %p (%dx%d)",
                              aStream, frame->GetImage(), frame->GetIntrinsicSize().width,
                              frame->GetIntrinsicSize().height));
  // Convert the frame's start time into a wall-clock compositing target.
  GraphTime startTime = StreamTimeToGraphTime(aStream,
      track->TicksToTimeRoundDown(start), INCLUDE_TRAILING_BLOCKED_INTERVAL);
  TimeStamp targetTime = mCurrentTimeStamp +
      TimeDuration::FromMilliseconds(double(startTime - mCurrentTime));
  for (uint32_t i = 0; i < aStream->mVideoOutputs.Length(); ++i) {
    VideoFrameContainer* output = aStream->mVideoOutputs[i];

    if (frame->GetForceBlack()) {
      // Substitute a 1x1 black image for the real frame.
      nsRefPtr<Image> image =
        output->GetImageContainer()->CreateImage(ImageFormat::PLANAR_YCBCR);
      if (image) {
        // Sets the image to a single black pixel, which will be scaled to fill
        // the rendered size.
        SetImageToBlackPixel(static_cast<PlanarYCbCrImage*>(image.get()));
      }
      output->SetCurrentFrame(frame->GetIntrinsicSize(), image,
                              targetTime);
    } else {
      output->SetCurrentFrame(frame->GetIntrinsicSize(), frame->GetImage(),
                              targetTime);
    }

    // Invalidation (repaint) must happen on the main thread.
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(output, &VideoFrameContainer::Invalidate);
    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
  }
  if (!aStream->mNotifiedFinished) {
    aStream->mLastPlayedVideoFrame = *frame;
  }
}
|
1065 |
|
1066 bool |
|
1067 MediaStreamGraphImpl::ShouldUpdateMainThread() |
|
1068 { |
|
1069 if (mRealtime) { |
|
1070 return true; |
|
1071 } |
|
1072 |
|
1073 TimeStamp now = TimeStamp::Now(); |
|
1074 if ((now - mLastMainThreadUpdate).ToMilliseconds() > MEDIA_GRAPH_TARGET_PERIOD_MS) { |
|
1075 mLastMainThreadUpdate = now; |
|
1076 return true; |
|
1077 } |
|
1078 return false; |
|
1079 } |
|
1080 |
|
1081 void |
|
1082 MediaStreamGraphImpl::PrepareUpdatesToMainThreadState(bool aFinalUpdate) |
|
1083 { |
|
1084 mMonitor.AssertCurrentThreadOwns(); |
|
1085 |
|
1086 // We don't want to frequently update the main thread about timing update |
|
1087 // when we are not running in realtime. |
|
1088 if (aFinalUpdate || ShouldUpdateMainThread()) { |
|
1089 mStreamUpdates.SetCapacity(mStreamUpdates.Length() + mStreams.Length()); |
|
1090 for (uint32_t i = 0; i < mStreams.Length(); ++i) { |
|
1091 MediaStream* stream = mStreams[i]; |
|
1092 if (!stream->MainThreadNeedsUpdates()) { |
|
1093 continue; |
|
1094 } |
|
1095 StreamUpdate* update = mStreamUpdates.AppendElement(); |
|
1096 update->mGraphUpdateIndex = stream->mGraphUpdateIndices.GetAt(mCurrentTime); |
|
1097 update->mStream = stream; |
|
1098 update->mNextMainThreadCurrentTime = |
|
1099 GraphTimeToStreamTime(stream, mCurrentTime); |
|
1100 update->mNextMainThreadFinished = stream->mNotifiedFinished; |
|
1101 } |
|
1102 if (!mPendingUpdateRunnables.IsEmpty()) { |
|
1103 mUpdateRunnables.MoveElementsFrom(mPendingUpdateRunnables); |
|
1104 } |
|
1105 } |
|
1106 |
|
1107 // Don't send the message to the main thread if it's not going to have |
|
1108 // any work to do. |
|
1109 if (aFinalUpdate || |
|
1110 !mUpdateRunnables.IsEmpty() || |
|
1111 !mStreamUpdates.IsEmpty()) { |
|
1112 EnsureStableStateEventPosted(); |
|
1113 } |
|
1114 } |
|
1115 |
|
1116 void |
|
1117 MediaStreamGraphImpl::EnsureImmediateWakeUpLocked(MonitorAutoLock& aLock) |
|
1118 { |
|
1119 if (mWaitState == WAITSTATE_WAITING_FOR_NEXT_ITERATION || |
|
1120 mWaitState == WAITSTATE_WAITING_INDEFINITELY) { |
|
1121 mWaitState = WAITSTATE_WAKING_UP; |
|
1122 aLock.Notify(); |
|
1123 } |
|
1124 } |
|
1125 |
|
1126 void |
|
1127 MediaStreamGraphImpl::EnsureNextIteration() |
|
1128 { |
|
1129 MonitorAutoLock lock(mMonitor); |
|
1130 EnsureNextIterationLocked(lock); |
|
1131 } |
|
1132 |
|
1133 void |
|
1134 MediaStreamGraphImpl::EnsureNextIterationLocked(MonitorAutoLock& aLock) |
|
1135 { |
|
1136 if (mNeedAnotherIteration) |
|
1137 return; |
|
1138 mNeedAnotherIteration = true; |
|
1139 if (mWaitState == WAITSTATE_WAITING_INDEFINITELY) { |
|
1140 mWaitState = WAITSTATE_WAKING_UP; |
|
1141 aLock.Notify(); |
|
1142 } |
|
1143 } |
|
1144 |
|
1145 /** |
|
1146 * Returns smallest value of t such that |
|
1147 * TimeToTicksRoundUp(aSampleRate, t) is a multiple of WEBAUDIO_BLOCK_SIZE |
|
1148 * and floor(TimeToTicksRoundUp(aSampleRate, t)/WEBAUDIO_BLOCK_SIZE) > |
|
1149 * floor(TimeToTicksRoundUp(aSampleRate, aTime)/WEBAUDIO_BLOCK_SIZE). |
|
1150 */ |
|
1151 static GraphTime |
|
1152 RoundUpToNextAudioBlock(TrackRate aSampleRate, GraphTime aTime) |
|
1153 { |
|
1154 TrackTicks ticks = TimeToTicksRoundUp(aSampleRate, aTime); |
|
1155 uint64_t block = ticks >> WEBAUDIO_BLOCK_SIZE_BITS; |
|
1156 uint64_t nextBlock = block + 1; |
|
1157 TrackTicks nextTicks = nextBlock << WEBAUDIO_BLOCK_SIZE_BITS; |
|
1158 // Find the smallest time t such that TimeToTicksRoundUp(aSampleRate,t) == nextTicks |
|
1159 // That's the smallest integer t such that |
|
1160 // t*aSampleRate > ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) |
|
1161 // Both sides are integers, so this is equivalent to |
|
1162 // t*aSampleRate >= ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1 |
|
1163 // t >= (((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1)/aSampleRate |
|
1164 // t = ceil((((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1)/aSampleRate) |
|
1165 // Using integer division, that's |
|
1166 // t = (((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1 + aSampleRate - 1)/aSampleRate |
|
1167 // = ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS)/aSampleRate + 1 |
|
1168 return ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS)/aSampleRate + 1; |
|
1169 } |
|
1170 |
|
1171 void |
|
1172 MediaStreamGraphImpl::ProduceDataForStreamsBlockByBlock(uint32_t aStreamIndex, |
|
1173 TrackRate aSampleRate, |
|
1174 GraphTime aFrom, |
|
1175 GraphTime aTo) |
|
1176 { |
|
1177 GraphTime t = aFrom; |
|
1178 while (t < aTo) { |
|
1179 GraphTime next = RoundUpToNextAudioBlock(aSampleRate, t); |
|
1180 for (uint32_t i = aStreamIndex; i < mStreams.Length(); ++i) { |
|
1181 ProcessedMediaStream* ps = mStreams[i]->AsProcessedStream(); |
|
1182 if (ps) { |
|
1183 ps->ProcessInput(t, next, (next == aTo) ? ProcessedMediaStream::ALLOW_FINISH : 0); |
|
1184 } |
|
1185 } |
|
1186 t = next; |
|
1187 } |
|
1188 NS_ASSERTION(t == aTo, "Something went wrong with rounding to block boundaries"); |
|
1189 } |
|
1190 |
|
1191 bool |
|
1192 MediaStreamGraphImpl::AllFinishedStreamsNotified() |
|
1193 { |
|
1194 for (uint32_t i = 0; i < mStreams.Length(); ++i) { |
|
1195 MediaStream* s = mStreams[i]; |
|
1196 if (s->mFinished && !s->mNotifiedFinished) { |
|
1197 return false; |
|
1198 } |
|
1199 } |
|
1200 return true; |
|
1201 } |
|
1202 |
|
1203 void |
|
1204 MediaStreamGraphImpl::PauseAllAudioOutputs() |
|
1205 { |
|
1206 for (uint32_t i = 0; i < mStreams.Length(); ++i) { |
|
1207 MediaStream* s = mStreams[i]; |
|
1208 for (uint32_t j = 0; j < s->mAudioOutputStreams.Length(); ++j) { |
|
1209 s->mAudioOutputStreams[j].mStream->Pause(); |
|
1210 } |
|
1211 } |
|
1212 } |
|
1213 |
|
1214 void |
|
1215 MediaStreamGraphImpl::ResumeAllAudioOutputs() |
|
1216 { |
|
1217 for (uint32_t i = 0; i < mStreams.Length(); ++i) { |
|
1218 MediaStream* s = mStreams[i]; |
|
1219 for (uint32_t j = 0; j < s->mAudioOutputStreams.Length(); ++j) { |
|
1220 s->mAudioOutputStreams[j].mStream->Resume(); |
|
1221 } |
|
1222 } |
|
1223 } |
|
1224 |
|
// RAII helper: unregisters the current thread from the profiler when the
// graph thread's run loop returns, however it exits.
struct AutoProfilerUnregisterThread
{
  // The empty ctor is used to silence a pre-4.8.0 GCC unused variable warning.
  AutoProfilerUnregisterThread()
  {
  }

  ~AutoProfilerUnregisterThread()
  {
    profiler_unregister_thread();
  }
};
|
1237 |
|
/**
 * The graph thread's main loop. Each iteration: handles memory-report
 * requests, advances mCurrentTime, runs queued control messages, recomputes
 * stream blocking, produces and (in realtime mode) plays stream data, posts
 * state updates to the main thread, and then sleeps until the next
 * iteration. Returns only after sending a final update (shutdown).
 */
void
MediaStreamGraphImpl::RunThread()
{
  nsTArray<MessageBlock> messageQueue;
  {
    MonitorAutoLock lock(mMonitor);
    messageQueue.SwapElements(mMessageQueue);
  }
  NS_ASSERTION(!messageQueue.IsEmpty(),
               "Shouldn't have started a graph with empty message queue!");

  uint32_t ticksProcessed = 0;
  AutoProfilerUnregisterThread autoUnregister;

  for (;;) {
    // Check if a memory report has been requested.
    {
      MonitorAutoLock lock(mMemoryReportMonitor);
      if (mNeedsMemoryReport) {
        mNeedsMemoryReport = false;

        for (uint32_t i = 0; i < mStreams.Length(); ++i) {
          AudioNodeStream* stream = mStreams[i]->AsAudioNodeStream();
          if (stream) {
            AudioNodeSizes usage;
            stream->SizeOfAudioNodesIncludingThis(MallocSizeOf, usage);
            mAudioStreamSizes.AppendElement(usage);
          }
        }

        // Wake the thread that is blocked waiting for the report.
        lock.Notify();
      }
    }

    // Update mCurrentTime to the min of the playing audio times, or using the
    // wall-clock time change if no audio is playing.
    UpdateCurrentTime();

    // Calculate independent action times for each batch of messages (each
    // batch corresponding to an event loop task). This isolates the performance
    // of different scripts to some extent.
    for (uint32_t i = 0; i < messageQueue.Length(); ++i) {
      mProcessingGraphUpdateIndex = messageQueue[i].mGraphUpdateIndex;
      nsTArray<nsAutoPtr<ControlMessage> >& messages = messageQueue[i].mMessages;

      for (uint32_t j = 0; j < messages.Length(); ++j) {
        messages[j]->Run();
      }
    }
    messageQueue.Clear();

    if (mStreamOrderDirty) {
      UpdateStreamOrder();
    }

    // Decide blocking far enough ahead to cover the audio target, rounded to
    // a WebAudio block boundary.
    GraphTime endBlockingDecisions =
      RoundUpToNextAudioBlock(mSampleRate, mCurrentTime + MillisecondsToMediaTime(AUDIO_TARGET_MS));
    bool ensureNextIteration = false;

    // Grab pending stream input.
    for (uint32_t i = 0; i < mStreams.Length(); ++i) {
      SourceMediaStream* is = mStreams[i]->AsSourceStream();
      if (is) {
        UpdateConsumptionState(is);
        ExtractPendingInput(is, endBlockingDecisions, &ensureNextIteration);
      }
    }

    // The loop is woken up so soon that mCurrentTime barely advances and we
    // end up having endBlockingDecisions == mStateComputedTime.
    // Since stream blocking is computed in the interval of
    // [mStateComputedTime, endBlockingDecisions), it won't be computed at all.
    // We should ensure next iteration so that pending blocking changes will be
    // computed in next loop.
    if (endBlockingDecisions == mStateComputedTime) {
      ensureNextIteration = true;
    }

    // Figure out which streams are blocked and when.
    GraphTime prevComputedTime = mStateComputedTime;
    RecomputeBlocking(endBlockingDecisions);

    // Play stream contents.
    bool allBlockedForever = true;
    // True when we've done ProcessInput for all processed streams.
    bool doneAllProducing = false;
    // This is the number of frame that are written to the AudioStreams, for
    // this cycle.
    TrackTicks ticksPlayed = 0;
    // Figure out what each stream wants to do
    for (uint32_t i = 0; i < mStreams.Length(); ++i) {
      MediaStream* stream = mStreams[i];
      if (!doneAllProducing) {
        ProcessedMediaStream* ps = stream->AsProcessedStream();
        if (ps) {
          AudioNodeStream* n = stream->AsAudioNodeStream();
          if (n) {
#ifdef DEBUG
            // Verify that the sampling rate for all of the following streams is the same
            for (uint32_t j = i + 1; j < mStreams.Length(); ++j) {
              AudioNodeStream* nextStream = mStreams[j]->AsAudioNodeStream();
              if (nextStream) {
                MOZ_ASSERT(n->SampleRate() == nextStream->SampleRate(),
                           "All AudioNodeStreams in the graph must have the same sampling rate");
              }
            }
#endif
            // Since an AudioNodeStream is present, go ahead and
            // produce audio block by block for all the rest of the streams.
            ProduceDataForStreamsBlockByBlock(i, n->SampleRate(), prevComputedTime, mStateComputedTime);
            ticksProcessed += TimeToTicksRoundDown(n->SampleRate(), mStateComputedTime - prevComputedTime);
            doneAllProducing = true;
          } else {
            ps->ProcessInput(prevComputedTime, mStateComputedTime,
                             ProcessedMediaStream::ALLOW_FINISH);
            NS_WARN_IF_FALSE(stream->mBuffer.GetEnd() >=
                             GraphTimeToStreamTime(stream, mStateComputedTime),
                             "Stream did not produce enough data");
          }
        }
      }
      NotifyHasCurrentData(stream);
      if (mRealtime) {
        // Only playback audio and video in real-time mode
        CreateOrDestroyAudioStreams(prevComputedTime, stream);
        TrackTicks ticksPlayedForThisStream = PlayAudio(stream, prevComputedTime, mStateComputedTime);
        if (!ticksPlayed) {
          ticksPlayed = ticksPlayedForThisStream;
        } else {
          MOZ_ASSERT(!ticksPlayedForThisStream || ticksPlayedForThisStream == ticksPlayed,
                     "Each stream should have the same number of frame.");
        }
        PlayVideo(stream);
      }
      SourceMediaStream* is = stream->AsSourceStream();
      if (is) {
        UpdateBufferSufficiencyState(is);
      }
      // A stream that is unblocked now, or becomes unblocked before
      // GRAPH_TIME_MAX, still needs the loop to keep running.
      GraphTime end;
      if (!stream->mBlocked.GetAt(mCurrentTime, &end) || end < GRAPH_TIME_MAX) {
        allBlockedForever = false;
      }
    }

    if (mMixer) {
      mMixer->FinishMixing();
    }

    if (ensureNextIteration || !allBlockedForever) {
      EnsureNextIteration();
    }

    // Send updates to the main thread and wait for the next control loop
    // iteration.
    {
      MonitorAutoLock lock(mMonitor);
      bool finalUpdate = mForceShutDown ||
        (mCurrentTime >= mEndTime && AllFinishedStreamsNotified()) ||
        (IsEmpty() && mMessageQueue.IsEmpty());
      PrepareUpdatesToMainThreadState(finalUpdate);
      if (finalUpdate) {
        // Enter shutdown mode. The stable-state handler will detect this
        // and complete shutdown. Destroy any streams immediately.
        STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p waiting for main thread cleanup", this));
        // We'll shut down this graph object if it does not get restarted.
        mLifecycleState = LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP;
        // No need to Destroy streams here. The main-thread owner of each
        // stream is responsible for calling Destroy on them.
        return;
      }

      // No need to wait in non-realtime mode, just churn through the input as soon
      // as possible.
      if (mRealtime) {
        PRIntervalTime timeout = PR_INTERVAL_NO_TIMEOUT;
        TimeStamp now = TimeStamp::Now();
        bool pausedOutputs = false;
        if (mNeedAnotherIteration) {
          int64_t timeoutMS = MEDIA_GRAPH_TARGET_PERIOD_MS -
            int64_t((now - mCurrentTimeStamp).ToMilliseconds());
          // Make sure timeoutMS doesn't overflow 32 bits by waking up at
          // least once a minute, if we need to wake up at all
          timeoutMS = std::max<int64_t>(0, std::min<int64_t>(timeoutMS, 60*1000));
          timeout = PR_MillisecondsToInterval(uint32_t(timeoutMS));
          STREAM_LOG(PR_LOG_DEBUG+1, ("Waiting for next iteration; at %f, timeout=%f",
                                      (now - mInitialTimeStamp).ToSeconds(), timeoutMS/1000.0));
          mWaitState = WAITSTATE_WAITING_FOR_NEXT_ITERATION;
        } else {
          // Nothing scheduled: sleep until something wakes us, and stop
          // feeding the audio outputs while we do.
          mWaitState = WAITSTATE_WAITING_INDEFINITELY;
          PauseAllAudioOutputs();
          pausedOutputs = true;
        }
        if (timeout > 0) {
          mMonitor.Wait(timeout);
          STREAM_LOG(PR_LOG_DEBUG+1, ("Resuming after timeout; at %f, elapsed=%f",
                                      (TimeStamp::Now() - mInitialTimeStamp).ToSeconds(),
                                      (TimeStamp::Now() - now).ToSeconds()));
        }
        if (pausedOutputs) {
          ResumeAllAudioOutputs();
        }
      }
      mWaitState = WAITSTATE_RUNNING;
      mNeedAnotherIteration = false;
      // Pick up any messages that arrived while we slept; they run at the
      // top of the next iteration.
      messageQueue.SwapElements(mMessageQueue);
    }
  }
}
|
1446 |
|
1447 void |
|
1448 MediaStreamGraphImpl::ApplyStreamUpdate(StreamUpdate* aUpdate) |
|
1449 { |
|
1450 mMonitor.AssertCurrentThreadOwns(); |
|
1451 |
|
1452 MediaStream* stream = aUpdate->mStream; |
|
1453 if (!stream) |
|
1454 return; |
|
1455 stream->mMainThreadCurrentTime = aUpdate->mNextMainThreadCurrentTime; |
|
1456 stream->mMainThreadFinished = aUpdate->mNextMainThreadFinished; |
|
1457 |
|
1458 if (stream->mWrapper) { |
|
1459 stream->mWrapper->NotifyStreamStateChanged(); |
|
1460 } |
|
1461 for (int32_t i = stream->mMainThreadListeners.Length() - 1; i >= 0; --i) { |
|
1462 stream->mMainThreadListeners[i]->NotifyMainThreadStateChanged(); |
|
1463 } |
|
1464 } |
|
1465 |
|
1466 void |
|
1467 MediaStreamGraphImpl::ShutdownThreads() |
|
1468 { |
|
1469 NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread"); |
|
1470 // mGraph's thread is not running so it's OK to do whatever here |
|
1471 STREAM_LOG(PR_LOG_DEBUG, ("Stopping threads for MediaStreamGraph %p", this)); |
|
1472 |
|
1473 if (mThread) { |
|
1474 mThread->Shutdown(); |
|
1475 mThread = nullptr; |
|
1476 } |
|
1477 } |
|
1478 |
|
1479 void |
|
1480 MediaStreamGraphImpl::ForceShutDown() |
|
1481 { |
|
1482 NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread"); |
|
1483 STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p ForceShutdown", this)); |
|
1484 { |
|
1485 MonitorAutoLock lock(mMonitor); |
|
1486 mForceShutDown = true; |
|
1487 EnsureImmediateWakeUpLocked(lock); |
|
1488 } |
|
1489 } |
|
1490 |
|
1491 namespace { |
|
1492 |
|
1493 class MediaStreamGraphInitThreadRunnable : public nsRunnable { |
|
1494 public: |
|
1495 explicit MediaStreamGraphInitThreadRunnable(MediaStreamGraphImpl* aGraph) |
|
1496 : mGraph(aGraph) |
|
1497 { |
|
1498 } |
|
1499 NS_IMETHOD Run() |
|
1500 { |
|
1501 char aLocal; |
|
1502 profiler_register_thread("MediaStreamGraph", &aLocal); |
|
1503 mGraph->RunThread(); |
|
1504 return NS_OK; |
|
1505 } |
|
1506 private: |
|
1507 MediaStreamGraphImpl* mGraph; |
|
1508 }; |
|
1509 |
|
1510 class MediaStreamGraphThreadRunnable : public nsRunnable { |
|
1511 public: |
|
1512 explicit MediaStreamGraphThreadRunnable(MediaStreamGraphImpl* aGraph) |
|
1513 : mGraph(aGraph) |
|
1514 { |
|
1515 } |
|
1516 NS_IMETHOD Run() |
|
1517 { |
|
1518 mGraph->RunThread(); |
|
1519 return NS_OK; |
|
1520 } |
|
1521 private: |
|
1522 MediaStreamGraphImpl* mGraph; |
|
1523 }; |
|
1524 |
|
1525 class MediaStreamGraphShutDownRunnable : public nsRunnable { |
|
1526 public: |
|
1527 MediaStreamGraphShutDownRunnable(MediaStreamGraphImpl* aGraph) : mGraph(aGraph) {} |
|
1528 NS_IMETHOD Run() |
|
1529 { |
|
1530 NS_ASSERTION(mGraph->mDetectedNotRunning, |
|
1531 "We should know the graph thread control loop isn't running!"); |
|
1532 |
|
1533 mGraph->ShutdownThreads(); |
|
1534 |
|
1535 // mGraph's thread is not running so it's OK to do whatever here |
|
1536 if (mGraph->IsEmpty()) { |
|
1537 // mGraph is no longer needed, so delete it. |
|
1538 mGraph->Destroy(); |
|
1539 } else { |
|
1540 // The graph is not empty. We must be in a forced shutdown, or a |
|
1541 // non-realtime graph that has finished processing. Some later |
|
1542 // AppendMessage will detect that the manager has been emptied, and |
|
1543 // delete it. |
|
1544 NS_ASSERTION(mGraph->mForceShutDown || !mGraph->mRealtime, |
|
1545 "Not in forced shutdown?"); |
|
1546 for (uint32_t i = 0; i < mGraph->mStreams.Length(); ++i) { |
|
1547 DOMMediaStream* s = mGraph->mStreams[i]->GetWrapper(); |
|
1548 if (s) { |
|
1549 s->NotifyMediaStreamGraphShutdown(); |
|
1550 } |
|
1551 } |
|
1552 |
|
1553 mGraph->mLifecycleState = |
|
1554 MediaStreamGraphImpl::LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION; |
|
1555 } |
|
1556 return NS_OK; |
|
1557 } |
|
1558 private: |
|
1559 MediaStreamGraphImpl* mGraph; |
|
1560 }; |
|
1561 |
|
1562 class MediaStreamGraphStableStateRunnable : public nsRunnable { |
|
1563 public: |
|
1564 explicit MediaStreamGraphStableStateRunnable(MediaStreamGraphImpl* aGraph) |
|
1565 : mGraph(aGraph) |
|
1566 { |
|
1567 } |
|
1568 NS_IMETHOD Run() |
|
1569 { |
|
1570 if (mGraph) { |
|
1571 mGraph->RunInStableState(); |
|
1572 } |
|
1573 return NS_OK; |
|
1574 } |
|
1575 private: |
|
1576 MediaStreamGraphImpl* mGraph; |
|
1577 }; |
|
1578 |
|
1579 /* |
|
1580 * Control messages forwarded from main thread to graph manager thread |
|
1581 */ |
|
1582 class CreateMessage : public ControlMessage { |
|
1583 public: |
|
1584 CreateMessage(MediaStream* aStream) : ControlMessage(aStream) {} |
|
1585 virtual void Run() MOZ_OVERRIDE |
|
1586 { |
|
1587 mStream->GraphImpl()->AddStream(mStream); |
|
1588 mStream->Init(); |
|
1589 } |
|
1590 virtual void RunDuringShutdown() MOZ_OVERRIDE |
|
1591 { |
|
1592 // Make sure to run this message during shutdown too, to make sure |
|
1593 // that we balance the number of streams registered with the graph |
|
1594 // as they're destroyed during shutdown. |
|
1595 Run(); |
|
1596 } |
|
1597 }; |
|
1598 |
|
// nsIObserver implementation used for graph shutdown. NOTE(review): neither
// the observed topic nor the registration site is visible in this chunk —
// the Observe() implementation lives elsewhere in the file; confirm there.
class MediaStreamGraphShutdownObserver MOZ_FINAL : public nsIObserver
{
public:
  NS_DECL_ISUPPORTS
  NS_DECL_NSIOBSERVER
};
|
1605 |
|
1606 } |
|
1607 |
|
/**
 * Runs on the main thread in "stable state" (no script on the stack).
 * Applies stream updates published by the graph thread, forwards queued
 * control messages to the graph thread, and drives the graph lifecycle
 * state machine: starting the graph thread, reviving a graph that was
 * waiting for cleanup, and initiating shutdown.
 */
void
MediaStreamGraphImpl::RunInStableState()
{
  NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");

  nsTArray<nsCOMPtr<nsIRunnable> > runnables;
  // When we're doing a forced shutdown, pending control messages may be
  // run on the main thread via RunDuringShutdown. Those messages must
  // run without the graph monitor being held. So, we collect them here.
  nsTArray<nsAutoPtr<ControlMessage> > controlMessagesToRunDuringShutdown;

  {
    MonitorAutoLock lock(mMonitor);
    // Allow a new stable-state event to be posted from here on.
    mPostedRunInStableStateEvent = false;

    runnables.SwapElements(mUpdateRunnables);
    // Apply the state the graph thread published for each stream.
    for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) {
      StreamUpdate* update = &mStreamUpdates[i];
      if (update->mStream) {
        ApplyStreamUpdate(update);
      }
    }
    mStreamUpdates.Clear();

    // Don't start the thread for a non-realtime graph until it has been
    // explicitly started by StartNonRealtimeProcessing.
    if (mLifecycleState == LIFECYCLE_THREAD_NOT_STARTED &&
        (mRealtime || mNonRealtimeProcessing)) {
      mLifecycleState = LIFECYCLE_RUNNING;
      // Start the thread now. We couldn't start it earlier because
      // the graph might exit immediately on finding it has no streams. The
      // first message for a new graph must create a stream.
      nsCOMPtr<nsIRunnable> event = new MediaStreamGraphInitThreadRunnable(this);
      NS_NewNamedThread("MediaStreamGrph", getter_AddRefs(mThread), event);
    }

    if (mCurrentTaskMessageQueue.IsEmpty()) {
      if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP && IsEmpty()) {
        // Complete shutdown. First, ensure that this graph is no longer used.
        // A new graph will be created if one is needed.
        STREAM_LOG(PR_LOG_DEBUG, ("Disconnecting MediaStreamGraph %p", this));
        if (this == gGraph) {
          // null out gGraph if that's the graph being shut down
          gGraph = nullptr;
        }
        // Asynchronously clean up old graph. We don't want to do this
        // synchronously because it spins the event loop waiting for threads
        // to shut down, and we don't want to do that in a stable state handler.
        mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
        nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
        NS_DispatchToMainThread(event);
      }
    } else {
      if (mLifecycleState <= LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
        // Hand the accumulated messages to the graph thread as one block,
        // tagged with the update index so replies can be correlated.
        MessageBlock* block = mMessageQueue.AppendElement();
        block->mMessages.SwapElements(mCurrentTaskMessageQueue);
        block->mGraphUpdateIndex = mNextGraphUpdateIndex;
        ++mNextGraphUpdateIndex;
        EnsureNextIterationLocked(lock);
      }

      // If the MediaStreamGraph has more messages going to it, try to revive
      // it to process those messages. Don't do this if we're in a forced
      // shutdown or it's a non-realtime graph that has already terminated
      // processing.
      if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP &&
          mRealtime && !mForceShutDown) {
        mLifecycleState = LIFECYCLE_RUNNING;
        // Revive the MediaStreamGraph since we have more messages going to it.
        // Note that we need to put messages into its queue before reviving it,
        // or it might exit immediately.
        nsCOMPtr<nsIRunnable> event = new MediaStreamGraphThreadRunnable(this);
        mThread->Dispatch(event, 0);
      }
    }

    if ((mForceShutDown || !mRealtime) &&
        mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
      // Defer calls to RunDuringShutdown() to happen while mMonitor is not held.
      for (uint32_t i = 0; i < mMessageQueue.Length(); ++i) {
        MessageBlock& mb = mMessageQueue[i];
        controlMessagesToRunDuringShutdown.MoveElementsFrom(mb.mMessages);
      }
      mMessageQueue.Clear();
      MOZ_ASSERT(mCurrentTaskMessageQueue.IsEmpty());
      // Stop MediaStreamGraph threads. Do not clear gGraph since
      // we have outstanding DOM objects that may need it.
      mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
      nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
      NS_DispatchToMainThread(event);
    }

    mDetectedNotRunning = mLifecycleState > LIFECYCLE_RUNNING;
  }

  // Make sure we get a new current time in the next event loop task
  mPostedRunInStableState = false;

  // Run the collected update runnables and deferred shutdown messages
  // outside the monitor.
  for (uint32_t i = 0; i < runnables.Length(); ++i) {
    runnables[i]->Run();
  }
  for (uint32_t i = 0; i < controlMessagesToRunDuringShutdown.Length(); ++i) {
    controlMessagesToRunDuringShutdown[i]->RunDuringShutdown();
  }

#ifdef DEBUG
  mCanRunMessagesSynchronously = mDetectedNotRunning &&
    mLifecycleState >= LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
#endif
}
|
1718 |
|
// CID of the XPCOM appshell service, used to schedule stable-state callbacks.
static NS_DEFINE_CID(kAppShellCID, NS_APPSHELL_CID);
|
1720 |
|
1721 void |
|
1722 MediaStreamGraphImpl::EnsureRunInStableState() |
|
1723 { |
|
1724 NS_ASSERTION(NS_IsMainThread(), "main thread only"); |
|
1725 |
|
1726 if (mPostedRunInStableState) |
|
1727 return; |
|
1728 mPostedRunInStableState = true; |
|
1729 nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this); |
|
1730 nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID); |
|
1731 if (appShell) { |
|
1732 appShell->RunInStableState(event); |
|
1733 } else { |
|
1734 NS_ERROR("Appshell already destroyed?"); |
|
1735 } |
|
1736 } |
|
1737 |
|
1738 void |
|
1739 MediaStreamGraphImpl::EnsureStableStateEventPosted() |
|
1740 { |
|
1741 mMonitor.AssertCurrentThreadOwns(); |
|
1742 |
|
1743 if (mPostedRunInStableStateEvent) |
|
1744 return; |
|
1745 mPostedRunInStableStateEvent = true; |
|
1746 nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this); |
|
1747 NS_DispatchToMainThread(event); |
|
1748 } |
|
1749 |
|
/**
 * Queues aMessage for the graph thread; takes ownership of aMessage.
 * If the graph control loop has stopped and main-thread cleanup has already
 * happened, the message is run synchronously via RunDuringShutdown() instead,
 * since the queue would never be processed again.
 */
void
MediaStreamGraphImpl::AppendMessage(ControlMessage* aMessage)
{
  NS_ASSERTION(NS_IsMainThread(), "main thread only");
  NS_ASSERTION(!aMessage->GetStream() ||
               !aMessage->GetStream()->IsDestroyed(),
               "Stream already destroyed");

  if (mDetectedNotRunning &&
      mLifecycleState > LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
    // The graph control loop is not running and main thread cleanup has
    // happened. From now on we can't append messages to mCurrentTaskMessageQueue,
    // because that will never be processed again, so just RunDuringShutdown
    // this message.
    // This should only happen during forced shutdown, or after a non-realtime
    // graph has finished processing.
#ifdef DEBUG
    // Guard against RunDuringShutdown() re-entering AppendMessage().
    MOZ_ASSERT(mCanRunMessagesSynchronously);
    mCanRunMessagesSynchronously = false;
#endif
    aMessage->RunDuringShutdown();
#ifdef DEBUG
    mCanRunMessagesSynchronously = true;
#endif
    delete aMessage;
    if (IsEmpty() &&
        mLifecycleState >= LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION) {
      // The last stream is gone and shutdown has progressed far enough:
      // drop the global pointer (if it is us) and destroy the graph.
      if (gGraph == this) {
        gGraph = nullptr;
      }
      Destroy();
    }
    return;
  }

  // Normal path: queue the message and make sure a stable-state callback
  // will forward it to the graph thread.
  mCurrentTaskMessageQueue.AppendElement(aMessage);
  EnsureRunInStableState();
}
|
1788 |
|
/**
 * Constructs a stream not yet attached to any graph (mGraph is null until
 * SetGraphImpl() is called). aWrapper is the DOMMediaStream that will own
 * this stream, or null.
 */
MediaStream::MediaStream(DOMMediaStream* aWrapper)
  : mBufferStartTime(0)
  , mExplicitBlockerCount(0)
  , mBlocked(false)
  , mGraphUpdateIndices(0)
  , mFinished(false)
  , mNotifiedFinished(false)
  , mNotifiedBlocked(false)
  , mHasCurrentData(false)
  , mNotifiedHasCurrentData(false)
  , mWrapper(aWrapper)
  , mMainThreadCurrentTime(0)
  , mMainThreadFinished(false)
  , mMainThreadDestroyed(false)
  , mGraph(nullptr)
  , mAudioChannelType(dom::AudioChannel::Normal)
{
  MOZ_COUNT_CTOR(MediaStream);
  // aWrapper should not already be connected to a MediaStream! It needs
  // to be hooked up to this stream, and since this stream is only just
  // being created now, aWrapper must not be connected to anything.
  NS_ASSERTION(!aWrapper || !aWrapper->GetStream(),
               "Wrapper already has another media stream hooked up to it!");
}
|
1813 |
|
1814 size_t |
|
1815 MediaStream::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const |
|
1816 { |
|
1817 size_t amount = 0; |
|
1818 |
|
1819 // Not owned: |
|
1820 // - mGraph - Not reported here |
|
1821 // - mConsumers - elements |
|
1822 // Future: |
|
1823 // - mWrapper |
|
1824 // - mVideoOutputs - elements |
|
1825 // - mLastPlayedVideoFrame |
|
1826 // - mListeners - elements |
|
1827 // - mAudioOutputStreams - elements |
|
1828 |
|
1829 amount += mBuffer.SizeOfExcludingThis(aMallocSizeOf); |
|
1830 amount += mAudioOutputs.SizeOfExcludingThis(aMallocSizeOf); |
|
1831 amount += mVideoOutputs.SizeOfExcludingThis(aMallocSizeOf); |
|
1832 amount += mExplicitBlockerCount.SizeOfExcludingThis(aMallocSizeOf); |
|
1833 amount += mListeners.SizeOfExcludingThis(aMallocSizeOf); |
|
1834 amount += mMainThreadListeners.SizeOfExcludingThis(aMallocSizeOf); |
|
1835 amount += mDisabledTrackIDs.SizeOfExcludingThis(aMallocSizeOf); |
|
1836 amount += mBlocked.SizeOfExcludingThis(aMallocSizeOf); |
|
1837 amount += mGraphUpdateIndices.SizeOfExcludingThis(aMallocSizeOf); |
|
1838 amount += mConsumers.SizeOfExcludingThis(aMallocSizeOf); |
|
1839 amount += mAudioOutputStreams.SizeOfExcludingThis(aMallocSizeOf); |
|
1840 for (size_t i = 0; i < mAudioOutputStreams.Length(); i++) { |
|
1841 amount += mAudioOutputStreams[i].SizeOfExcludingThis(aMallocSizeOf); |
|
1842 } |
|
1843 |
|
1844 return amount; |
|
1845 } |
|
1846 |
|
1847 size_t |
|
1848 MediaStream::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const |
|
1849 { |
|
1850 return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf); |
|
1851 } |
|
1852 |
|
1853 void |
|
1854 MediaStream::Init() |
|
1855 { |
|
1856 MediaStreamGraphImpl* graph = GraphImpl(); |
|
1857 mBlocked.SetAtAndAfter(graph->mCurrentTime, true); |
|
1858 mExplicitBlockerCount.SetAtAndAfter(graph->mCurrentTime, true); |
|
1859 mExplicitBlockerCount.SetAtAndAfter(graph->mStateComputedTime, false); |
|
1860 } |
|
1861 |
|
// Returns the owning graph as its concrete implementation type.
// Null after DestroyImpl() has run.
MediaStreamGraphImpl*
MediaStream::GraphImpl()
{
  return mGraph;
}
|
1867 |
|
// Returns the owning graph through its public-facing type.
MediaStreamGraph*
MediaStream::Graph()
{
  return mGraph;
}
|
1873 |
|
// Attaches this stream to its graph. May only be called once per stream.
void
MediaStream::SetGraphImpl(MediaStreamGraphImpl* aGraph)
{
  MOZ_ASSERT(!mGraph, "Should only be called once");
  mGraph = aGraph;
}
|
1880 |
|
1881 void |
|
1882 MediaStream::SetGraphImpl(MediaStreamGraph* aGraph) |
|
1883 { |
|
1884 MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph); |
|
1885 SetGraphImpl(graph); |
|
1886 } |
|
1887 |
|
// Forwards to the graph's conversion from graph time to this stream's time.
StreamTime
MediaStream::GraphTimeToStreamTime(GraphTime aTime)
{
  return GraphImpl()->GraphTimeToStreamTime(this, aTime);
}
|
1893 |
|
// Forwards to the graph's "optimistic" graph-time to stream-time conversion.
StreamTime
MediaStream::GraphTimeToStreamTimeOptimistic(GraphTime aTime)
{
  return GraphImpl()->GraphTimeToStreamTimeOptimistic(this, aTime);
}
|
1899 |
|
// Forwards to the graph's stream-time to graph-time conversion
// (with no flags — the trailing 0 argument).
GraphTime
MediaStream::StreamTimeToGraphTime(StreamTime aTime)
{
  return GraphImpl()->StreamTimeToGraphTime(this, aTime, 0);
}
|
1905 |
|
// Marks this stream finished via the graph; called on the graph thread.
void
MediaStream::FinishOnGraphThread()
{
  GraphImpl()->FinishStream(this);
}
|
1911 |
|
// Returns the graph's index for the update currently being processed.
int64_t
MediaStream::GetProcessingGraphUpdateIndex()
{
  return GraphImpl()->GetProcessingGraphUpdateIndex();
}
|
1917 |
|
1918 StreamBuffer::Track* |
|
1919 MediaStream::EnsureTrack(TrackID aTrackId, TrackRate aSampleRate) |
|
1920 { |
|
1921 StreamBuffer::Track* track = mBuffer.FindTrack(aTrackId); |
|
1922 if (!track) { |
|
1923 nsAutoPtr<MediaSegment> segment(new AudioSegment()); |
|
1924 for (uint32_t j = 0; j < mListeners.Length(); ++j) { |
|
1925 MediaStreamListener* l = mListeners[j]; |
|
1926 l->NotifyQueuedTrackChanges(Graph(), aTrackId, |
|
1927 GraphImpl()->AudioSampleRate(), 0, |
|
1928 MediaStreamListener::TRACK_EVENT_CREATED, |
|
1929 *segment); |
|
1930 } |
|
1931 track = &mBuffer.AddTrack(aTrackId, aSampleRate, 0, segment.forget()); |
|
1932 } |
|
1933 return track; |
|
1934 } |
|
1935 |
|
1936 void |
|
1937 MediaStream::RemoveAllListenersImpl() |
|
1938 { |
|
1939 for (int32_t i = mListeners.Length() - 1; i >= 0; --i) { |
|
1940 nsRefPtr<MediaStreamListener> listener = mListeners[i].forget(); |
|
1941 listener->NotifyRemoved(GraphImpl()); |
|
1942 } |
|
1943 mListeners.Clear(); |
|
1944 } |
|
1945 |
|
1946 void |
|
1947 MediaStream::DestroyImpl() |
|
1948 { |
|
1949 for (int32_t i = mConsumers.Length() - 1; i >= 0; --i) { |
|
1950 mConsumers[i]->Disconnect(); |
|
1951 } |
|
1952 for (uint32_t i = 0; i < mAudioOutputStreams.Length(); ++i) { |
|
1953 mAudioOutputStreams[i].mStream->Shutdown(); |
|
1954 } |
|
1955 mAudioOutputStreams.Clear(); |
|
1956 mGraph = nullptr; |
|
1957 } |
|
1958 |
|
/**
 * Main-thread entry point for destroying this stream. Queues a control
 * message that removes listeners, tears the stream down, and removes it
 * from the graph on the graph thread (or synchronously during shutdown).
 */
void
MediaStream::Destroy()
{
  // Keep this stream alive until we leave this method
  nsRefPtr<MediaStream> kungFuDeathGrip = this;

  class Message : public ControlMessage {
  public:
    Message(MediaStream* aStream) : ControlMessage(aStream) {}
    virtual void Run()
    {
      mStream->RemoveAllListenersImpl();
      // Save the graph pointer first: DestroyImpl() nulls out mStream->mGraph.
      auto graph = mStream->GraphImpl();
      mStream->DestroyImpl();
      graph->RemoveStream(mStream);
    }
    virtual void RunDuringShutdown()
    { Run(); }
  };
  mWrapper = nullptr;
  GraphImpl()->AppendMessage(new Message(this));
  // Message::RunDuringShutdown may have removed this stream from the graph,
  // but our kungFuDeathGrip above will have kept this stream alive if
  // necessary.
  mMainThreadDestroyed = true;
}
|
1985 |
|
1986 void |
|
1987 MediaStream::AddAudioOutput(void* aKey) |
|
1988 { |
|
1989 class Message : public ControlMessage { |
|
1990 public: |
|
1991 Message(MediaStream* aStream, void* aKey) : ControlMessage(aStream), mKey(aKey) {} |
|
1992 virtual void Run() |
|
1993 { |
|
1994 mStream->AddAudioOutputImpl(mKey); |
|
1995 } |
|
1996 void* mKey; |
|
1997 }; |
|
1998 GraphImpl()->AppendMessage(new Message(this, aKey)); |
|
1999 } |
|
2000 |
|
2001 void |
|
2002 MediaStream::SetAudioOutputVolumeImpl(void* aKey, float aVolume) |
|
2003 { |
|
2004 for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) { |
|
2005 if (mAudioOutputs[i].mKey == aKey) { |
|
2006 mAudioOutputs[i].mVolume = aVolume; |
|
2007 return; |
|
2008 } |
|
2009 } |
|
2010 NS_ERROR("Audio output key not found"); |
|
2011 } |
|
2012 |
|
2013 void |
|
2014 MediaStream::SetAudioOutputVolume(void* aKey, float aVolume) |
|
2015 { |
|
2016 class Message : public ControlMessage { |
|
2017 public: |
|
2018 Message(MediaStream* aStream, void* aKey, float aVolume) : |
|
2019 ControlMessage(aStream), mKey(aKey), mVolume(aVolume) {} |
|
2020 virtual void Run() |
|
2021 { |
|
2022 mStream->SetAudioOutputVolumeImpl(mKey, mVolume); |
|
2023 } |
|
2024 void* mKey; |
|
2025 float mVolume; |
|
2026 }; |
|
2027 GraphImpl()->AppendMessage(new Message(this, aKey, aVolume)); |
|
2028 } |
|
2029 |
|
2030 void |
|
2031 MediaStream::RemoveAudioOutputImpl(void* aKey) |
|
2032 { |
|
2033 for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) { |
|
2034 if (mAudioOutputs[i].mKey == aKey) { |
|
2035 mAudioOutputs.RemoveElementAt(i); |
|
2036 return; |
|
2037 } |
|
2038 } |
|
2039 NS_ERROR("Audio output key not found"); |
|
2040 } |
|
2041 |
|
2042 void |
|
2043 MediaStream::RemoveAudioOutput(void* aKey) |
|
2044 { |
|
2045 class Message : public ControlMessage { |
|
2046 public: |
|
2047 Message(MediaStream* aStream, void* aKey) : |
|
2048 ControlMessage(aStream), mKey(aKey) {} |
|
2049 virtual void Run() |
|
2050 { |
|
2051 mStream->RemoveAudioOutputImpl(mKey); |
|
2052 } |
|
2053 void* mKey; |
|
2054 }; |
|
2055 GraphImpl()->AppendMessage(new Message(this, aKey)); |
|
2056 } |
|
2057 |
|
2058 void |
|
2059 MediaStream::AddVideoOutput(VideoFrameContainer* aContainer) |
|
2060 { |
|
2061 class Message : public ControlMessage { |
|
2062 public: |
|
2063 Message(MediaStream* aStream, VideoFrameContainer* aContainer) : |
|
2064 ControlMessage(aStream), mContainer(aContainer) {} |
|
2065 virtual void Run() |
|
2066 { |
|
2067 mStream->AddVideoOutputImpl(mContainer.forget()); |
|
2068 } |
|
2069 nsRefPtr<VideoFrameContainer> mContainer; |
|
2070 }; |
|
2071 GraphImpl()->AppendMessage(new Message(this, aContainer)); |
|
2072 } |
|
2073 |
|
2074 void |
|
2075 MediaStream::RemoveVideoOutput(VideoFrameContainer* aContainer) |
|
2076 { |
|
2077 class Message : public ControlMessage { |
|
2078 public: |
|
2079 Message(MediaStream* aStream, VideoFrameContainer* aContainer) : |
|
2080 ControlMessage(aStream), mContainer(aContainer) {} |
|
2081 virtual void Run() |
|
2082 { |
|
2083 mStream->RemoveVideoOutputImpl(mContainer); |
|
2084 } |
|
2085 nsRefPtr<VideoFrameContainer> mContainer; |
|
2086 }; |
|
2087 GraphImpl()->AppendMessage(new Message(this, aContainer)); |
|
2088 } |
|
2089 |
|
/**
 * Asynchronously adjusts this stream's explicit blocker count by aDelta,
 * effective from the graph's already-computed state time. Safe to call
 * after the stream has been destroyed (then it's a no-op).
 */
void
MediaStream::ChangeExplicitBlockerCount(int32_t aDelta)
{
  class Message : public ControlMessage {
  public:
    Message(MediaStream* aStream, int32_t aDelta) :
      ControlMessage(aStream), mDelta(aDelta) {}
    virtual void Run()
    {
      mStream->ChangeExplicitBlockerCountImpl(
          mStream->GraphImpl()->mStateComputedTime, mDelta);
    }
    int32_t mDelta;
  };

  // This can happen if this method has been called asynchronously, and the
  // stream has been destroyed since then.
  if (mMainThreadDestroyed) {
    return;
  }
  GraphImpl()->AppendMessage(new Message(this, aDelta));
}
|
2112 |
|
/**
 * Graph-thread attachment of a listener. Takes ownership of the reference.
 * Immediately catches the new listener up on state it missed: current
 * blocking state, and finished/has-current-data if already notified.
 */
void
MediaStream::AddListenerImpl(already_AddRefed<MediaStreamListener> aListener)
{
  MediaStreamListener* listener = *mListeners.AppendElement() = aListener;
  listener->NotifyBlockingChanged(GraphImpl(),
    mNotifiedBlocked ? MediaStreamListener::BLOCKED : MediaStreamListener::UNBLOCKED);
  if (mNotifiedFinished) {
    listener->NotifyFinished(GraphImpl());
  }
  if (mNotifiedHasCurrentData) {
    listener->NotifyHasCurrentData(GraphImpl());
  }
}
|
2126 |
|
2127 void |
|
2128 MediaStream::AddListener(MediaStreamListener* aListener) |
|
2129 { |
|
2130 class Message : public ControlMessage { |
|
2131 public: |
|
2132 Message(MediaStream* aStream, MediaStreamListener* aListener) : |
|
2133 ControlMessage(aStream), mListener(aListener) {} |
|
2134 virtual void Run() |
|
2135 { |
|
2136 mStream->AddListenerImpl(mListener.forget()); |
|
2137 } |
|
2138 nsRefPtr<MediaStreamListener> mListener; |
|
2139 }; |
|
2140 GraphImpl()->AppendMessage(new Message(this, aListener)); |
|
2141 } |
|
2142 |
|
/**
 * Graph-thread detachment of a listener: remove it from the array, then
 * notify it. The local strong reference keeps the listener alive across
 * the removal so NotifyRemoved() can run safely.
 */
void
MediaStream::RemoveListenerImpl(MediaStreamListener* aListener)
{
  // wouldn't need this if we could do it in the opposite order
  nsRefPtr<MediaStreamListener> listener(aListener);
  mListeners.RemoveElement(aListener);
  listener->NotifyRemoved(GraphImpl());
}
|
2151 |
|
2152 void |
|
2153 MediaStream::RemoveListener(MediaStreamListener* aListener) |
|
2154 { |
|
2155 class Message : public ControlMessage { |
|
2156 public: |
|
2157 Message(MediaStream* aStream, MediaStreamListener* aListener) : |
|
2158 ControlMessage(aStream), mListener(aListener) {} |
|
2159 virtual void Run() |
|
2160 { |
|
2161 mStream->RemoveListenerImpl(mListener); |
|
2162 } |
|
2163 nsRefPtr<MediaStreamListener> mListener; |
|
2164 }; |
|
2165 // If the stream is destroyed the Listeners have or will be |
|
2166 // removed. |
|
2167 if (!IsDestroyed()) { |
|
2168 GraphImpl()->AppendMessage(new Message(this, aListener)); |
|
2169 } |
|
2170 } |
|
2171 |
|
2172 void |
|
2173 MediaStream::RunAfterPendingUpdates(nsRefPtr<nsIRunnable> aRunnable) |
|
2174 { |
|
2175 MOZ_ASSERT(NS_IsMainThread()); |
|
2176 MediaStreamGraphImpl* graph = GraphImpl(); |
|
2177 |
|
2178 // Special case when a non-realtime graph has not started, to ensure the |
|
2179 // runnable will run in finite time. |
|
2180 if (!(graph->mRealtime || graph->mNonRealtimeProcessing)) { |
|
2181 aRunnable->Run(); |
|
2182 } |
|
2183 |
|
2184 class Message : public ControlMessage { |
|
2185 public: |
|
2186 explicit Message(MediaStream* aStream, |
|
2187 already_AddRefed<nsIRunnable> aRunnable) |
|
2188 : ControlMessage(aStream) |
|
2189 , mRunnable(aRunnable) {} |
|
2190 virtual void Run() MOZ_OVERRIDE |
|
2191 { |
|
2192 mStream->Graph()-> |
|
2193 DispatchToMainThreadAfterStreamStateUpdate(mRunnable.forget()); |
|
2194 } |
|
2195 virtual void RunDuringShutdown() MOZ_OVERRIDE |
|
2196 { |
|
2197 // Don't run mRunnable now as it may call AppendMessage() which would |
|
2198 // assume that there are no remaining controlMessagesToRunDuringShutdown. |
|
2199 MOZ_ASSERT(NS_IsMainThread()); |
|
2200 NS_DispatchToCurrentThread(mRunnable); |
|
2201 } |
|
2202 private: |
|
2203 nsRefPtr<nsIRunnable> mRunnable; |
|
2204 }; |
|
2205 |
|
2206 graph->AppendMessage(new Message(this, aRunnable.forget())); |
|
2207 } |
|
2208 |
|
2209 void |
|
2210 MediaStream::SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled) |
|
2211 { |
|
2212 if (aEnabled) { |
|
2213 mDisabledTrackIDs.RemoveElement(aTrackID); |
|
2214 } else { |
|
2215 if (!mDisabledTrackIDs.Contains(aTrackID)) { |
|
2216 mDisabledTrackIDs.AppendElement(aTrackID); |
|
2217 } |
|
2218 } |
|
2219 } |
|
2220 |
|
2221 void |
|
2222 MediaStream::SetTrackEnabled(TrackID aTrackID, bool aEnabled) |
|
2223 { |
|
2224 class Message : public ControlMessage { |
|
2225 public: |
|
2226 Message(MediaStream* aStream, TrackID aTrackID, bool aEnabled) : |
|
2227 ControlMessage(aStream), mTrackID(aTrackID), mEnabled(aEnabled) {} |
|
2228 virtual void Run() |
|
2229 { |
|
2230 mStream->SetTrackEnabledImpl(mTrackID, mEnabled); |
|
2231 } |
|
2232 TrackID mTrackID; |
|
2233 bool mEnabled; |
|
2234 }; |
|
2235 GraphImpl()->AppendMessage(new Message(this, aTrackID, aEnabled)); |
|
2236 } |
|
2237 |
|
2238 void |
|
2239 MediaStream::ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment, MediaSegment* aRawSegment) |
|
2240 { |
|
2241 // mMutex must be owned here if this is a SourceMediaStream |
|
2242 if (!mDisabledTrackIDs.Contains(aTrackID)) { |
|
2243 return; |
|
2244 } |
|
2245 aSegment->ReplaceWithDisabled(); |
|
2246 if (aRawSegment) { |
|
2247 aRawSegment->ReplaceWithDisabled(); |
|
2248 } |
|
2249 } |
|
2250 |
|
void
SourceMediaStream::DestroyImpl()
{
  // Hold mMutex while mGraph is reset so that other threads holding mMutex
  // can null-check mGraph and know that the graph will not be destroyed
  // out from under them.
  MutexAutoLock lock(mMutex);
  MediaStream::DestroyImpl();
}
|
2259 |
|
2260 void |
|
2261 SourceMediaStream::SetPullEnabled(bool aEnabled) |
|
2262 { |
|
2263 MutexAutoLock lock(mMutex); |
|
2264 mPullEnabled = aEnabled; |
|
2265 if (mPullEnabled && GraphImpl()) { |
|
2266 GraphImpl()->EnsureNextIteration(); |
|
2267 } |
|
2268 } |
|
2269 |
|
2270 void |
|
2271 SourceMediaStream::AddTrack(TrackID aID, TrackRate aRate, TrackTicks aStart, |
|
2272 MediaSegment* aSegment) |
|
2273 { |
|
2274 MutexAutoLock lock(mMutex); |
|
2275 TrackData* data = mUpdateTracks.AppendElement(); |
|
2276 data->mID = aID; |
|
2277 data->mInputRate = aRate; |
|
2278 // We resample all audio input tracks to the sample rate of the audio mixer. |
|
2279 data->mOutputRate = aSegment->GetType() == MediaSegment::AUDIO ? |
|
2280 GraphImpl()->AudioSampleRate() : aRate; |
|
2281 data->mStart = aStart; |
|
2282 data->mCommands = TRACK_CREATE; |
|
2283 data->mData = aSegment; |
|
2284 data->mHaveEnough = false; |
|
2285 if (auto graph = GraphImpl()) { |
|
2286 graph->EnsureNextIteration(); |
|
2287 } |
|
2288 } |
|
2289 |
|
/**
 * Resamples aSegment from the track's input rate to the graph's audio
 * sample rate, lazily creating the track's Speex resampler on first
 * non-silent data. No-op for non-audio segments or matching rates.
 */
void
SourceMediaStream::ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSegment* aSegment)
{
  if (aSegment->GetType() != MediaSegment::AUDIO ||
      aTrackData->mInputRate == GraphImpl()->AudioSampleRate()) {
    return;
  }
  AudioSegment* segment = static_cast<AudioSegment*>(aSegment);
  if (!aTrackData->mResampler) {
    int channels = segment->ChannelCount();

    // If this segment is just silence, we delay instantiating the resampler.
    if (channels) {
      SpeexResamplerState* state = speex_resampler_init(channels,
                                                        aTrackData->mInputRate,
                                                        GraphImpl()->AudioSampleRate(),
                                                        SPEEX_RESAMPLER_QUALITY_DEFAULT,
                                                        nullptr);
      if (!state) {
        // NOTE(review): on init failure we silently return, leaving this
        // segment at its input rate — confirm downstream tolerates that.
        return;
      }
      aTrackData->mResampler.own(state);
    }
  }
  segment->ResampleChunks(aTrackData->mResampler);
}
|
2316 |
|
/**
 * Appends media to the pending update for track aID. Consumes the contents
 * of aSegment either way (moved into the track's pending data, or cleared).
 * aRawSegment, if given, is the pre-resampling data handed to direct
 * listeners. Returns true if the data was queued for the graph.
 */
bool
SourceMediaStream::AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegment *aRawSegment)
{
  MutexAutoLock lock(mMutex);
  // ::EndAllTrackAndFinished() can end these before the sources notice
  bool appended = false;
  auto graph = GraphImpl();
  if (!mFinished && graph) {
    TrackData *track = FindDataForTrack(aID);
    if (track) {
      // Data goes into mData, and on the next iteration of the MSG moves
      // into the track's segment after NotifyQueuedTrackChanges(). This adds
      // 0-10ms of delay before data gets to direct listeners.
      // Indirect listeners (via subsequent TrackUnion nodes) are synced to
      // playout time, and so can be delayed by buffering.

      // Apply track disabling before notifying any consumers directly
      // or inserting into the graph
      ApplyTrackDisabling(aID, aSegment, aRawSegment);

      ResampleAudioToGraphSampleRate(track, aSegment);

      // Must notify first, since AppendFrom() will empty out aSegment
      NotifyDirectConsumers(track, aRawSegment ? aRawSegment : aSegment);
      track->mData->AppendFrom(aSegment); // note: aSegment is now dead
      appended = true;
      graph->EnsureNextIteration();
    } else {
      // Unknown track (e.g. already ended): drop the data.
      aSegment->Clear();
    }
  }
  return appended;
}
|
2350 |
|
/**
 * Delivers freshly appended (pre-buffering) data for aTrack to every
 * registered direct listener. Caller must hold mMutex.
 */
void
SourceMediaStream::NotifyDirectConsumers(TrackData *aTrack,
                                         MediaSegment *aSegment)
{
  // Call with mMutex locked
  MOZ_ASSERT(aTrack);

  for (uint32_t j = 0; j < mDirectListeners.Length(); ++j) {
    MediaStreamDirectListener* l = mDirectListeners[j];
    TrackTicks offset = 0; // FIX! need a separate TrackTicks.... or the end of the internal buffer
    l->NotifyRealtimeData(static_cast<MediaStreamGraph*>(GraphImpl()), aTrack->mID, aTrack->mOutputRate,
                          offset, aTrack->mCommands, *aSegment);
  }
}
|
2365 |
|
2366 void |
|
2367 SourceMediaStream::AddDirectListener(MediaStreamDirectListener* aListener) |
|
2368 { |
|
2369 MutexAutoLock lock(mMutex); |
|
2370 mDirectListeners.AppendElement(aListener); |
|
2371 } |
|
2372 |
|
2373 void |
|
2374 SourceMediaStream::RemoveDirectListener(MediaStreamDirectListener* aListener) |
|
2375 { |
|
2376 MutexAutoLock lock(mMutex); |
|
2377 mDirectListeners.RemoveElement(aListener); |
|
2378 } |
|
2379 |
|
2380 bool |
|
2381 SourceMediaStream::HaveEnoughBuffered(TrackID aID) |
|
2382 { |
|
2383 MutexAutoLock lock(mMutex); |
|
2384 TrackData *track = FindDataForTrack(aID); |
|
2385 if (track) { |
|
2386 return track->mHaveEnough; |
|
2387 } |
|
2388 return false; |
|
2389 } |
|
2390 |
|
/**
 * Arranges for aSignalRunnable to be dispatched to aSignalThread once track
 * aID no longer has "enough" buffered data. If the track is unknown or is
 * already below the threshold, dispatches immediately. At most one pending
 * notification is kept per track.
 */
void
SourceMediaStream::DispatchWhenNotEnoughBuffered(TrackID aID,
    nsIEventTarget* aSignalThread, nsIRunnable* aSignalRunnable)
{
  MutexAutoLock lock(mMutex);
  TrackData* data = FindDataForTrack(aID);
  if (!data) {
    aSignalThread->Dispatch(aSignalRunnable, 0);
    return;
  }

  if (data->mHaveEnough) {
    // Queue the notification for when mHaveEnough next flips to false.
    if (data->mDispatchWhenNotEnough.IsEmpty()) {
      data->mDispatchWhenNotEnough.AppendElement()->Init(aSignalThread, aSignalRunnable);
    }
  } else {
    aSignalThread->Dispatch(aSignalRunnable, 0);
  }
}
|
2410 |
|
2411 void |
|
2412 SourceMediaStream::EndTrack(TrackID aID) |
|
2413 { |
|
2414 MutexAutoLock lock(mMutex); |
|
2415 // ::EndAllTrackAndFinished() can end these before the sources call this |
|
2416 if (!mFinished) { |
|
2417 TrackData *track = FindDataForTrack(aID); |
|
2418 if (track) { |
|
2419 track->mCommands |= TRACK_END; |
|
2420 } |
|
2421 } |
|
2422 if (auto graph = GraphImpl()) { |
|
2423 graph->EnsureNextIteration(); |
|
2424 } |
|
2425 } |
|
2426 |
|
/**
 * Declares that no new tracks will start before aKnownTime. Must be
 * monotonically non-decreasing. Kicks the graph so it can advance.
 */
void
SourceMediaStream::AdvanceKnownTracksTime(StreamTime aKnownTime)
{
  MutexAutoLock lock(mMutex);
  MOZ_ASSERT(aKnownTime >= mUpdateKnownTracksTime);
  mUpdateKnownTracksTime = aKnownTime;
  if (auto graph = GraphImpl()) {
    graph->EnsureNextIteration();
  }
}
|
2437 |
|
/**
 * Marks this source as finished (no more data will be appended) and kicks
 * the graph. Caller must already hold mMutex.
 */
void
SourceMediaStream::FinishWithLockHeld()
{
  mMutex.AssertCurrentThreadOwns();
  mUpdateFinished = true;
  if (auto graph = GraphImpl()) {
    graph->EnsureNextIteration();
  }
}
|
2447 |
|
2448 void |
|
2449 SourceMediaStream::EndAllTrackAndFinish() |
|
2450 { |
|
2451 MutexAutoLock lock(mMutex); |
|
2452 for (uint32_t i = 0; i < mUpdateTracks.Length(); ++i) { |
|
2453 SourceMediaStream::TrackData* data = &mUpdateTracks[i]; |
|
2454 data->mCommands |= TRACK_END; |
|
2455 } |
|
2456 FinishWithLockHeld(); |
|
2457 // we will call NotifyFinished() to let GetUserMedia know |
|
2458 } |
|
2459 |
|
/**
 * Returns how many ticks of track aID are buffered beyond the graph's
 * already-computed state time, or 0 if the track or its segment is missing.
 * NOTE(review): unlike most SourceMediaStream methods this does not take
 * mMutex and reads mBuffer — presumably graph-thread only; confirm callers.
 */
TrackTicks
SourceMediaStream::GetBufferedTicks(TrackID aID)
{
  StreamBuffer::Track* track  = mBuffer.FindTrack(aID);
  if (track) {
    MediaSegment* segment = track->GetSegment();
    if (segment) {
      return segment->GetDuration() -
          track->TimeToTicksRoundDown(
              GraphTimeToStreamTime(GraphImpl()->mStateComputedTime));
    }
  }
  return 0;
}
|
2474 |
|
// Marks this source as one whose audio must go through the mixer.
void
SourceMediaStream::RegisterForAudioMixing()
{
  MutexAutoLock lock(mMutex);
  mNeedsMixing = true;
}
|
2481 |
|
// Returns whether RegisterForAudioMixing() has been called on this source.
bool
SourceMediaStream::NeedsMixing()
{
  MutexAutoLock lock(mMutex);
  return mNeedsMixing;
}
|
2488 |
|
/**
 * Hooks this port up between its source and destination streams and counts
 * it in the graph's port total. Runs as part of the AllocateInputPort
 * control message on the graph thread.
 */
void
MediaInputPort::Init()
{
  STREAM_LOG(PR_LOG_DEBUG, ("Adding MediaInputPort %p (from %p to %p) to the graph",
             this, mSource, mDest));
  mSource->AddConsumer(this);
  mDest->AddInput(this);
  // mPortCount decremented via MediaInputPort::Destroy's message
  ++mDest->GraphImpl()->mPortCount;
}
|
2499 |
|
/**
 * Detaches this port from both its streams and nulls them out, then marks
 * the graph's stream ordering dirty. Idempotent: a second call is a no-op.
 */
void
MediaInputPort::Disconnect()
{
  NS_ASSERTION(!mSource == !mDest,
               "mSource must either both be null or both non-null");
  if (!mSource)
    return;

  mSource->RemoveConsumer(this);
  mSource = nullptr;
  mDest->RemoveInput(this);
  mDest = nullptr;

  // Topology changed; the graph must recompute stream processing order.
  GraphImpl()->SetStreamOrderDirty();
}
|
2515 |
|
/**
 * Finds the next interval starting at or after aTime during which the
 * destination stream is unblocked. Returns {GRAPH_TIME_MAX, GRAPH_TIME_MAX,
 * false} if the destination is blocked forever. mInputIsBlocked reports
 * whether the source is blocked at the start of the returned interval;
 * mEnd is clipped to where either stream's blocking state changes.
 */
MediaInputPort::InputInterval
MediaInputPort::GetNextInputInterval(GraphTime aTime)
{
  InputInterval result = { GRAPH_TIME_MAX, GRAPH_TIME_MAX, false };
  GraphTime t = aTime;
  GraphTime end;
  // Skip over periods where the destination is blocked.
  for (;;) {
    if (!mDest->mBlocked.GetAt(t, &end))
      break;
    if (end == GRAPH_TIME_MAX)
      return result;
    t = end;
  }
  result.mStart = t;
  GraphTime sourceEnd;
  result.mInputIsBlocked = mSource->mBlocked.GetAt(t, &sourceEnd);
  result.mEnd = std::min(end, sourceEnd);
  return result;
}
|
2535 |
|
/**
 * Asynchronously tears this port down on the graph thread: disconnects it,
 * decrements the graph's port count, detaches it from the graph, and
 * releases the reference the graph was holding (NS_RELEASE on the raw
 * pointer stored in the message).
 */
void
MediaInputPort::Destroy()
{
  class Message : public ControlMessage {
  public:
    Message(MediaInputPort* aPort)
      : ControlMessage(nullptr), mPort(aPort) {}
    virtual void Run()
    {
      mPort->Disconnect();
      --mPort->GraphImpl()->mPortCount;
      mPort->SetGraphImpl(nullptr);
      NS_RELEASE(mPort);
    }
    virtual void RunDuringShutdown()
    {
      Run();
    }
    // Raw pointer: ownership of the graph's reference is transferred here
    // and released in Run().
    MediaInputPort* mPort;
  };
  GraphImpl()->AppendMessage(new Message(this));
}
|
2558 |
|
// Returns the owning graph as its concrete implementation type.
// Null after Destroy()'s message has run.
MediaStreamGraphImpl*
MediaInputPort::GraphImpl()
{
  return mGraph;
}
|
2564 |
|
MediaStreamGraph*
MediaInputPort::Graph()
{
  // Same pointer as GraphImpl(), exposed as the public base type.
  return mGraph;
}
|
2570 |
|
void
MediaInputPort::SetGraphImpl(MediaStreamGraphImpl* aGraph)
{
  // Only null -> graph (on creation) or graph -> null (on destruction)
  // transitions are allowed; the graph is never swapped for another.
  MOZ_ASSERT(!mGraph || !aGraph, "Should only be set once");
  mGraph = aGraph;
}
|
2577 |
|
already_AddRefed<MediaInputPort>
ProcessedMediaStream::AllocateInputPort(MediaStream* aStream, uint32_t aFlags,
                                        uint16_t aInputNumber, uint16_t aOutputNumber)
{
  // This method creates two references to the MediaInputPort: one for
  // the main thread (returned to the caller), and one for the
  // MediaStreamGraph (released later by MediaInputPort::Destroy()).
  class Message : public ControlMessage {
  public:
    Message(MediaInputPort* aPort)
      : ControlMessage(aPort->GetDestination()),
        mPort(aPort) {}
    virtual void Run()
    {
      mPort->Init();
      // The graph holds its reference implicitly: forget() leaks this
      // nsRefPtr's reference, which MediaInputPort::Destroy() balances
      // with NS_RELEASE.
      mPort->GraphImpl()->SetStreamOrderDirty();
      unused << mPort.forget();
    }
    virtual void RunDuringShutdown()
    {
      // Still transfer the reference during shutdown so it isn't leaked
      // without an owner.
      Run();
    }
    nsRefPtr<MediaInputPort> mPort;
  };
  nsRefPtr<MediaInputPort> port = new MediaInputPort(aStream, this, aFlags,
                                                     aInputNumber, aOutputNumber);
  port->SetGraphImpl(GraphImpl());
  GraphImpl()->AppendMessage(new Message(port));
  return port.forget();
}
|
2608 |
|
void
ProcessedMediaStream::Finish()
{
  // Queue a control message so the stream is marked finished on the graph
  // thread via MediaStreamGraphImpl::FinishStream().
  class Message : public ControlMessage {
  public:
    Message(ProcessedMediaStream* aStream)
      : ControlMessage(aStream) {}
    virtual void Run()
    {
      mStream->GraphImpl()->FinishStream(mStream);
    }
  };
  GraphImpl()->AppendMessage(new Message(this));
}
|
2623 |
|
void
ProcessedMediaStream::SetAutofinish(bool aAutofinish)
{
  // Forward the flag to the graph thread; the actual state lives in the
  // stream and is only touched via SetAutofinishImpl() there.
  class Message : public ControlMessage {
  public:
    Message(ProcessedMediaStream* aStream, bool aAutofinish)
      : ControlMessage(aStream), mAutofinish(aAutofinish) {}
    virtual void Run()
    {
      // mStream is stored as the ControlMessage's generic MediaStream*;
      // it was constructed from a ProcessedMediaStream, so the cast is safe.
      static_cast<ProcessedMediaStream*>(mStream)->SetAutofinishImpl(mAutofinish);
    }
    bool mAutofinish;
  };
  GraphImpl()->AppendMessage(new Message(this, aAutofinish));
}
|
2639 |
|
2640 void |
|
2641 ProcessedMediaStream::DestroyImpl() |
|
2642 { |
|
2643 for (int32_t i = mInputs.Length() - 1; i >= 0; --i) { |
|
2644 mInputs[i]->Disconnect(); |
|
2645 } |
|
2646 MediaStream::DestroyImpl(); |
|
2647 // The stream order is only important if there are connections, in which |
|
2648 // case MediaInputPort::Disconnect() called SetStreamOrderDirty(). |
|
2649 // MediaStreamGraphImpl::RemoveStream() will also call |
|
2650 // SetStreamOrderDirty(), for other reasons. |
|
2651 } |
|
2652 |
|
/**
 * We make the initial mCurrentTime nonzero so that zero times can have
 * special meaning if necessary. Used for both mCurrentTime and
 * mStateComputedTime in the MediaStreamGraphImpl constructor.
 */
static const int32_t INITIAL_CURRENT_TIME = 1;
|
2658 |
|
/**
 * Constructs a graph. aRealtime selects realtime (audio-clock driven) vs
 * offline processing; aSampleRate is the graph's mixing rate.
 * Note mSelfRef: the graph keeps itself alive until Destroy() clears it.
 */
MediaStreamGraphImpl::MediaStreamGraphImpl(bool aRealtime, TrackRate aSampleRate)
  : mCurrentTime(INITIAL_CURRENT_TIME)
  , mStateComputedTime(INITIAL_CURRENT_TIME)
  , mProcessingGraphUpdateIndex(0)
  , mPortCount(0)
  , mMonitor("MediaStreamGraphImpl")
  , mLifecycleState(LIFECYCLE_THREAD_NOT_STARTED)
  , mWaitState(WAITSTATE_RUNNING)
  , mEndTime(GRAPH_TIME_MAX)
  , mSampleRate(aSampleRate)
  , mNeedAnotherIteration(false)
  , mForceShutDown(false)
  , mPostedRunInStableStateEvent(false)
  , mDetectedNotRunning(false)
  , mPostedRunInStableState(false)
  , mRealtime(aRealtime)
  , mNonRealtimeProcessing(false)
  , mStreamOrderDirty(false)
  , mLatencyLog(AsyncLatencyLogger::Get())
  , mMixer(nullptr)
  , mMemoryReportMonitor("MSGIMemory")
  , mSelfRef(MOZ_THIS_IN_INITIALIZER_LIST())
  , mAudioStreamSizes()
  , mNeedsMemoryReport(false)
#ifdef DEBUG
  , mCanRunMessagesSynchronously(false)
#endif
{
#ifdef PR_LOGGING
  // Lazily create the shared log module on first graph construction.
  if (!gMediaStreamGraphLog) {
    gMediaStreamGraphLog = PR_NewLogModule("MediaStreamGraph");
  }
#endif

  // All three timestamps start at "now"; they diverge as the graph runs.
  mCurrentTimeStamp = mInitialTimeStamp = mLastMainThreadUpdate = TimeStamp::Now();

  // Registered weakly; unregistered in Destroy().
  RegisterWeakMemoryReporter(this);
}
|
2697 |
|
void
MediaStreamGraphImpl::Destroy()
{
  // First unregister from memory reporting (registered in the constructor).
  UnregisterWeakMemoryReporter(this);

  // Clear the self reference (taken as mSelfRef in the constructor),
  // which will destroy this instance.
  mSelfRef = nullptr;
}
|
2707 |
|
NS_IMPL_ISUPPORTS(MediaStreamGraphShutdownObserver, nsIObserver)

// True while a MediaStreamGraphShutdownObserver is registered with
// nsContentUtils (set in MediaStreamGraph::GetInstance, cleared when the
// observer fires).
static bool gShutdownObserverRegistered = false;
|
2711 |
|
2712 NS_IMETHODIMP |
|
2713 MediaStreamGraphShutdownObserver::Observe(nsISupports *aSubject, |
|
2714 const char *aTopic, |
|
2715 const char16_t *aData) |
|
2716 { |
|
2717 if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) { |
|
2718 if (gGraph) { |
|
2719 gGraph->ForceShutDown(); |
|
2720 } |
|
2721 nsContentUtils::UnregisterShutdownObserver(this); |
|
2722 gShutdownObserverRegistered = false; |
|
2723 } |
|
2724 return NS_OK; |
|
2725 } |
|
2726 |
|
2727 MediaStreamGraph* |
|
2728 MediaStreamGraph::GetInstance() |
|
2729 { |
|
2730 NS_ASSERTION(NS_IsMainThread(), "Main thread only"); |
|
2731 |
|
2732 if (!gGraph) { |
|
2733 if (!gShutdownObserverRegistered) { |
|
2734 gShutdownObserverRegistered = true; |
|
2735 nsContentUtils::RegisterShutdownObserver(new MediaStreamGraphShutdownObserver()); |
|
2736 } |
|
2737 |
|
2738 AudioStream::InitPreferredSampleRate(); |
|
2739 |
|
2740 gGraph = new MediaStreamGraphImpl(true, AudioStream::PreferredSampleRate()); |
|
2741 |
|
2742 STREAM_LOG(PR_LOG_DEBUG, ("Starting up MediaStreamGraph %p", gGraph)); |
|
2743 } |
|
2744 |
|
2745 return gGraph; |
|
2746 } |
|
2747 |
|
2748 MediaStreamGraph* |
|
2749 MediaStreamGraph::CreateNonRealtimeInstance(TrackRate aSampleRate) |
|
2750 { |
|
2751 NS_ASSERTION(NS_IsMainThread(), "Main thread only"); |
|
2752 |
|
2753 MediaStreamGraphImpl* graph = new MediaStreamGraphImpl(false, aSampleRate); |
|
2754 |
|
2755 return graph; |
|
2756 } |
|
2757 |
|
2758 void |
|
2759 MediaStreamGraph::DestroyNonRealtimeInstance(MediaStreamGraph* aGraph) |
|
2760 { |
|
2761 NS_ASSERTION(NS_IsMainThread(), "Main thread only"); |
|
2762 MOZ_ASSERT(aGraph->IsNonRealtime(), "Should not destroy the global graph here"); |
|
2763 |
|
2764 MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph); |
|
2765 if (graph->mForceShutDown) |
|
2766 return; // already done |
|
2767 |
|
2768 if (!graph->mNonRealtimeProcessing) { |
|
2769 // Start the graph, but don't produce anything |
|
2770 graph->StartNonRealtimeProcessing(1, 0); |
|
2771 } |
|
2772 graph->ForceShutDown(); |
|
2773 } |
|
2774 |
|
// The graph implements nsIMemoryReporter; see CollectReports() below.
NS_IMPL_ISUPPORTS(MediaStreamGraphImpl, nsIMemoryReporter)
|
2776 |
|
2777 struct ArrayClearer |
|
2778 { |
|
2779 ArrayClearer(nsTArray<AudioNodeSizes>& aArray) : mArray(aArray) {} |
|
2780 ~ArrayClearer() { mArray.Clear(); } |
|
2781 nsTArray<AudioNodeSizes>& mArray; |
|
2782 }; |
|
2783 |
|
/**
 * nsIMemoryReporter entry point. Asks the graph thread to gather per-node
 * memory usage into mAudioStreamSizes, waits for it to finish, then reports
 * each entry through aHandleReport.
 */
NS_IMETHODIMP
MediaStreamGraphImpl::CollectReports(nsIHandleReportCallback* aHandleReport,
                                     nsISupports* aData)
{
  // Clears out the report array after we're done with it.
  ArrayClearer reportCleanup(mAudioStreamSizes);

  {
    MonitorAutoLock memoryReportLock(mMemoryReportMonitor);
    mNeedsMemoryReport = true;

    {
      // Wake up the MSG thread.
      MonitorAutoLock monitorLock(mMonitor);
      EnsureImmediateWakeUpLocked(monitorLock);
    }

    // Wait for the report to complete. Loop because the wait may be
    // interrupted (PR_PENDING_INTERRUPT_ERROR); any other failure aborts.
    nsresult rv;
    while ((rv = memoryReportLock.Wait()) != NS_OK) {
      if (PR_GetError() != PR_PENDING_INTERRUPT_ERROR) {
        return rv;
      }
    }
  }

// Helper: emit one KIND_HEAP/UNITS_BYTES report, bailing out on failure.
#define REPORT(_path, _amount, _desc)                                   \
  do {                                                                  \
    nsresult rv;                                                        \
    rv = aHandleReport->Callback(EmptyCString(), _path,                 \
                                 KIND_HEAP, UNITS_BYTES, _amount,       \
                                 NS_LITERAL_CSTRING(_desc), aData);     \
    NS_ENSURE_SUCCESS(rv, rv);                                          \
  } while (0)

  // Report DOM-node, engine and stream usage for each node type gathered
  // by the graph thread.
  for (size_t i = 0; i < mAudioStreamSizes.Length(); i++) {
    const AudioNodeSizes& usage = mAudioStreamSizes[i];
    const char* const nodeType = usage.mNodeType.get();

    nsPrintfCString domNodePath("explicit/webaudio/audio-node/%s/dom-nodes",
                                nodeType);
    REPORT(domNodePath, usage.mDomNode,
           "Memory used by AudioNode DOM objects (Web Audio).");

    nsPrintfCString enginePath("explicit/webaudio/audio-node/%s/engine-objects",
                               nodeType);
    REPORT(enginePath, usage.mEngine,
           "Memory used by AudioNode engine objects (Web Audio).");

    nsPrintfCString streamPath("explicit/webaudio/audio-node/%s/stream-objects",
                               nodeType);
    REPORT(streamPath, usage.mStream,
           "Memory used by AudioNode stream objects (Web Audio).");

  }

#undef REPORT

  return NS_OK;
}
|
2844 |
|
2845 SourceMediaStream* |
|
2846 MediaStreamGraph::CreateSourceStream(DOMMediaStream* aWrapper) |
|
2847 { |
|
2848 SourceMediaStream* stream = new SourceMediaStream(aWrapper); |
|
2849 NS_ADDREF(stream); |
|
2850 MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this); |
|
2851 stream->SetGraphImpl(graph); |
|
2852 graph->AppendMessage(new CreateMessage(stream)); |
|
2853 return stream; |
|
2854 } |
|
2855 |
|
2856 ProcessedMediaStream* |
|
2857 MediaStreamGraph::CreateTrackUnionStream(DOMMediaStream* aWrapper) |
|
2858 { |
|
2859 TrackUnionStream* stream = new TrackUnionStream(aWrapper); |
|
2860 NS_ADDREF(stream); |
|
2861 MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this); |
|
2862 stream->SetGraphImpl(graph); |
|
2863 graph->AppendMessage(new CreateMessage(stream)); |
|
2864 return stream; |
|
2865 } |
|
2866 |
|
2867 AudioNodeExternalInputStream* |
|
2868 MediaStreamGraph::CreateAudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate) |
|
2869 { |
|
2870 MOZ_ASSERT(NS_IsMainThread()); |
|
2871 if (!aSampleRate) { |
|
2872 aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate(); |
|
2873 } |
|
2874 AudioNodeExternalInputStream* stream = new AudioNodeExternalInputStream(aEngine, aSampleRate); |
|
2875 NS_ADDREF(stream); |
|
2876 MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this); |
|
2877 stream->SetGraphImpl(graph); |
|
2878 graph->AppendMessage(new CreateMessage(stream)); |
|
2879 return stream; |
|
2880 } |
|
2881 |
|
2882 AudioNodeStream* |
|
2883 MediaStreamGraph::CreateAudioNodeStream(AudioNodeEngine* aEngine, |
|
2884 AudioNodeStreamKind aKind, |
|
2885 TrackRate aSampleRate) |
|
2886 { |
|
2887 MOZ_ASSERT(NS_IsMainThread()); |
|
2888 if (!aSampleRate) { |
|
2889 aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate(); |
|
2890 } |
|
2891 AudioNodeStream* stream = new AudioNodeStream(aEngine, aKind, aSampleRate); |
|
2892 NS_ADDREF(stream); |
|
2893 MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this); |
|
2894 stream->SetGraphImpl(graph); |
|
2895 if (aEngine->HasNode()) { |
|
2896 stream->SetChannelMixingParametersImpl(aEngine->NodeMainThread()->ChannelCount(), |
|
2897 aEngine->NodeMainThread()->ChannelCountModeValue(), |
|
2898 aEngine->NodeMainThread()->ChannelInterpretationValue()); |
|
2899 } |
|
2900 graph->AppendMessage(new CreateMessage(stream)); |
|
2901 return stream; |
|
2902 } |
|
2903 |
|
bool
MediaStreamGraph::IsNonRealtime() const
{
  // gGraph is the singleton realtime graph created by GetInstance();
  // every other graph comes from CreateNonRealtimeInstance().
  return this != gGraph;
}
|
2909 |
|
2910 void |
|
2911 MediaStreamGraph::StartNonRealtimeProcessing(TrackRate aRate, uint32_t aTicksToProcess) |
|
2912 { |
|
2913 NS_ASSERTION(NS_IsMainThread(), "main thread only"); |
|
2914 |
|
2915 MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this); |
|
2916 NS_ASSERTION(!graph->mRealtime, "non-realtime only"); |
|
2917 |
|
2918 if (graph->mNonRealtimeProcessing) |
|
2919 return; |
|
2920 graph->mEndTime = graph->mCurrentTime + TicksToTimeRoundUp(aRate, aTicksToProcess); |
|
2921 graph->mNonRealtimeProcessing = true; |
|
2922 graph->EnsureRunInStableState(); |
|
2923 } |
|
2924 |
|
void
ProcessedMediaStream::AddInput(MediaInputPort* aPort)
{
  // Invoked from MediaInputPort::Init() when a port is connected to this
  // stream as its destination.
  mInputs.AppendElement(aPort);
  // A new connection may change the required stream processing order.
  GraphImpl()->SetStreamOrderDirty();
}
|
2931 |
|
2932 } |