--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/content/media/webaudio/AudioContext.cpp Wed Dec 31 06:09:35 2014 +0100
@@ -0,0 +1,693 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "AudioContext.h"
+
+#include "nsPIDOMWindow.h"
+#include "mozilla/ErrorResult.h"
+#include "mozilla/dom/AnalyserNode.h"
+#include "mozilla/dom/AudioContextBinding.h"
+#include "mozilla/dom/HTMLMediaElement.h"
+#include "mozilla/dom/OfflineAudioContextBinding.h"
+#include "mozilla/dom/OwningNonNull.h"
+#include "MediaStreamGraph.h"
+#include "AudioDestinationNode.h"
+#include "AudioBufferSourceNode.h"
+#include "AudioBuffer.h"
+#include "GainNode.h"
+#include "MediaElementAudioSourceNode.h"
+#include "MediaStreamAudioSourceNode.h"
+#include "DelayNode.h"
+#include "PannerNode.h"
+#include "AudioListener.h"
+#include "DynamicsCompressorNode.h"
+#include "BiquadFilterNode.h"
+#include "ScriptProcessorNode.h"
+#include "ChannelMergerNode.h"
+#include "ChannelSplitterNode.h"
+#include "MediaStreamAudioDestinationNode.h"
+#include "WaveShaperNode.h"
+#include "PeriodicWave.h"
+#include "ConvolverNode.h"
+#include "OscillatorNode.h"
+#include "nsNetUtil.h"
+#include "AudioStream.h"
+
+namespace mozilla {
+namespace dom {
+
+NS_IMPL_CYCLE_COLLECTION_CLASS(AudioContext)
+
+NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(AudioContext)
+  NS_IMPL_CYCLE_COLLECTION_UNLINK(mDestination)
+  NS_IMPL_CYCLE_COLLECTION_UNLINK(mListener)
+  if (!tmp->mIsStarted) {
+    NS_IMPL_CYCLE_COLLECTION_UNLINK(mActiveNodes)
+  }
+NS_IMPL_CYCLE_COLLECTION_UNLINK_END_INHERITED(DOMEventTargetHelper)
+
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(AudioContext,
+                                                  DOMEventTargetHelper)
+  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mDestination)
+  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mListener)
+  if (!tmp->mIsStarted) {
+    MOZ_ASSERT(tmp->mIsOffline,
+               "Online AudioContexts should always be started");
+    NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mActiveNodes)
+  }
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
+
+NS_IMPL_ADDREF_INHERITED(AudioContext, DOMEventTargetHelper)
+NS_IMPL_RELEASE_INHERITED(AudioContext, DOMEventTargetHelper)
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(AudioContext)
+NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
+
+static float GetSampleRateForAudioContext(bool aIsOffline, float aSampleRate)
+{
+  if (aIsOffline) {
+    return aSampleRate;
+  } else {
+    AudioStream::InitPreferredSampleRate();
+    return static_cast<float>(AudioStream::PreferredSampleRate());
+  }
+}
+
+AudioContext::AudioContext(nsPIDOMWindow* aWindow,
+                           bool aIsOffline,
+                           AudioChannel aChannel,
+                           uint32_t aNumberOfChannels,
+                           uint32_t aLength,
+                           float aSampleRate)
+  : DOMEventTargetHelper(aWindow)
+  , mSampleRate(GetSampleRateForAudioContext(aIsOffline, aSampleRate))
+  , mNumberOfChannels(aNumberOfChannels)
+  , mNodeCount(0)
+  , mIsOffline(aIsOffline)
+  , mIsStarted(!aIsOffline)
+  , mIsShutDown(false)
+{
+  aWindow->AddAudioContext(this);
+
+  // Note: AudioDestinationNode needs an AudioContext that must already be
+  // bound to the window.
+  mDestination = new AudioDestinationNode(this, aIsOffline, aChannel,
+                                          aNumberOfChannels, aLength, aSampleRate);
+  // We skip calling SetIsOnlyNodeForContext during mDestination's constructor,
+  // because we can only call SetIsOnlyNodeForContext after mDestination has
+  // been set up.
+  mDestination->SetIsOnlyNodeForContext(true);
+}
+
+AudioContext::~AudioContext()
+{
+  nsPIDOMWindow* window = GetOwner();
+  if (window) {
+    window->RemoveAudioContext(this);
+  }
+
+  UnregisterWeakMemoryReporter(this);
+}
+
+JSObject*
+AudioContext::WrapObject(JSContext* aCx)
+{
+  if (mIsOffline) {
+    return OfflineAudioContextBinding::Wrap(aCx, this);
+  } else {
+    return AudioContextBinding::Wrap(aCx, this);
+  }
+}
+
+/* static */ already_AddRefed<AudioContext>
+AudioContext::Constructor(const GlobalObject& aGlobal,
+                          ErrorResult& aRv)
+{
+  nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aGlobal.GetAsSupports());
+  if (!window) {
+    aRv.Throw(NS_ERROR_FAILURE);
+    return nullptr;
+  }
+
+  nsRefPtr<AudioContext> object = new AudioContext(window, false);
+
+  RegisterWeakMemoryReporter(object);
+
+  return object.forget();
+}
+
+/* static */ already_AddRefed<AudioContext>
+AudioContext::Constructor(const GlobalObject& aGlobal,
+                          AudioChannel aChannel,
+                          ErrorResult& aRv)
+{
+  nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aGlobal.GetAsSupports());
+  if (!window) {
+    aRv.Throw(NS_ERROR_FAILURE);
+    return nullptr;
+  }
+
+  nsRefPtr<AudioContext> object = new AudioContext(window, false, aChannel);
+
+  RegisterWeakMemoryReporter(object);
+
+  return object.forget();
+}
+
+/* static */ already_AddRefed<AudioContext>
+AudioContext::Constructor(const GlobalObject& aGlobal,
+                          uint32_t aNumberOfChannels,
+                          uint32_t aLength,
+                          float aSampleRate,
+                          ErrorResult& aRv)
+{
+  nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aGlobal.GetAsSupports());
+  if (!window) {
+    aRv.Throw(NS_ERROR_FAILURE);
+    return nullptr;
+  }
+
+  if (aNumberOfChannels == 0 ||
+      aNumberOfChannels > WebAudioUtils::MaxChannelCount ||
+      aLength == 0 ||
+      aSampleRate < WebAudioUtils::MinSampleRate ||
+      aSampleRate > WebAudioUtils::MaxSampleRate) {
+    // The DOM binding protects us against infinity and NaN
+    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
+    return nullptr;
+  }
+
+  nsRefPtr<AudioContext> object = new AudioContext(window,
+                                                   true,
+                                                   AudioChannel::Normal,
+                                                   aNumberOfChannels,
+                                                   aLength,
+                                                   aSampleRate);
+
+  RegisterWeakMemoryReporter(object);
+
+  return object.forget();
+}
+
+already_AddRefed<AudioBufferSourceNode>
+AudioContext::CreateBufferSource()
+{
+  nsRefPtr<AudioBufferSourceNode> bufferNode =
+    new AudioBufferSourceNode(this);
+  return bufferNode.forget();
+}
+
+already_AddRefed<AudioBuffer>
+AudioContext::CreateBuffer(JSContext* aJSContext, uint32_t aNumberOfChannels,
+                           uint32_t aLength, float aSampleRate,
+                           ErrorResult& aRv)
+{
+  if (!aNumberOfChannels) {
+    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
+    return nullptr;
+  }
+
+  return AudioBuffer::Create(this, aNumberOfChannels, aLength,
+                             aSampleRate, aJSContext, aRv);
+}
+
+namespace {
+
+bool IsValidBufferSize(uint32_t aBufferSize) {
+  switch (aBufferSize) {
+  case 0: // let the implementation choose the buffer size
+  case 256:
+  case 512:
+  case 1024:
+  case 2048:
+  case 4096:
+  case 8192:
+  case 16384:
+    return true;
+  default:
+    return false;
+  }
+}
+
+}
+
+already_AddRefed<MediaStreamAudioDestinationNode>
+AudioContext::CreateMediaStreamDestination(ErrorResult& aRv)
+{
+  if (mIsOffline) {
+    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
+    return nullptr;
+  }
+
+  nsRefPtr<MediaStreamAudioDestinationNode> node =
+    new MediaStreamAudioDestinationNode(this);
+  return node.forget();
+}
+
+already_AddRefed<ScriptProcessorNode>
+AudioContext::CreateScriptProcessor(uint32_t aBufferSize,
+                                    uint32_t aNumberOfInputChannels,
+                                    uint32_t aNumberOfOutputChannels,
+                                    ErrorResult& aRv)
+{
+  if ((aNumberOfInputChannels == 0 && aNumberOfOutputChannels == 0) ||
+      aNumberOfInputChannels > WebAudioUtils::MaxChannelCount ||
+      aNumberOfOutputChannels > WebAudioUtils::MaxChannelCount ||
+      !IsValidBufferSize(aBufferSize)) {
+    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
+    return nullptr;
+  }
+
+  nsRefPtr<ScriptProcessorNode> scriptProcessor =
+    new ScriptProcessorNode(this, aBufferSize, aNumberOfInputChannels,
+                            aNumberOfOutputChannels);
+  return scriptProcessor.forget();
+}
+
+already_AddRefed<AnalyserNode>
+AudioContext::CreateAnalyser()
+{
+  nsRefPtr<AnalyserNode> analyserNode = new AnalyserNode(this);
+  return analyserNode.forget();
+}
+
+already_AddRefed<MediaElementAudioSourceNode>
+AudioContext::CreateMediaElementSource(HTMLMediaElement& aMediaElement,
+                                       ErrorResult& aRv)
+{
+  if (mIsOffline) {
+    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
+    return nullptr;
+  }
+  nsRefPtr<DOMMediaStream> stream = aMediaElement.MozCaptureStream(aRv);
+  if (aRv.Failed()) {
+    return nullptr;
+  }
+  nsRefPtr<MediaElementAudioSourceNode> mediaElementAudioSourceNode =
+    new MediaElementAudioSourceNode(this, stream);
+  return mediaElementAudioSourceNode.forget();
+}
+
+already_AddRefed<MediaStreamAudioSourceNode>
+AudioContext::CreateMediaStreamSource(DOMMediaStream& aMediaStream,
+                                      ErrorResult& aRv)
+{
+  if (mIsOffline) {
+    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
+    return nullptr;
+  }
+  nsRefPtr<MediaStreamAudioSourceNode> mediaStreamAudioSourceNode =
+    new MediaStreamAudioSourceNode(this, &aMediaStream);
+  return mediaStreamAudioSourceNode.forget();
+}
+
+already_AddRefed<GainNode>
+AudioContext::CreateGain()
+{
+  nsRefPtr<GainNode> gainNode = new GainNode(this);
+  return gainNode.forget();
+}
+
+already_AddRefed<WaveShaperNode>
+AudioContext::CreateWaveShaper()
+{
+  nsRefPtr<WaveShaperNode> waveShaperNode = new WaveShaperNode(this);
+  return waveShaperNode.forget();
+}
+
+already_AddRefed<DelayNode>
+AudioContext::CreateDelay(double aMaxDelayTime, ErrorResult& aRv)
+{
+  if (aMaxDelayTime > 0. && aMaxDelayTime < 180.) {
+    nsRefPtr<DelayNode> delayNode = new DelayNode(this, aMaxDelayTime);
+    return delayNode.forget();
+  }
+  aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
+  return nullptr;
+}
+
+already_AddRefed<PannerNode>
+AudioContext::CreatePanner()
+{
+  nsRefPtr<PannerNode> pannerNode = new PannerNode(this);
+  mPannerNodes.PutEntry(pannerNode);
+  return pannerNode.forget();
+}
+
+already_AddRefed<ConvolverNode>
+AudioContext::CreateConvolver()
+{
+  nsRefPtr<ConvolverNode> convolverNode = new ConvolverNode(this);
+  return convolverNode.forget();
+}
+
+already_AddRefed<ChannelSplitterNode>
+AudioContext::CreateChannelSplitter(uint32_t aNumberOfOutputs, ErrorResult& aRv)
+{
+  if (aNumberOfOutputs == 0 ||
+      aNumberOfOutputs > WebAudioUtils::MaxChannelCount) {
+    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
+    return nullptr;
+  }
+
+  nsRefPtr<ChannelSplitterNode> splitterNode =
+    new ChannelSplitterNode(this, aNumberOfOutputs);
+  return splitterNode.forget();
+}
+
+already_AddRefed<ChannelMergerNode>
+AudioContext::CreateChannelMerger(uint32_t aNumberOfInputs, ErrorResult& aRv)
+{
+  if (aNumberOfInputs == 0 ||
+      aNumberOfInputs > WebAudioUtils::MaxChannelCount) {
+    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
+    return nullptr;
+  }
+
+  nsRefPtr<ChannelMergerNode> mergerNode =
+    new ChannelMergerNode(this, aNumberOfInputs);
+  return mergerNode.forget();
+}
+
+already_AddRefed<DynamicsCompressorNode>
+AudioContext::CreateDynamicsCompressor()
+{
+  nsRefPtr<DynamicsCompressorNode> compressorNode =
+    new DynamicsCompressorNode(this);
+  return compressorNode.forget();
+}
+
+already_AddRefed<BiquadFilterNode>
+AudioContext::CreateBiquadFilter()
+{
+  nsRefPtr<BiquadFilterNode> filterNode =
+    new BiquadFilterNode(this);
+  return filterNode.forget();
+}
+
+already_AddRefed<OscillatorNode>
+AudioContext::CreateOscillator()
+{
+  nsRefPtr<OscillatorNode> oscillatorNode =
+    new OscillatorNode(this);
+  return oscillatorNode.forget();
+}
+
+already_AddRefed<PeriodicWave>
+AudioContext::CreatePeriodicWave(const Float32Array& aRealData,
+                                 const Float32Array& aImagData,
+                                 ErrorResult& aRv)
+{
+  aRealData.ComputeLengthAndData();
+  aImagData.ComputeLengthAndData();
+
+  if (aRealData.Length() != aImagData.Length() ||
+      aRealData.Length() == 0 ||
+      aRealData.Length() > 4096) {
+    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
+    return nullptr;
+  }
+
+  nsRefPtr<PeriodicWave> periodicWave =
+    new PeriodicWave(this, aRealData.Data(), aImagData.Data(),
+                     aImagData.Length(), aRv);
+  if (aRv.Failed()) {
+    return nullptr;
+  }
+  return periodicWave.forget();
+}
+
+AudioListener*
+AudioContext::Listener()
+{
+  if (!mListener) {
+    mListener = new AudioListener(this);
+  }
+  return mListener;
+}
+
+void
+AudioContext::DecodeAudioData(const ArrayBuffer& aBuffer,
+                              DecodeSuccessCallback& aSuccessCallback,
+                              const Optional<OwningNonNull<DecodeErrorCallback> >& aFailureCallback)
+{
+  AutoJSAPI jsapi;
+  JSContext* cx = jsapi.cx();
+  JSAutoCompartment ac(cx, aBuffer.Obj());
+
+  aBuffer.ComputeLengthAndData();
+
+  // Neuter the array buffer
+  size_t length = aBuffer.Length();
+  JS::RootedObject obj(cx, aBuffer.Obj());
+
+  uint8_t* data = static_cast<uint8_t*>(JS_StealArrayBufferContents(cx, obj));
+
+  // Sniff the content of the media.
+  // Failed type sniffing will be handled by AsyncDecodeMedia.
+  nsAutoCString contentType;
+  NS_SniffContent(NS_DATA_SNIFFER_CATEGORY, nullptr, data, length, contentType);
+
+  nsRefPtr<DecodeErrorCallback> failureCallback;
+  if (aFailureCallback.WasPassed()) {
+    failureCallback = &aFailureCallback.Value();
+  }
+  nsRefPtr<WebAudioDecodeJob> job(
+    new WebAudioDecodeJob(contentType, this,
+                          &aSuccessCallback, failureCallback));
+  mDecoder.AsyncDecodeMedia(contentType.get(), data, length, *job);
+  // Transfer the ownership to mDecodeJobs
+  mDecodeJobs.AppendElement(job);
+}
+
+void
+AudioContext::RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob)
+{
+  mDecodeJobs.RemoveElement(aDecodeJob);
+}
+
+void
+AudioContext::RegisterActiveNode(AudioNode* aNode)
+{
+  if (!mIsShutDown) {
+    mActiveNodes.PutEntry(aNode);
+  }
+}
+
+void
+AudioContext::UnregisterActiveNode(AudioNode* aNode)
+{
+  mActiveNodes.RemoveEntry(aNode);
+}
+
+void
+AudioContext::UnregisterAudioBufferSourceNode(AudioBufferSourceNode* aNode)
+{
+  UpdatePannerSource();
+}
+
+void
+AudioContext::UnregisterPannerNode(PannerNode* aNode)
+{
+  mPannerNodes.RemoveEntry(aNode);
+  if (mListener) {
+    mListener->UnregisterPannerNode(aNode);
+  }
+}
+
+static PLDHashOperator
+FindConnectedSourcesOn(nsPtrHashKey<PannerNode>* aEntry, void* aData)
+{
+  aEntry->GetKey()->FindConnectedSources();
+  return PL_DHASH_NEXT;
+}
+
+void
+AudioContext::UpdatePannerSource()
+{
+  mPannerNodes.EnumerateEntries(FindConnectedSourcesOn, nullptr);
+}
+
+uint32_t
+AudioContext::MaxChannelCount() const
+{
+  return mIsOffline ? mNumberOfChannels : AudioStream::MaxNumberOfChannels();
+}
+
+MediaStreamGraph*
+AudioContext::Graph() const
+{
+  return Destination()->Stream()->Graph();
+}
+
+MediaStream*
+AudioContext::DestinationStream() const
+{
+  if (Destination()) {
+    return Destination()->Stream();
+  }
+  return nullptr;
+}
+
+double
+AudioContext::CurrentTime() const
+{
+  return MediaTimeToSeconds(Destination()->Stream()->GetCurrentTime()) +
+         ExtraCurrentTime();
+}
+
+void
+AudioContext::Shutdown()
+{
+  mIsShutDown = true;
+
+  // We mute rather than suspending, because the delay between the ::Shutdown
+  // call and the CC would make us overbuffer in the MediaStreamGraph.
+  // See bug 936784 for details.
+  if (!mIsOffline) {
+    Mute();
+  }
+
+  mDecoder.Shutdown();
+
+  // Release references to active nodes.
+  // Active AudioNodes don't unregister in destructors, at which point the
+  // Node is already unregistered.
+  mActiveNodes.Clear();
+
+  // For offline contexts, we can destroy the MediaStreamGraph at this point.
+  if (mIsOffline && mDestination) {
+    mDestination->OfflineShutdown();
+  }
+}
+
+void
+AudioContext::Suspend()
+{
+  MediaStream* ds = DestinationStream();
+  if (ds) {
+    ds->ChangeExplicitBlockerCount(1);
+  }
+}
+
+void
+AudioContext::Resume()
+{
+  MediaStream* ds = DestinationStream();
+  if (ds) {
+    ds->ChangeExplicitBlockerCount(-1);
+  }
+}
+
+void
+AudioContext::UpdateNodeCount(int32_t aDelta)
+{
+  bool firstNode = mNodeCount == 0;
+  mNodeCount += aDelta;
+  MOZ_ASSERT(mNodeCount >= 0);
+  // mDestinationNode may be null when we're destroying nodes unlinked by CC
+  if (!firstNode && mDestination) {
+    mDestination->SetIsOnlyNodeForContext(mNodeCount == 1);
+  }
+}
+
+JSContext*
+AudioContext::GetJSContext() const
+{
+  MOZ_ASSERT(NS_IsMainThread());
+
+  nsCOMPtr<nsIScriptGlobalObject> scriptGlobal =
+    do_QueryInterface(GetParentObject());
+  if (!scriptGlobal) {
+    return nullptr;
+  }
+  nsIScriptContext* scriptContext = scriptGlobal->GetContext();
+  if (!scriptContext) {
+    return nullptr;
+  }
+  return scriptContext->GetNativeContext();
+}
+
+void
+AudioContext::StartRendering(ErrorResult& aRv)
+{
+  MOZ_ASSERT(mIsOffline, "This should only be called on OfflineAudioContext");
+  if (mIsStarted) {
+    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
+    return;
+  }
+
+  mIsStarted = true;
+  mDestination->StartRendering();
+}
+
+void
+AudioContext::Mute() const
+{
+  MOZ_ASSERT(!mIsOffline);
+  if (mDestination) {
+    mDestination->Mute();
+  }
+}
+
+void
+AudioContext::Unmute() const
+{
+  MOZ_ASSERT(!mIsOffline);
+  if (mDestination) {
+    mDestination->Unmute();
+  }
+}
+
+AudioChannel
+AudioContext::MozAudioChannelType() const
+{
+  return mDestination->MozAudioChannelType();
+}
+
+void
+AudioContext::SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv)
+{
+  mDestination->SetMozAudioChannelType(aValue, aRv);
+}
+
+size_t
+AudioContext::SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const
+{
+  // AudioNodes are tracked separately because we do not want the AudioContext
+  // to track all of the AudioNodes it creates, so we wouldn't be able to
+  // traverse them from here.
+
+  size_t amount = aMallocSizeOf(this);
+  if (mListener) {
+    amount += mListener->SizeOfIncludingThis(aMallocSizeOf);
+  }
+  amount += mDecoder.SizeOfExcludingThis(aMallocSizeOf);
+  amount += mDecodeJobs.SizeOfExcludingThis(aMallocSizeOf);
+  for (uint32_t i = 0; i < mDecodeJobs.Length(); ++i) {
+    amount += mDecodeJobs[i]->SizeOfExcludingThis(aMallocSizeOf);
+  }
+  amount += mActiveNodes.SizeOfExcludingThis(nullptr, aMallocSizeOf);
+  amount += mPannerNodes.SizeOfExcludingThis(nullptr, aMallocSizeOf);
+  return amount;
+}
+
+NS_IMETHODIMP
+AudioContext::CollectReports(nsIHandleReportCallback* aHandleReport,
+                             nsISupports* aData)
+{
+  int64_t amount = SizeOfIncludingThis(MallocSizeOf);
+  return MOZ_COLLECT_REPORT("explicit/webaudio/audiocontext", KIND_HEAP, UNITS_BYTES,
+                            amount, "Memory used by AudioContext objects (Web Audio).");
+}
+
+double
+AudioContext::ExtraCurrentTime() const
+{
+  return mDestination->ExtraCurrentTime();
+}
+
+}
+}