Tue, 06 Jan 2015 21:39:09 +0100
Conditionally force memory storage according to privacy.thirdparty.isolate.
This solves Tor bug #9701, complying with the disk-avoidance requirement
documented at https://www.torproject.org/projects/torbrowser/design/#disk-avoidance.
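
The change summarized above is not visible in the listing below. As a rough, non-authoritative sketch only, a check that forces memory-only storage when third-party isolation is enabled might look like the following. Preferences::GetInt and nsIRequest::INHIBIT_PERSISTENT_CACHING are existing Gecko APIs; the helper name, its call site, and the pref value semantics are assumptions made for illustration and are not taken from the patch.

// Sketch only: keep a channel's data out of the disk cache when
// privacy.thirdparty.isolate is enabled. ForceMemoryCacheIfIsolated is a
// hypothetical helper; the pref value semantics (0 = off) are assumed.
#include "mozilla/Preferences.h"
#include "nsIChannel.h"
#include "nsIRequest.h"

static void
ForceMemoryCacheIfIsolated(nsIChannel* aChannel)
{
  if (mozilla::Preferences::GetInt("privacy.thirdparty.isolate", 0) > 0) {
    nsLoadFlags flags = 0;
    aChannel->GetLoadFlags(&flags);
    // INHIBIT_PERSISTENT_CACHING keeps the response in memory only,
    // satisfying the disk-avoidance requirement referenced above.
    aChannel->SetLoadFlags(flags | nsIRequest::INHIBIT_PERSISTENT_CACHING);
  }
}

The full source listing follows.
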
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "AudioContext.h"

#include "nsPIDOMWindow.h"
#include "mozilla/ErrorResult.h"
#include "mozilla/dom/AnalyserNode.h"
#include "mozilla/dom/AudioContextBinding.h"
#include "mozilla/dom/HTMLMediaElement.h"
#include "mozilla/dom/OfflineAudioContextBinding.h"
#include "mozilla/dom/OwningNonNull.h"
#include "MediaStreamGraph.h"
#include "AudioDestinationNode.h"
#include "AudioBufferSourceNode.h"
#include "AudioBuffer.h"
#include "GainNode.h"
#include "MediaElementAudioSourceNode.h"
#include "MediaStreamAudioSourceNode.h"
#include "DelayNode.h"
#include "PannerNode.h"
#include "AudioListener.h"
#include "DynamicsCompressorNode.h"
#include "BiquadFilterNode.h"
#include "ScriptProcessorNode.h"
#include "ChannelMergerNode.h"
#include "ChannelSplitterNode.h"
#include "MediaStreamAudioDestinationNode.h"
#include "WaveShaperNode.h"
#include "PeriodicWave.h"
#include "ConvolverNode.h"
#include "OscillatorNode.h"
#include "nsNetUtil.h"
#include "AudioStream.h"

namespace mozilla {
namespace dom {

NS_IMPL_CYCLE_COLLECTION_CLASS(AudioContext)

NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(AudioContext)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mDestination)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mListener)
  if (!tmp->mIsStarted) {
    NS_IMPL_CYCLE_COLLECTION_UNLINK(mActiveNodes)
  }
NS_IMPL_CYCLE_COLLECTION_UNLINK_END_INHERITED(DOMEventTargetHelper)

NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(AudioContext,
                                                  DOMEventTargetHelper)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mDestination)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mListener)
  if (!tmp->mIsStarted) {
    MOZ_ASSERT(tmp->mIsOffline,
               "Online AudioContexts should always be started");
    NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mActiveNodes)
  }
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END

NS_IMPL_ADDREF_INHERITED(AudioContext, DOMEventTargetHelper)
NS_IMPL_RELEASE_INHERITED(AudioContext, DOMEventTargetHelper)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(AudioContext)
NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)

static float GetSampleRateForAudioContext(bool aIsOffline, float aSampleRate)
{
  if (aIsOffline) {
    return aSampleRate;
  } else {
    AudioStream::InitPreferredSampleRate();
    return static_cast<float>(AudioStream::PreferredSampleRate());
  }
}

AudioContext::AudioContext(nsPIDOMWindow* aWindow,
                           bool aIsOffline,
                           AudioChannel aChannel,
                           uint32_t aNumberOfChannels,
                           uint32_t aLength,
                           float aSampleRate)
  : DOMEventTargetHelper(aWindow)
  , mSampleRate(GetSampleRateForAudioContext(aIsOffline, aSampleRate))
  , mNumberOfChannels(aNumberOfChannels)
  , mNodeCount(0)
  , mIsOffline(aIsOffline)
  , mIsStarted(!aIsOffline)
  , mIsShutDown(false)
{
  aWindow->AddAudioContext(this);

  // Note: AudioDestinationNode needs an AudioContext that must already be
  // bound to the window.
  mDestination = new AudioDestinationNode(this, aIsOffline, aChannel,
                                          aNumberOfChannels, aLength, aSampleRate);
  // We skip calling SetIsOnlyNodeForContext during mDestination's constructor,
  // because we can only call SetIsOnlyNodeForContext after mDestination has
  // been set up.
  mDestination->SetIsOnlyNodeForContext(true);
}

AudioContext::~AudioContext()
{
  nsPIDOMWindow* window = GetOwner();
  if (window) {
    window->RemoveAudioContext(this);
  }

  UnregisterWeakMemoryReporter(this);
}

JSObject*
AudioContext::WrapObject(JSContext* aCx)
{
  if (mIsOffline) {
    return OfflineAudioContextBinding::Wrap(aCx, this);
  } else {
    return AudioContextBinding::Wrap(aCx, this);
  }
}

/* static */ already_AddRefed<AudioContext>
AudioContext::Constructor(const GlobalObject& aGlobal,
                          ErrorResult& aRv)
{
  nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aGlobal.GetAsSupports());
  if (!window) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  nsRefPtr<AudioContext> object = new AudioContext(window, false);

  RegisterWeakMemoryReporter(object);

  return object.forget();
}

/* static */ already_AddRefed<AudioContext>
AudioContext::Constructor(const GlobalObject& aGlobal,
                          AudioChannel aChannel,
                          ErrorResult& aRv)
{
  nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aGlobal.GetAsSupports());
  if (!window) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  nsRefPtr<AudioContext> object = new AudioContext(window, false, aChannel);

  RegisterWeakMemoryReporter(object);

  return object.forget();
}

/* static */ already_AddRefed<AudioContext>
AudioContext::Constructor(const GlobalObject& aGlobal,
                          uint32_t aNumberOfChannels,
                          uint32_t aLength,
                          float aSampleRate,
                          ErrorResult& aRv)
{
  nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aGlobal.GetAsSupports());
  if (!window) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  if (aNumberOfChannels == 0 ||
      aNumberOfChannels > WebAudioUtils::MaxChannelCount ||
      aLength == 0 ||
      aSampleRate < WebAudioUtils::MinSampleRate ||
      aSampleRate > WebAudioUtils::MaxSampleRate) {
    // The DOM binding protects us against infinity and NaN
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }

  nsRefPtr<AudioContext> object = new AudioContext(window,
                                                   true,
                                                   AudioChannel::Normal,
                                                   aNumberOfChannels,
                                                   aLength,
                                                   aSampleRate);

  RegisterWeakMemoryReporter(object);

  return object.forget();
}

already_AddRefed<AudioBufferSourceNode>
AudioContext::CreateBufferSource()
{
  nsRefPtr<AudioBufferSourceNode> bufferNode =
    new AudioBufferSourceNode(this);
  return bufferNode.forget();
}

already_AddRefed<AudioBuffer>
AudioContext::CreateBuffer(JSContext* aJSContext, uint32_t aNumberOfChannels,
                           uint32_t aLength, float aSampleRate,
                           ErrorResult& aRv)
{
  if (!aNumberOfChannels) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return nullptr;
  }

  return AudioBuffer::Create(this, aNumberOfChannels, aLength,
                             aSampleRate, aJSContext, aRv);
}

namespace {

bool IsValidBufferSize(uint32_t aBufferSize) {
  switch (aBufferSize) {
  case 0: // let the implementation choose the buffer size
  case 256:
  case 512:
  case 1024:
  case 2048:
  case 4096:
  case 8192:
  case 16384:
    return true;
  default:
    return false;
  }
}

}

already_AddRefed<MediaStreamAudioDestinationNode>
AudioContext::CreateMediaStreamDestination(ErrorResult& aRv)
{
  if (mIsOffline) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }

  nsRefPtr<MediaStreamAudioDestinationNode> node =
    new MediaStreamAudioDestinationNode(this);
  return node.forget();
}

already_AddRefed<ScriptProcessorNode>
AudioContext::CreateScriptProcessor(uint32_t aBufferSize,
                                    uint32_t aNumberOfInputChannels,
                                    uint32_t aNumberOfOutputChannels,
                                    ErrorResult& aRv)
{
  if ((aNumberOfInputChannels == 0 && aNumberOfOutputChannels == 0) ||
      aNumberOfInputChannels > WebAudioUtils::MaxChannelCount ||
      aNumberOfOutputChannels > WebAudioUtils::MaxChannelCount ||
      !IsValidBufferSize(aBufferSize)) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return nullptr;
  }

  nsRefPtr<ScriptProcessorNode> scriptProcessor =
    new ScriptProcessorNode(this, aBufferSize, aNumberOfInputChannels,
                            aNumberOfOutputChannels);
  return scriptProcessor.forget();
}

already_AddRefed<AnalyserNode>
AudioContext::CreateAnalyser()
{
  nsRefPtr<AnalyserNode> analyserNode = new AnalyserNode(this);
  return analyserNode.forget();
}

already_AddRefed<MediaElementAudioSourceNode>
AudioContext::CreateMediaElementSource(HTMLMediaElement& aMediaElement,
                                       ErrorResult& aRv)
{
  if (mIsOffline) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }
  nsRefPtr<DOMMediaStream> stream = aMediaElement.MozCaptureStream(aRv);
  if (aRv.Failed()) {
    return nullptr;
  }
  nsRefPtr<MediaElementAudioSourceNode> mediaElementAudioSourceNode =
    new MediaElementAudioSourceNode(this, stream);
  return mediaElementAudioSourceNode.forget();
}

already_AddRefed<MediaStreamAudioSourceNode>
AudioContext::CreateMediaStreamSource(DOMMediaStream& aMediaStream,
                                      ErrorResult& aRv)
{
  if (mIsOffline) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }
  nsRefPtr<MediaStreamAudioSourceNode> mediaStreamAudioSourceNode =
    new MediaStreamAudioSourceNode(this, &aMediaStream);
  return mediaStreamAudioSourceNode.forget();
}

already_AddRefed<GainNode>
AudioContext::CreateGain()
{
  nsRefPtr<GainNode> gainNode = new GainNode(this);
  return gainNode.forget();
}

already_AddRefed<WaveShaperNode>
AudioContext::CreateWaveShaper()
{
  nsRefPtr<WaveShaperNode> waveShaperNode = new WaveShaperNode(this);
  return waveShaperNode.forget();
}

already_AddRefed<DelayNode>
AudioContext::CreateDelay(double aMaxDelayTime, ErrorResult& aRv)
{
  if (aMaxDelayTime > 0. && aMaxDelayTime < 180.) {
    nsRefPtr<DelayNode> delayNode = new DelayNode(this, aMaxDelayTime);
    return delayNode.forget();
  }
  aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
  return nullptr;
}

already_AddRefed<PannerNode>
AudioContext::CreatePanner()
{
  nsRefPtr<PannerNode> pannerNode = new PannerNode(this);
  mPannerNodes.PutEntry(pannerNode);
  return pannerNode.forget();
}

already_AddRefed<ConvolverNode>
AudioContext::CreateConvolver()
{
  nsRefPtr<ConvolverNode> convolverNode = new ConvolverNode(this);
  return convolverNode.forget();
}

already_AddRefed<ChannelSplitterNode>
AudioContext::CreateChannelSplitter(uint32_t aNumberOfOutputs, ErrorResult& aRv)
{
  if (aNumberOfOutputs == 0 ||
      aNumberOfOutputs > WebAudioUtils::MaxChannelCount) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return nullptr;
  }

  nsRefPtr<ChannelSplitterNode> splitterNode =
    new ChannelSplitterNode(this, aNumberOfOutputs);
  return splitterNode.forget();
}

already_AddRefed<ChannelMergerNode>
AudioContext::CreateChannelMerger(uint32_t aNumberOfInputs, ErrorResult& aRv)
{
  if (aNumberOfInputs == 0 ||
      aNumberOfInputs > WebAudioUtils::MaxChannelCount) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return nullptr;
  }

  nsRefPtr<ChannelMergerNode> mergerNode =
    new ChannelMergerNode(this, aNumberOfInputs);
  return mergerNode.forget();
}

already_AddRefed<DynamicsCompressorNode>
AudioContext::CreateDynamicsCompressor()
{
  nsRefPtr<DynamicsCompressorNode> compressorNode =
    new DynamicsCompressorNode(this);
  return compressorNode.forget();
}

already_AddRefed<BiquadFilterNode>
AudioContext::CreateBiquadFilter()
{
  nsRefPtr<BiquadFilterNode> filterNode =
    new BiquadFilterNode(this);
  return filterNode.forget();
}

already_AddRefed<OscillatorNode>
AudioContext::CreateOscillator()
{
  nsRefPtr<OscillatorNode> oscillatorNode =
    new OscillatorNode(this);
  return oscillatorNode.forget();
}

already_AddRefed<PeriodicWave>
AudioContext::CreatePeriodicWave(const Float32Array& aRealData,
                                 const Float32Array& aImagData,
                                 ErrorResult& aRv)
{
  aRealData.ComputeLengthAndData();
  aImagData.ComputeLengthAndData();

  if (aRealData.Length() != aImagData.Length() ||
      aRealData.Length() == 0 ||
      aRealData.Length() > 4096) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }

  nsRefPtr<PeriodicWave> periodicWave =
    new PeriodicWave(this, aRealData.Data(), aImagData.Data(),
                     aImagData.Length(), aRv);
  if (aRv.Failed()) {
    return nullptr;
  }
  return periodicWave.forget();
}

AudioListener*
AudioContext::Listener()
{
  if (!mListener) {
    mListener = new AudioListener(this);
  }
  return mListener;
}

void
AudioContext::DecodeAudioData(const ArrayBuffer& aBuffer,
                              DecodeSuccessCallback& aSuccessCallback,
                              const Optional<OwningNonNull<DecodeErrorCallback> >& aFailureCallback)
{
  AutoJSAPI jsapi;
  JSContext* cx = jsapi.cx();
  JSAutoCompartment ac(cx, aBuffer.Obj());

  aBuffer.ComputeLengthAndData();

  // Neuter the array buffer
  size_t length = aBuffer.Length();
  JS::RootedObject obj(cx, aBuffer.Obj());

  uint8_t* data = static_cast<uint8_t*>(JS_StealArrayBufferContents(cx, obj));

  // Sniff the content of the media.
  // Failed type sniffing will be handled by AsyncDecodeMedia.
  nsAutoCString contentType;
  NS_SniffContent(NS_DATA_SNIFFER_CATEGORY, nullptr, data, length, contentType);

  nsRefPtr<DecodeErrorCallback> failureCallback;
  if (aFailureCallback.WasPassed()) {
    failureCallback = &aFailureCallback.Value();
  }
  nsRefPtr<WebAudioDecodeJob> job(
    new WebAudioDecodeJob(contentType, this,
                          &aSuccessCallback, failureCallback));
  mDecoder.AsyncDecodeMedia(contentType.get(), data, length, *job);
  // Transfer the ownership to mDecodeJobs
  mDecodeJobs.AppendElement(job);
}

void
AudioContext::RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob)
{
  mDecodeJobs.RemoveElement(aDecodeJob);
}

void
AudioContext::RegisterActiveNode(AudioNode* aNode)
{
  if (!mIsShutDown) {
    mActiveNodes.PutEntry(aNode);
  }
}

void
AudioContext::UnregisterActiveNode(AudioNode* aNode)
{
  mActiveNodes.RemoveEntry(aNode);
}

void
AudioContext::UnregisterAudioBufferSourceNode(AudioBufferSourceNode* aNode)
{
  UpdatePannerSource();
}

void
AudioContext::UnregisterPannerNode(PannerNode* aNode)
{
  mPannerNodes.RemoveEntry(aNode);
  if (mListener) {
    mListener->UnregisterPannerNode(aNode);
  }
}

static PLDHashOperator
FindConnectedSourcesOn(nsPtrHashKey<PannerNode>* aEntry, void* aData)
{
  aEntry->GetKey()->FindConnectedSources();
  return PL_DHASH_NEXT;
}

void
AudioContext::UpdatePannerSource()
{
  mPannerNodes.EnumerateEntries(FindConnectedSourcesOn, nullptr);
}

uint32_t
AudioContext::MaxChannelCount() const
{
  return mIsOffline ? mNumberOfChannels : AudioStream::MaxNumberOfChannels();
}

MediaStreamGraph*
AudioContext::Graph() const
{
  return Destination()->Stream()->Graph();
}

MediaStream*
AudioContext::DestinationStream() const
{
  if (Destination()) {
    return Destination()->Stream();
  }
  return nullptr;
}

double
AudioContext::CurrentTime() const
{
  return MediaTimeToSeconds(Destination()->Stream()->GetCurrentTime()) +
         ExtraCurrentTime();
}

void
AudioContext::Shutdown()
{
  mIsShutDown = true;

  // We mute rather than suspending, because the delay between the ::Shutdown
  // call and the CC would make us overbuffer in the MediaStreamGraph.
  // See bug 936784 for details.
  if (!mIsOffline) {
    Mute();
  }

  mDecoder.Shutdown();

  // Release references to active nodes.
  // Active AudioNodes don't unregister in destructors, at which point the
  // Node is already unregistered.
  mActiveNodes.Clear();

  // For offline contexts, we can destroy the MediaStreamGraph at this point.
  if (mIsOffline && mDestination) {
    mDestination->OfflineShutdown();
  }
}

void
AudioContext::Suspend()
{
  MediaStream* ds = DestinationStream();
  if (ds) {
    ds->ChangeExplicitBlockerCount(1);
  }
}

void
AudioContext::Resume()
{
  MediaStream* ds = DestinationStream();
  if (ds) {
    ds->ChangeExplicitBlockerCount(-1);
  }
}

void
AudioContext::UpdateNodeCount(int32_t aDelta)
{
  bool firstNode = mNodeCount == 0;
  mNodeCount += aDelta;
  MOZ_ASSERT(mNodeCount >= 0);
  // mDestinationNode may be null when we're destroying nodes unlinked by CC
  if (!firstNode && mDestination) {
    mDestination->SetIsOnlyNodeForContext(mNodeCount == 1);
  }
}

JSContext*
AudioContext::GetJSContext() const
{
  MOZ_ASSERT(NS_IsMainThread());

  nsCOMPtr<nsIScriptGlobalObject> scriptGlobal =
    do_QueryInterface(GetParentObject());
  if (!scriptGlobal) {
    return nullptr;
  }
  nsIScriptContext* scriptContext = scriptGlobal->GetContext();
  if (!scriptContext) {
    return nullptr;
  }
  return scriptContext->GetNativeContext();
}

void
AudioContext::StartRendering(ErrorResult& aRv)
{
  MOZ_ASSERT(mIsOffline, "This should only be called on OfflineAudioContext");
  if (mIsStarted) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }

  mIsStarted = true;
  mDestination->StartRendering();
}

void
AudioContext::Mute() const
{
  MOZ_ASSERT(!mIsOffline);
  if (mDestination) {
    mDestination->Mute();
  }
}

void
AudioContext::Unmute() const
{
  MOZ_ASSERT(!mIsOffline);
  if (mDestination) {
    mDestination->Unmute();
  }
}

AudioChannel
AudioContext::MozAudioChannelType() const
{
  return mDestination->MozAudioChannelType();
}

void
AudioContext::SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv)
{
  mDestination->SetMozAudioChannelType(aValue, aRv);
}

size_t
AudioContext::SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const
{
  // AudioNodes are tracked separately because we do not want the AudioContext
  // to track all of the AudioNodes it creates, so we wouldn't be able to
  // traverse them from here.

  size_t amount = aMallocSizeOf(this);
  if (mListener) {
    amount += mListener->SizeOfIncludingThis(aMallocSizeOf);
  }
  amount += mDecoder.SizeOfExcludingThis(aMallocSizeOf);
  amount += mDecodeJobs.SizeOfExcludingThis(aMallocSizeOf);
  for (uint32_t i = 0; i < mDecodeJobs.Length(); ++i) {
    amount += mDecodeJobs[i]->SizeOfExcludingThis(aMallocSizeOf);
  }
  amount += mActiveNodes.SizeOfExcludingThis(nullptr, aMallocSizeOf);
  amount += mPannerNodes.SizeOfExcludingThis(nullptr, aMallocSizeOf);
  return amount;
}

NS_IMETHODIMP
AudioContext::CollectReports(nsIHandleReportCallback* aHandleReport,
                             nsISupports* aData)
{
  int64_t amount = SizeOfIncludingThis(MallocSizeOf);
  return MOZ_COLLECT_REPORT("explicit/webaudio/audiocontext", KIND_HEAP, UNITS_BYTES,
                            amount, "Memory used by AudioContext objects (Web Audio).");
}

double
AudioContext::ExtraCurrentTime() const
{
  return mDestination->ExtraCurrentTime();
}

} // namespace dom
} // namespace mozilla