diff -r 000000000000 -r 6474c204b198 content/media/webaudio/AudioContext.h
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/content/media/webaudio/AudioContext.h	Wed Dec 31 06:09:35 2014 +0100
@@ -0,0 +1,280 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef AudioContext_h_
+#define AudioContext_h_
+
+#include "mozilla/dom/AudioChannelBinding.h"
+#include "MediaBufferDecoder.h"
+#include "mozilla/Attributes.h"
+#include "mozilla/DOMEventTargetHelper.h"
+#include "mozilla/MemoryReporting.h"
+#include "mozilla/dom/TypedArray.h"
+#include "nsAutoPtr.h"
+#include "nsCOMPtr.h"
+#include "nsCycleCollectionParticipant.h"
+#include "nsHashKeys.h"
+#include "nsTHashtable.h"
+#include "js/TypeDecls.h"
+#include "nsIMemoryReporter.h"
+
+// X11 has a #define for CurrentTime. Unbelievable :-(.
+// See content/media/DOMMediaStream.h for more fun!
+#ifdef CurrentTime
+#undef CurrentTime
+#endif
+
+class nsPIDOMWindow;
+
+namespace mozilla {
+
+class DOMMediaStream;
+class ErrorResult;
+class MediaStream;
+class MediaStreamGraph;
+
+namespace dom {
+
+class AnalyserNode;
+class AudioBuffer;
+class AudioBufferSourceNode;
+class AudioDestinationNode;
+class AudioListener;
+class AudioNode;
+class BiquadFilterNode;
+class ChannelMergerNode;
+class ChannelSplitterNode;
+class ConvolverNode;
+class DelayNode;
+class DynamicsCompressorNode;
+class GainNode;
+class HTMLMediaElement;
+class MediaElementAudioSourceNode;
+class GlobalObject;
+class MediaStreamAudioDestinationNode;
+class MediaStreamAudioSourceNode;
+class OscillatorNode;
+class PannerNode;
+class ScriptProcessorNode;
+class WaveShaperNode;
+class PeriodicWave;
+
+class AudioContext MOZ_FINAL : public DOMEventTargetHelper,
+                               public nsIMemoryReporter
+{
+  AudioContext(nsPIDOMWindow* aParentWindow,
+               bool aIsOffline,
+               AudioChannel aChannel = AudioChannel::Normal,
+               uint32_t aNumberOfChannels = 0,
+               uint32_t aLength = 0,
+               float aSampleRate = 0.0f);
+  ~AudioContext();
+
+public:
+  NS_DECL_ISUPPORTS_INHERITED
+  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioContext,
+                                           DOMEventTargetHelper)
+  MOZ_DEFINE_MALLOC_SIZE_OF(MallocSizeOf)
+
+  nsPIDOMWindow* GetParentObject() const
+  {
+    return GetOwner();
+  }
+
+  void Shutdown(); // idempotent
+  void Suspend();
+  void Resume();
+
+  virtual JSObject* WrapObject(JSContext* aCx) MOZ_OVERRIDE;
+
+  using DOMEventTargetHelper::DispatchTrustedEvent;
+
+  // Constructor for regular AudioContext
+  static already_AddRefed<AudioContext>
+  Constructor(const GlobalObject& aGlobal, ErrorResult& aRv);
+
+  // Constructor for regular AudioContext. A default audio channel is needed.
+  static already_AddRefed<AudioContext>
+  Constructor(const GlobalObject& aGlobal,
+              AudioChannel aChannel,
+              ErrorResult& aRv);
+
+  // Constructor for offline AudioContext
+  static already_AddRefed<AudioContext>
+  Constructor(const GlobalObject& aGlobal,
+              uint32_t aNumberOfChannels,
+              uint32_t aLength,
+              float aSampleRate,
+              ErrorResult& aRv);
+
+  // AudioContext methods
+
+  AudioDestinationNode* Destination() const
+  {
+    return mDestination;
+  }
+
+  float SampleRate() const
+  {
+    return mSampleRate;
+  }
+
+  double CurrentTime() const;
+
+  AudioListener* Listener();
+
+  already_AddRefed<AudioBufferSourceNode> CreateBufferSource();
+
+  already_AddRefed<AudioBuffer>
+  CreateBuffer(JSContext* aJSContext, uint32_t aNumberOfChannels,
+               uint32_t aLength, float aSampleRate,
+               ErrorResult& aRv);
+
+  already_AddRefed<MediaStreamAudioDestinationNode>
+  CreateMediaStreamDestination(ErrorResult& aRv);
+
+  already_AddRefed<ScriptProcessorNode>
+  CreateScriptProcessor(uint32_t aBufferSize,
+                        uint32_t aNumberOfInputChannels,
+                        uint32_t aNumberOfOutputChannels,
+                        ErrorResult& aRv);
+
+  already_AddRefed<AnalyserNode>
+  CreateAnalyser();
+
+  already_AddRefed<GainNode>
+  CreateGain();
+
+  already_AddRefed<WaveShaperNode>
+  CreateWaveShaper();
+
+  already_AddRefed<MediaElementAudioSourceNode>
+  CreateMediaElementSource(HTMLMediaElement& aMediaElement, ErrorResult& aRv);
+  already_AddRefed<MediaStreamAudioSourceNode>
+  CreateMediaStreamSource(DOMMediaStream& aMediaStream, ErrorResult& aRv);
+
+  already_AddRefed<DelayNode>
+  CreateDelay(double aMaxDelayTime, ErrorResult& aRv);
+
+  already_AddRefed<PannerNode>
+  CreatePanner();
+
+  already_AddRefed<ConvolverNode>
+  CreateConvolver();
+
+  already_AddRefed<ChannelSplitterNode>
+  CreateChannelSplitter(uint32_t aNumberOfOutputs, ErrorResult& aRv);
+
+  already_AddRefed<ChannelMergerNode>
+  CreateChannelMerger(uint32_t aNumberOfInputs, ErrorResult& aRv);
+
+  already_AddRefed<DynamicsCompressorNode>
+  CreateDynamicsCompressor();
+
+  already_AddRefed<BiquadFilterNode>
+  CreateBiquadFilter();
+
+  already_AddRefed<OscillatorNode>
+  CreateOscillator();
+
+  already_AddRefed<PeriodicWave>
+  CreatePeriodicWave(const Float32Array& aRealData, const Float32Array& aImagData,
+                     ErrorResult& aRv);
+
+  void DecodeAudioData(const ArrayBuffer& aBuffer,
+                       DecodeSuccessCallback& aSuccessCallback,
+                       const Optional<OwningNonNull<DecodeErrorCallback> >& aFailureCallback);
+
+  // OfflineAudioContext methods
+  void StartRendering(ErrorResult& aRv);
+  IMPL_EVENT_HANDLER(complete)
+
+  bool IsOffline() const { return mIsOffline; }
+
+  MediaStreamGraph* Graph() const;
+  MediaStream* DestinationStream() const;
+
+  // Nodes register here if they will produce sound even if they have silent
+  // or no input connections. The AudioContext will keep registered nodes
+  // alive until the context is collected. This takes care of "playing"
+  // references and "tail-time" references.
+  void RegisterActiveNode(AudioNode* aNode);
+  // Nodes unregister when they have finished producing sound for the
+  // foreseeable future.
+  // Do NOT call UnregisterActiveNode from an AudioNode destructor.
+  // If the destructor is called, then the Node has already been unregistered.
+  // The destructor may be called during hashtable enumeration, during which
+  // unregistering would not be safe.
+  void UnregisterActiveNode(AudioNode* aNode);
+
+  void UnregisterAudioBufferSourceNode(AudioBufferSourceNode* aNode);
+  void UnregisterPannerNode(PannerNode* aNode);
+  void UpdatePannerSource();
+
+  uint32_t MaxChannelCount() const;
+
+  void Mute() const;
+  void Unmute() const;
+
+  JSContext* GetJSContext() const;
+
+  AudioChannel MozAudioChannelType() const;
+  void SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv);
+
+  void UpdateNodeCount(int32_t aDelta);
+
+  double DOMTimeToStreamTime(double aTime) const
+  {
+    return aTime - ExtraCurrentTime();
+  }
+
+private:
+  /**
+   * Returns the amount of extra time added to the current time of the
+   * AudioDestinationNode's MediaStream to get this AudioContext's currentTime.
+   * Must be subtracted from all DOM API parameter times that are on the same
+   * timeline as AudioContext's currentTime to get times we can pass to the
+   * MediaStreamGraph.
+   */
+  double ExtraCurrentTime() const;
+
+  void RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob);
+  void ShutdownDecoder();
+
+  size_t SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const;
+  NS_IMETHOD CollectReports(nsIHandleReportCallback* aHandleReport,
+                            nsISupports* aData);
+
+  friend struct ::mozilla::WebAudioDecodeJob;
+
+private:
+  // Note that it's important for mSampleRate to be initialized before
+  // mDestination, as mDestination's constructor needs to access it!
+  const float mSampleRate;
+  nsRefPtr<AudioDestinationNode> mDestination;
+  nsRefPtr<AudioListener> mListener;
+  MediaBufferDecoder mDecoder;
+  nsTArray<nsAutoPtr<WebAudioDecodeJob> > mDecodeJobs;
+  // See RegisterActiveNode. These will keep the AudioContext alive while it
+  // is rendering and the window remains alive.
+  nsTHashtable<nsRefPtrHashKey<AudioNode> > mActiveNodes;
+  // Hashsets containing all the PannerNodes, to compute the doppler shift.
+  // These are weak pointers.
+  nsTHashtable<nsPtrHashKey<PannerNode> > mPannerNodes;
+  // Number of channels passed in the OfflineAudioContext ctor.
+  uint32_t mNumberOfChannels;
+  // Number of nodes that currently exist for this AudioContext
+  int32_t mNodeCount;
+  bool mIsOffline;
+  bool mIsStarted;
+  bool mIsShutDown;
+};
+
+} // namespace dom
+} // namespace mozilla
+
+#endif
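
Reviewer note: the RegisterActiveNode/UnregisterActiveNode pair above encodes the "playing reference" and "tail-time reference" lifetime rules. The sketch below only illustrates the intended calling pattern and is not part of this patch; HypotheticalSourceNode, its Start()/NotifyFinished() hooks, and the Context() accessor are assumptions made for the example.

// Illustrative sketch, not part of the patch. Assumes a node type with an
// AudioContext* Context() accessor, as AudioNode subclasses typically have.
class HypotheticalSourceNode : public AudioNode
{
public:
  void Start()
  {
    // Take a "playing" reference: the context keeps this node alive even if
    // script drops its last reference and the node has no input connections.
    Context()->RegisterActiveNode(this);
  }

  void NotifyFinished()
  {
    // Drop the reference once the node will not produce sound any more,
    // including any tail time. Never do this from the destructor: by then the
    // node has already been unregistered, and the destructor may run during
    // hashtable enumeration, where unregistering would be unsafe.
    Context()->UnregisterActiveNode(this);
  }
};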
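
Similarly, DOMTimeToStreamTime() and ExtraCurrentTime() are easiest to check with concrete numbers. The snippet below is a worked example under assumed values, not code from this patch; aContext and the figures chosen are illustrative.

// Illustrative numbers only. Per the ExtraCurrentTime() comment,
//   currentTime == destination stream time + ExtraCurrentTime().
// Suppose the destination stream is at 9.2 s while currentTime reads 10.0 s,
// so ExtraCurrentTime() == 0.8 s. A DOM-facing time of 10.5 s (e.g. a start()
// argument) must then be scheduled at 10.5 - 0.8 = 9.7 s on the graph timeline:
double domTime = 10.5;
double streamTime = aContext->DOMTimeToStreamTime(domTime); // 9.7 with the values above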