--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/content/media/webaudio/ScriptProcessorNode.cpp Wed Dec 31 06:09:35 2014 +0100
@@ -0,0 +1,530 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "ScriptProcessorNode.h"
+#include "mozilla/dom/ScriptProcessorNodeBinding.h"
+#include "AudioBuffer.h"
+#include "AudioDestinationNode.h"
+#include "AudioNodeEngine.h"
+#include "AudioNodeStream.h"
+#include "AudioProcessingEvent.h"
+#include "WebAudioUtils.h"
+#include "nsCxPusher.h"
+#include "mozilla/Mutex.h"
+#include "mozilla/PodOperations.h"
+#include <deque>
+
+namespace mozilla {
+namespace dom {
+
+// The maximum latency, in seconds, that we can live with before dropping
+// buffers.
+static const float MAX_LATENCY_S = 0.5;
+
+NS_IMPL_ISUPPORTS_INHERITED0(ScriptProcessorNode, AudioNode)
+
+// This class manages a queue of output buffers shared between
+// the main thread and the Media Stream Graph thread.
+class SharedBuffers
+{
+private:
+  class OutputQueue
+  {
+  public:
+    explicit OutputQueue(const char* aName)
+      : mMutex(aName)
+    {}
+
+    size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
+    {
+      mMutex.AssertCurrentThreadOwns();
+
+      size_t amount = 0;
+      for (size_t i = 0; i < mBufferList.size(); i++) {
+        amount += mBufferList[i].SizeOfExcludingThis(aMallocSizeOf, false);
+      }
+
+      return amount;
+    }
+
+    Mutex& Lock() const { return const_cast<OutputQueue*>(this)->mMutex; }
+
+    size_t ReadyToConsume() const
+    {
+      mMutex.AssertCurrentThreadOwns();
+      MOZ_ASSERT(!NS_IsMainThread());
+      return mBufferList.size();
+    }
+
+    // Produce one buffer
+    AudioChunk& Produce()
+    {
+      mMutex.AssertCurrentThreadOwns();
+      MOZ_ASSERT(NS_IsMainThread());
+      mBufferList.push_back(AudioChunk());
+      return mBufferList.back();
+    }
+
+    // Consumes one buffer.
+    AudioChunk Consume()
+    {
+      mMutex.AssertCurrentThreadOwns();
+      MOZ_ASSERT(!NS_IsMainThread());
+      MOZ_ASSERT(ReadyToConsume() > 0);
+      AudioChunk front = mBufferList.front();
+      mBufferList.pop_front();
+      return front;
+    }
+
+    // Empties the buffer queue.
+    void Clear()
+    {
+      mMutex.AssertCurrentThreadOwns();
+      mBufferList.clear();
+    }
+
+  private:
+    typedef std::deque<AudioChunk> BufferList;
+
+    // Synchronizes access to mBufferList. Note that it's the responsibility
+    // of the callers to perform the required locking, and we assert that every
+    // time we access mBufferList.
+    Mutex mMutex;
+    // The list representing the queue.
+    BufferList mBufferList;
+  };
+
+public:
+  SharedBuffers(float aSampleRate)
+    : mOutputQueue("SharedBuffers::outputQueue")
+    , mDelaySoFar(TRACK_TICKS_MAX)
+    , mSampleRate(aSampleRate)
+    , mLatency(0.0)
+    , mDroppingBuffers(false)
+  {
+  }
+
+  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
+  {
+    size_t amount = aMallocSizeOf(this);
+
+    {
+      MutexAutoLock lock(mOutputQueue.Lock());
+      amount += mOutputQueue.SizeOfExcludingThis(aMallocSizeOf);
+    }
+
+    return amount;
+  }
+
+  // main thread
+  void FinishProducingOutputBuffer(ThreadSharedFloatArrayBufferList* aBuffer,
+                                   uint32_t aBufferSize)
+  {
+    MOZ_ASSERT(NS_IsMainThread());
+
+    TimeStamp now = TimeStamp::Now();
+
+    if (mLastEventTime.IsNull()) {
+      mLastEventTime = now;
+    } else {
+      // When the main thread is blocked, and all the events are processed in a
+      // burst after the main thread unblocks, the |(now - mLastEventTime)|
+      // interval will be very short. |latency - bufferDuration| will be
+      // negative, effectively moving back mLatency to a smaller and smaller
+      // value, until it crosses zero, at which point we stop dropping buffers
+      // and resume normal operation. This does not work if at the same time,
+      // the MSG thread was also slowed down, so if the latency on the MSG
+      // thread is normal, and we are still dropping buffers, and mLatency is
+      // still more than twice the duration of a buffer, we reset it and stop
+      // dropping buffers.
+      float latency = (now - mLastEventTime).ToSeconds();
+      float bufferDuration = aBufferSize / mSampleRate;
+      mLatency += latency - bufferDuration;
+      mLastEventTime = now;
+      if (mLatency > MAX_LATENCY_S ||
+          (mDroppingBuffers && mLatency > 0.0 &&
+           fabs(latency - bufferDuration) < bufferDuration)) {
+        mDroppingBuffers = true;
+        return;
+      } else {
+        if (mDroppingBuffers) {
+          mLatency = 0;
+        }
+        mDroppingBuffers = false;
+      }
+    }
+
+    MutexAutoLock lock(mOutputQueue.Lock());
+    for (uint32_t offset = 0; offset < aBufferSize; offset += WEBAUDIO_BLOCK_SIZE) {
+      AudioChunk& chunk = mOutputQueue.Produce();
+      if (aBuffer) {
+        chunk.mDuration = WEBAUDIO_BLOCK_SIZE;
+        chunk.mBuffer = aBuffer;
+        chunk.mChannelData.SetLength(aBuffer->GetChannels());
+        for (uint32_t i = 0; i < aBuffer->GetChannels(); ++i) {
+          chunk.mChannelData[i] = aBuffer->GetData(i) + offset;
+        }
+        chunk.mVolume = 1.0f;
+        chunk.mBufferFormat = AUDIO_FORMAT_FLOAT32;
+      } else {
+        chunk.SetNull(WEBAUDIO_BLOCK_SIZE);
+      }
+    }
+  }
+
+  // graph thread
+  AudioChunk GetOutputBuffer()
+  {
+    MOZ_ASSERT(!NS_IsMainThread());
+    AudioChunk buffer;
+
+    {
+      MutexAutoLock lock(mOutputQueue.Lock());
+      if (mOutputQueue.ReadyToConsume() > 0) {
+        if (mDelaySoFar == TRACK_TICKS_MAX) {
+          mDelaySoFar = 0;
+        }
+        buffer = mOutputQueue.Consume();
+      } else {
+        // If we're out of buffers to consume, just output silence
+        buffer.SetNull(WEBAUDIO_BLOCK_SIZE);
+        if (mDelaySoFar != TRACK_TICKS_MAX) {
+          // Remember the delay that we just hit
+          mDelaySoFar += WEBAUDIO_BLOCK_SIZE;
+        }
+      }
+    }
+
+    return buffer;
+  }
+
+  TrackTicks DelaySoFar() const
+  {
+    MOZ_ASSERT(!NS_IsMainThread());
+    return mDelaySoFar == TRACK_TICKS_MAX ? 0 : mDelaySoFar;
+  }
+
+  void Reset()
+  {
+    MOZ_ASSERT(!NS_IsMainThread());
+    mDelaySoFar = TRACK_TICKS_MAX;
+    mLatency = 0.0f;
+    {
+      MutexAutoLock lock(mOutputQueue.Lock());
+      mOutputQueue.Clear();
+    }
+    mLastEventTime = TimeStamp();
+  }
+
+private:
+  OutputQueue mOutputQueue;
+  // How much delay we've seen so far. This measures the amount of delay
+  // caused by the main thread lagging behind in producing output buffers.
+  // TRACK_TICKS_MAX means that we have not received our first buffer yet.
+  TrackTicks mDelaySoFar;
+  // The samplerate of the context.
+  float mSampleRate;
+  // This is the latency caused by the buffering. If this grows too high, we
+  // will drop buffers until it is acceptable.
+  float mLatency;
+  // This is the time at which we last produced a buffer, to detect if the main
+  // thread has been blocked.
+  TimeStamp mLastEventTime;
+  // True if we should be dropping buffers.
+  bool mDroppingBuffers;
+};
+
+class ScriptProcessorNodeEngine : public AudioNodeEngine
+{
+public:
+  typedef nsAutoTArray<nsAutoArrayPtr<float>, 2> InputChannels;
+
+  ScriptProcessorNodeEngine(ScriptProcessorNode* aNode,
+                            AudioDestinationNode* aDestination,
+                            uint32_t aBufferSize,
+                            uint32_t aNumberOfInputChannels)
+    : AudioNodeEngine(aNode)
+    , mSharedBuffers(aNode->GetSharedBuffers())
+    , mSource(nullptr)
+    , mDestination(static_cast<AudioNodeStream*> (aDestination->Stream()))
+    , mBufferSize(aBufferSize)
+    , mInputWriteIndex(0)
+    , mSeenNonSilenceInput(false)
+  {
+    mInputChannels.SetLength(aNumberOfInputChannels);
+    AllocateInputBlock();
+  }
+
+  void SetSourceStream(AudioNodeStream* aSource)
+  {
+    mSource = aSource;
+  }
+
+  virtual void ProcessBlock(AudioNodeStream* aStream,
+                            const AudioChunk& aInput,
+                            AudioChunk* aOutput,
+                            bool* aFinished) MOZ_OVERRIDE
+  {
+    MutexAutoLock lock(NodeMutex());
+
+    // If our node is dead, just output silence.
+    if (!Node()) {
+      aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
+      return;
+    }
+
+    // This node is not connected to anything. Per spec, we don't fire the
+    // onaudioprocess event. We also want to clear out the input and output
+    // buffer queue, and output a null buffer.
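+    // (ConsumerCount() covers downstream connections and InputPortCount()
+    // covers upstream ones, so this branch is only taken when the node is
+    // detached on both sides.)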
+    if (!(aStream->ConsumerCount() ||
+          aStream->AsProcessedStream()->InputPortCount())) {
+      aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
+      mSharedBuffers->Reset();
+      mSeenNonSilenceInput = false;
+      mInputWriteIndex = 0;
+      return;
+    }
+
+    // First, record our input buffer
+    for (uint32_t i = 0; i < mInputChannels.Length(); ++i) {
+      if (aInput.IsNull()) {
+        PodZero(mInputChannels[i] + mInputWriteIndex,
+                aInput.GetDuration());
+      } else {
+        mSeenNonSilenceInput = true;
+        MOZ_ASSERT(aInput.GetDuration() == WEBAUDIO_BLOCK_SIZE, "sanity check");
+        MOZ_ASSERT(aInput.mChannelData.Length() == mInputChannels.Length());
+        AudioBlockCopyChannelWithScale(static_cast<const float*>(aInput.mChannelData[i]),
+                                       aInput.mVolume,
+                                       mInputChannels[i] + mInputWriteIndex);
+      }
+    }
+    mInputWriteIndex += aInput.GetDuration();
+
+    // Now, see if we have data to output
+    // Note that we need to do this before sending the buffer to the main
+    // thread so that our delay time is updated.
+    *aOutput = mSharedBuffers->GetOutputBuffer();
+
+    if (mInputWriteIndex >= mBufferSize) {
+      SendBuffersToMainThread(aStream);
+      mInputWriteIndex -= mBufferSize;
+      mSeenNonSilenceInput = false;
+      AllocateInputBlock();
+    }
+  }
+
+  virtual size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const MOZ_OVERRIDE
+  {
+    // Not owned:
+    // - mSharedBuffers
+    // - mSource (probably)
+    // - mDestination (probably)
+    size_t amount = AudioNodeEngine::SizeOfExcludingThis(aMallocSizeOf);
+    amount += mInputChannels.SizeOfExcludingThis(aMallocSizeOf);
+    for (size_t i = 0; i < mInputChannels.Length(); i++) {
+      amount += mInputChannels[i].SizeOfExcludingThis(aMallocSizeOf);
+    }
+
+    return amount;
+  }
+
+  virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const MOZ_OVERRIDE
+  {
+    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
+  }
+
+private:
+  void AllocateInputBlock()
+  {
+    for (unsigned i = 0; i < mInputChannels.Length(); ++i) {
+      if (!mInputChannels[i]) {
+        mInputChannels[i] = new float[mBufferSize];
+      }
+    }
+  }
+
+  void SendBuffersToMainThread(AudioNodeStream* aStream)
+  {
+    MOZ_ASSERT(!NS_IsMainThread());
+
+    // we now have a full input buffer ready to be sent to the main thread.
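+    // The script callback can only run on the main thread, and the graph
+    // thread must never block waiting for it, so the data is handed off via a
+    // runnable; until results come back through FinishProducingOutputBuffer()
+    // the graph keeps emitting silence from GetOutputBuffer().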
+    TrackTicks playbackTick = mSource->GetCurrentPosition();
+    // Add the duration of the current sample
+    playbackTick += WEBAUDIO_BLOCK_SIZE;
+    // Add the delay caused by the main thread
+    playbackTick += mSharedBuffers->DelaySoFar();
+    // Compute the playback time in the coordinate system of the destination
+    // FIXME: bug 970773
+    double playbackTime =
+      mSource->DestinationTimeFromTicks(mDestination, playbackTick);
+
+    class Command : public nsRunnable
+    {
+    public:
+      Command(AudioNodeStream* aStream,
+              InputChannels& aInputChannels,
+              double aPlaybackTime,
+              bool aNullInput)
+        : mStream(aStream)
+        , mPlaybackTime(aPlaybackTime)
+        , mNullInput(aNullInput)
+      {
+        mInputChannels.SetLength(aInputChannels.Length());
+        if (!aNullInput) {
+          for (uint32_t i = 0; i < mInputChannels.Length(); ++i) {
+            mInputChannels[i] = aInputChannels[i].forget();
+          }
+        }
+      }
+
+      NS_IMETHODIMP Run()
+      {
+        // If it's not safe to run scripts right now, schedule this to run later
+        if (!nsContentUtils::IsSafeToRunScript()) {
+          nsContentUtils::AddScriptRunner(this);
+          return NS_OK;
+        }
+
+        nsRefPtr<ScriptProcessorNode> node;
+        {
+          // No need to keep holding the lock for the whole duration of this
+          // function, since we're holding a strong reference to it, so if
+          // we can obtain the reference, we will hold the node alive in
+          // this function.
+          MutexAutoLock lock(mStream->Engine()->NodeMutex());
+          node = static_cast<ScriptProcessorNode*>(mStream->Engine()->Node());
+        }
+        if (!node || !node->Context()) {
+          return NS_OK;
+        }
+
+        AutoPushJSContext cx(node->Context()->GetJSContext());
+        if (cx) {
+
+          // Create the input buffer
+          nsRefPtr<AudioBuffer> inputBuffer;
+          if (!mNullInput) {
+            ErrorResult rv;
+            inputBuffer =
+              AudioBuffer::Create(node->Context(), mInputChannels.Length(),
+                                  node->BufferSize(),
+                                  node->Context()->SampleRate(), cx, rv);
+            if (rv.Failed()) {
+              return NS_OK;
+            }
+            // Put the channel data inside it
+            for (uint32_t i = 0; i < mInputChannels.Length(); ++i) {
+              inputBuffer->SetRawChannelContents(cx, i, mInputChannels[i]);
+            }
+          }
+
+          // Ask content to produce data in the output buffer
+          // Note that we always avoid creating the output buffer here, and we try to
+          // avoid creating the input buffer as well. The AudioProcessingEvent class
+          // knows how to lazily create them if needed once the script tries to access
+          // them. Otherwise, we may be able to get away without creating them!
+          nsRefPtr<AudioProcessingEvent> event = new AudioProcessingEvent(node, nullptr, nullptr);
+          event->InitEvent(inputBuffer,
+                           mInputChannels.Length(),
+                           mPlaybackTime);
+          node->DispatchTrustedEvent(event);
+
+          // Steal the output buffers if they have been set. Don't create a
+          // buffer if it hasn't been used to return output;
+          // FinishProducingOutputBuffer() will optimize output = null.
+          // GetThreadSharedChannelsForRate() may also return null after OOM.
+          nsRefPtr<ThreadSharedFloatArrayBufferList> output;
+          if (event->HasOutputBuffer()) {
+            ErrorResult rv;
+            AudioBuffer* buffer = event->GetOutputBuffer(rv);
+            // HasOutputBuffer() returning true means that GetOutputBuffer()
+            // will not fail.
+            MOZ_ASSERT(!rv.Failed());
+            output = buffer->GetThreadSharedChannelsForRate(cx);
+          }
+
+          // Append it to our output buffer queue
+          node->GetSharedBuffers()->FinishProducingOutputBuffer(output, node->BufferSize());
+        }
+        return NS_OK;
+      }
+    private:
+      nsRefPtr<AudioNodeStream> mStream;
+      InputChannels mInputChannels;
+      double mPlaybackTime;
+      bool mNullInput;
+    };
+
+    NS_DispatchToMainThread(new Command(aStream, mInputChannels,
+                                        playbackTime,
+                                        !mSeenNonSilenceInput));
+  }
+
+  friend class ScriptProcessorNode;
+
+  SharedBuffers* mSharedBuffers;
+  AudioNodeStream* mSource;
+  AudioNodeStream* mDestination;
+  InputChannels mInputChannels;
+  const uint32_t mBufferSize;
+  // The write index into the current input buffer
+  uint32_t mInputWriteIndex;
+  bool mSeenNonSilenceInput;
+};
+
+ScriptProcessorNode::ScriptProcessorNode(AudioContext* aContext,
+                                         uint32_t aBufferSize,
+                                         uint32_t aNumberOfInputChannels,
+                                         uint32_t aNumberOfOutputChannels)
+  : AudioNode(aContext,
+              aNumberOfInputChannels,
+              mozilla::dom::ChannelCountMode::Explicit,
+              mozilla::dom::ChannelInterpretation::Speakers)
+  , mSharedBuffers(new SharedBuffers(aContext->SampleRate()))
+  , mBufferSize(aBufferSize ?
+                  aBufferSize : // respect what the web developer requested
+                  4096)         // choose our own buffer size -- 4KB for now
+  , mNumberOfOutputChannels(aNumberOfOutputChannels)
+{
+  MOZ_ASSERT(BufferSize() % WEBAUDIO_BLOCK_SIZE == 0, "Invalid buffer size");
+  ScriptProcessorNodeEngine* engine =
+    new ScriptProcessorNodeEngine(this,
+                                  aContext->Destination(),
+                                  BufferSize(),
+                                  aNumberOfInputChannels);
+  mStream = aContext->Graph()->CreateAudioNodeStream(engine, MediaStreamGraph::INTERNAL_STREAM);
+  engine->SetSourceStream(static_cast<AudioNodeStream*> (mStream.get()));
+}
+
+ScriptProcessorNode::~ScriptProcessorNode()
+{
+}
+
+size_t
+ScriptProcessorNode::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
+{
+  size_t amount = AudioNode::SizeOfExcludingThis(aMallocSizeOf);
+  amount += mSharedBuffers->SizeOfIncludingThis(aMallocSizeOf);
+  return amount;
+}
+
+size_t
+ScriptProcessorNode::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
+{
+  return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
+}
+
+JSObject*
+ScriptProcessorNode::WrapObject(JSContext* aCx)
+{
+  return ScriptProcessorNodeBinding::Wrap(aCx, this);
+}
+
+}
+}
+
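The overload handling in this patch boils down to the latency accounting in SharedBuffers::FinishProducingOutputBuffer(): accumulate how far behind the main thread has fallen, start dropping buffers past MAX_LATENCY_S, and stop once the backlog drains. The standalone sketch below isolates that accumulate-and-drop heuristic so it can be compiled and experimented with on its own; the DropHeuristic type, its method name, and the small simulation driver are illustrative inventions, and only the threshold constant and the drop/reset condition mirror the code above.

// drop_heuristic_sketch.cpp -- a minimal sketch of the buffer-dropping
// heuristic; names and the driver are hypothetical, the condition mirrors
// SharedBuffers::FinishProducingOutputBuffer() in the patch.
#include <cmath>
#include <cstdio>

static const float kMaxLatencyS = 0.5f;  // same role as MAX_LATENCY_S

struct DropHeuristic {
  float mLatency = 0.0f;   // accumulated backlog, in seconds
  bool mDropping = false;  // currently in drop mode?

  // Returns true if the buffer that just arrived should be dropped.
  // aInterval: seconds since the previous buffer was produced.
  // aBufferDuration: seconds of audio carried by one buffer.
  bool ShouldDrop(float aInterval, float aBufferDuration) {
    mLatency += aInterval - aBufferDuration;
    if (mLatency > kMaxLatencyS ||
        (mDropping && mLatency > 0.0f &&
         std::fabs(aInterval - aBufferDuration) < aBufferDuration)) {
      mDropping = true;
      return true;
    }
    if (mDropping) {
      // Latency has recovered (or the producer is clearly catching up);
      // start over from a clean slate, as the patch does.
      mLatency = 0.0f;
    }
    mDropping = false;
    return false;
  }
};

int main() {
  DropHeuristic h;
  const float bufferDuration = 4096.0f / 44100.0f;  // ~93 ms per 4096-frame buffer
  // Simulate a stall (300 ms gaps) followed by a burst of very short
  // intervals once the main thread unblocks, then normal cadence again.
  const float intervals[] = {0.093f, 0.093f, 0.3f, 0.3f, 0.3f,
                             0.005f, 0.005f, 0.005f, 0.005f,
                             0.005f, 0.005f, 0.005f, 0.005f, 0.093f};
  for (float interval : intervals) {
    bool drop = h.ShouldDrop(interval, bufferDuration);
    std::printf("interval=%.3f -> %s (latency=%.3f)\n",
                interval, drop ? "drop" : "keep", h.mLatency);
  }
  return 0;
}

Running the driver shows the latency climbing past 0.5 s during the stall, buffers being dropped through the burst while the backlog drains, and the heuristic resetting once the accumulated latency crosses zero.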