content/media/webaudio/ScriptProcessorNode.cpp

author      Michael Schloh von Bennewitz <michael@schloh.com>
date        Tue, 06 Jan 2015 21:39:09 +0100
branch      TOR_BUG_9701
changeset   8:97036ab72558
permissions -rw-r--r--

Conditionally force memory-only storage according to privacy.thirdparty.isolate.
This solves Tor bug #9701, complying with the disk-avoidance requirement documented
in https://www.torproject.org/projects/torbrowser/design/#disk-avoidance.
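
The listing below is the unmodified base revision (michael@0); the pref-dependent
change described above lives elsewhere in the changeset. As a rough, hypothetical
sketch only of what a pref-gated storage decision looks like in Gecko-style code,
the helper ChooseStorageMode, the StorageMode enum, and any call sites are
illustrative assumptions and are not taken from this patch; only the preference
name privacy.thirdparty.isolate and Preferences::GetBool come from the source:

    #include "mozilla/Preferences.h"  // mozilla::Preferences::GetBool

    // Hypothetical sketch: pick memory-only storage whenever third-party
    // isolation is enabled, so no media data ends up cached on disk.
    // Preference reads like this must happen on the main thread.
    enum class StorageMode { Disk, MemoryOnly };

    static StorageMode ChooseStorageMode()
    {
      // Fall back to disk-backed storage when the preference is absent.
      bool isolate =
        mozilla::Preferences::GetBool("privacy.thirdparty.isolate", false);
      return isolate ? StorageMode::MemoryOnly : StorageMode::Disk;
    }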

/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "ScriptProcessorNode.h"
#include "mozilla/dom/ScriptProcessorNodeBinding.h"
#include "AudioBuffer.h"
#include "AudioDestinationNode.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "AudioProcessingEvent.h"
#include "WebAudioUtils.h"
#include "nsCxPusher.h"
#include "mozilla/Mutex.h"
#include "mozilla/PodOperations.h"
#include <deque>

namespace mozilla {
namespace dom {

// The maximum latency, in seconds, that we can live with before dropping
// buffers.
static const float MAX_LATENCY_S = 0.5;

NS_IMPL_ISUPPORTS_INHERITED0(ScriptProcessorNode, AudioNode)

// This class manages a queue of output buffers shared between
// the main thread and the Media Stream Graph thread.
class SharedBuffers
{
private:
  class OutputQueue
  {
  public:
    explicit OutputQueue(const char* aName)
      : mMutex(aName)
    {}

    size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
    {
      mMutex.AssertCurrentThreadOwns();

      size_t amount = 0;
      for (size_t i = 0; i < mBufferList.size(); i++) {
        amount += mBufferList[i].SizeOfExcludingThis(aMallocSizeOf, false);
      }

      return amount;
    }

    Mutex& Lock() const { return const_cast<OutputQueue*>(this)->mMutex; }

    size_t ReadyToConsume() const
    {
      mMutex.AssertCurrentThreadOwns();
      MOZ_ASSERT(!NS_IsMainThread());
      return mBufferList.size();
    }

    // Produces one buffer.
    AudioChunk& Produce()
    {
      mMutex.AssertCurrentThreadOwns();
      MOZ_ASSERT(NS_IsMainThread());
      mBufferList.push_back(AudioChunk());
      return mBufferList.back();
    }

    // Consumes one buffer.
    AudioChunk Consume()
    {
      mMutex.AssertCurrentThreadOwns();
      MOZ_ASSERT(!NS_IsMainThread());
      MOZ_ASSERT(ReadyToConsume() > 0);
      AudioChunk front = mBufferList.front();
      mBufferList.pop_front();
      return front;
    }

    // Empties the buffer queue.
    void Clear()
    {
      mMutex.AssertCurrentThreadOwns();
      mBufferList.clear();
    }

  private:
    typedef std::deque<AudioChunk> BufferList;

    // Synchronizes access to mBufferList. Note that it's the responsibility
    // of the callers to perform the required locking, and we assert that every
    // time we access mBufferList.
    Mutex mMutex;
    // The list representing the queue.
    BufferList mBufferList;
  };

public:
  SharedBuffers(float aSampleRate)
    : mOutputQueue("SharedBuffers::outputQueue")
    , mDelaySoFar(TRACK_TICKS_MAX)
    , mSampleRate(aSampleRate)
    , mLatency(0.0)
    , mDroppingBuffers(false)
  {
  }

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
  {
    size_t amount = aMallocSizeOf(this);

    {
      MutexAutoLock lock(mOutputQueue.Lock());
      amount += mOutputQueue.SizeOfExcludingThis(aMallocSizeOf);
    }

    return amount;
  }

  // main thread
  void FinishProducingOutputBuffer(ThreadSharedFloatArrayBufferList* aBuffer,
                                   uint32_t aBufferSize)
  {
    MOZ_ASSERT(NS_IsMainThread());

    TimeStamp now = TimeStamp::Now();

    if (mLastEventTime.IsNull()) {
      mLastEventTime = now;
    } else {
      // When the main thread is blocked, and all the events are processed in
      // a burst after the main thread unblocks, the |(now - mLastEventTime)|
      // interval will be very short. |latency - bufferDuration| will be
      // negative, effectively moving mLatency back to a smaller and smaller
      // value, until it crosses zero, at which point we stop dropping buffers
      // and resume normal operation. This does not work if the MSG thread was
      // slowed down at the same time, so if the latency on the MSG thread is
      // normal, and we are still dropping buffers, and mLatency is still more
      // than twice the duration of a buffer, we reset it and stop dropping
      // buffers.
      float latency = (now - mLastEventTime).ToSeconds();
      float bufferDuration = aBufferSize / mSampleRate;
      mLatency += latency - bufferDuration;
      mLastEventTime = now;
      if (mLatency > MAX_LATENCY_S ||
          (mDroppingBuffers && mLatency > 0.0 &&
           fabs(latency - bufferDuration) < bufferDuration)) {
        mDroppingBuffers = true;
        return;
      } else {
        if (mDroppingBuffers) {
          mLatency = 0;
        }
        mDroppingBuffers = false;
      }
    }

    MutexAutoLock lock(mOutputQueue.Lock());
    for (uint32_t offset = 0; offset < aBufferSize; offset += WEBAUDIO_BLOCK_SIZE) {
      AudioChunk& chunk = mOutputQueue.Produce();
      if (aBuffer) {
        chunk.mDuration = WEBAUDIO_BLOCK_SIZE;
        chunk.mBuffer = aBuffer;
        chunk.mChannelData.SetLength(aBuffer->GetChannels());
        for (uint32_t i = 0; i < aBuffer->GetChannels(); ++i) {
          chunk.mChannelData[i] = aBuffer->GetData(i) + offset;
        }
        chunk.mVolume = 1.0f;
        chunk.mBufferFormat = AUDIO_FORMAT_FLOAT32;
      } else {
        chunk.SetNull(WEBAUDIO_BLOCK_SIZE);
      }
    }
  }

  // graph thread
  AudioChunk GetOutputBuffer()
  {
    MOZ_ASSERT(!NS_IsMainThread());
    AudioChunk buffer;

    {
      MutexAutoLock lock(mOutputQueue.Lock());
      if (mOutputQueue.ReadyToConsume() > 0) {
        if (mDelaySoFar == TRACK_TICKS_MAX) {
          mDelaySoFar = 0;
        }
        buffer = mOutputQueue.Consume();
      } else {
        // If we're out of buffers to consume, just output silence
        buffer.SetNull(WEBAUDIO_BLOCK_SIZE);
        if (mDelaySoFar != TRACK_TICKS_MAX) {
          // Remember the delay that we just hit
          mDelaySoFar += WEBAUDIO_BLOCK_SIZE;
        }
      }
    }

    return buffer;
  }

  TrackTicks DelaySoFar() const
  {
    MOZ_ASSERT(!NS_IsMainThread());
    return mDelaySoFar == TRACK_TICKS_MAX ? 0 : mDelaySoFar;
  }

  void Reset()
  {
    MOZ_ASSERT(!NS_IsMainThread());
    mDelaySoFar = TRACK_TICKS_MAX;
    mLatency = 0.0f;
    {
      MutexAutoLock lock(mOutputQueue.Lock());
      mOutputQueue.Clear();
    }
    mLastEventTime = TimeStamp();
  }

private:
  OutputQueue mOutputQueue;
  // How much delay we've seen so far. This measures the amount of delay
  // caused by the main thread lagging behind in producing output buffers.
  // TRACK_TICKS_MAX means that we have not received our first buffer yet.
  TrackTicks mDelaySoFar;
  // The samplerate of the context.
  float mSampleRate;
  // This is the latency caused by the buffering. If this grows too high, we
  // will drop buffers until it is acceptable.
  float mLatency;
  // This is the time at which we last produced a buffer, to detect if the main
  // thread has been blocked.
  TimeStamp mLastEventTime;
  // True if we should be dropping buffers.
  bool mDroppingBuffers;
};

class ScriptProcessorNodeEngine : public AudioNodeEngine
{
public:
  typedef nsAutoTArray<nsAutoArrayPtr<float>, 2> InputChannels;

  ScriptProcessorNodeEngine(ScriptProcessorNode* aNode,
                            AudioDestinationNode* aDestination,
                            uint32_t aBufferSize,
                            uint32_t aNumberOfInputChannels)
    : AudioNodeEngine(aNode)
    , mSharedBuffers(aNode->GetSharedBuffers())
    , mSource(nullptr)
    , mDestination(static_cast<AudioNodeStream*>(aDestination->Stream()))
    , mBufferSize(aBufferSize)
    , mInputWriteIndex(0)
    , mSeenNonSilenceInput(false)
  {
    mInputChannels.SetLength(aNumberOfInputChannels);
    AllocateInputBlock();
  }

  void SetSourceStream(AudioNodeStream* aSource)
  {
    mSource = aSource;
  }

  virtual void ProcessBlock(AudioNodeStream* aStream,
                            const AudioChunk& aInput,
                            AudioChunk* aOutput,
                            bool* aFinished) MOZ_OVERRIDE
  {
    MutexAutoLock lock(NodeMutex());

    // If our node is dead, just output silence.
    if (!Node()) {
      aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
      return;
    }

    // This node is not connected to anything. Per spec, we don't fire the
    // onaudioprocess event. We also want to clear out the input and output
    // buffer queue, and output a null buffer.
    if (!(aStream->ConsumerCount() ||
          aStream->AsProcessedStream()->InputPortCount())) {
      aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
      mSharedBuffers->Reset();
      mSeenNonSilenceInput = false;
      mInputWriteIndex = 0;
      return;
    }

    // First, record our input buffer
    for (uint32_t i = 0; i < mInputChannels.Length(); ++i) {
      if (aInput.IsNull()) {
        PodZero(mInputChannels[i] + mInputWriteIndex,
                aInput.GetDuration());
      } else {
        mSeenNonSilenceInput = true;
        MOZ_ASSERT(aInput.GetDuration() == WEBAUDIO_BLOCK_SIZE, "sanity check");
        MOZ_ASSERT(aInput.mChannelData.Length() == mInputChannels.Length());
        AudioBlockCopyChannelWithScale(static_cast<const float*>(aInput.mChannelData[i]),
                                       aInput.mVolume,
                                       mInputChannels[i] + mInputWriteIndex);
      }
    }
    mInputWriteIndex += aInput.GetDuration();

    // Now, see if we have data to output
    // Note that we need to do this before sending the buffer to the main
    // thread so that our delay time is updated.
    *aOutput = mSharedBuffers->GetOutputBuffer();

    if (mInputWriteIndex >= mBufferSize) {
      SendBuffersToMainThread(aStream);
      mInputWriteIndex -= mBufferSize;
      mSeenNonSilenceInput = false;
      AllocateInputBlock();
    }
  }

  virtual size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const MOZ_OVERRIDE
  {
    // Not owned:
    // - mSharedBuffers
    // - mSource (probably)
    // - mDestination (probably)
    size_t amount = AudioNodeEngine::SizeOfExcludingThis(aMallocSizeOf);
    amount += mInputChannels.SizeOfExcludingThis(aMallocSizeOf);
    for (size_t i = 0; i < mInputChannels.Length(); i++) {
      amount += mInputChannels[i].SizeOfExcludingThis(aMallocSizeOf);
    }

    return amount;
  }

  virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const MOZ_OVERRIDE
  {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

private:
  void AllocateInputBlock()
  {
    for (unsigned i = 0; i < mInputChannels.Length(); ++i) {
      if (!mInputChannels[i]) {
        mInputChannels[i] = new float[mBufferSize];
      }
    }
  }

  void SendBuffersToMainThread(AudioNodeStream* aStream)
  {
    MOZ_ASSERT(!NS_IsMainThread());

    // We now have a full input buffer ready to be sent to the main thread.
    TrackTicks playbackTick = mSource->GetCurrentPosition();
    // Add the duration of the current sample
    playbackTick += WEBAUDIO_BLOCK_SIZE;
    // Add the delay caused by the main thread
    playbackTick += mSharedBuffers->DelaySoFar();
    // Compute the playback time in the coordinate system of the destination
    // FIXME: bug 970773
    double playbackTime =
      mSource->DestinationTimeFromTicks(mDestination, playbackTick);

    class Command : public nsRunnable
    {
    public:
      Command(AudioNodeStream* aStream,
              InputChannels& aInputChannels,
              double aPlaybackTime,
              bool aNullInput)
        : mStream(aStream)
        , mPlaybackTime(aPlaybackTime)
        , mNullInput(aNullInput)
      {
        mInputChannels.SetLength(aInputChannels.Length());
        if (!aNullInput) {
          for (uint32_t i = 0; i < mInputChannels.Length(); ++i) {
            mInputChannels[i] = aInputChannels[i].forget();
          }
        }
      }

      NS_IMETHODIMP Run()
      {
        // If it's not safe to run scripts right now, schedule this to run later
        if (!nsContentUtils::IsSafeToRunScript()) {
          nsContentUtils::AddScriptRunner(this);
          return NS_OK;
        }

        nsRefPtr<ScriptProcessorNode> node;
        {
          // No need to keep holding the lock for the whole duration of this
          // function, since we're holding a strong reference to it, so if
          // we can obtain the reference, we will hold the node alive in
          // this function.
          MutexAutoLock lock(mStream->Engine()->NodeMutex());
          node = static_cast<ScriptProcessorNode*>(mStream->Engine()->Node());
        }
        if (!node || !node->Context()) {
          return NS_OK;
        }

        AutoPushJSContext cx(node->Context()->GetJSContext());
        if (cx) {
          // Create the input buffer
          nsRefPtr<AudioBuffer> inputBuffer;
          if (!mNullInput) {
            ErrorResult rv;
            inputBuffer =
              AudioBuffer::Create(node->Context(), mInputChannels.Length(),
                                  node->BufferSize(),
                                  node->Context()->SampleRate(), cx, rv);
            if (rv.Failed()) {
              return NS_OK;
            }
            // Put the channel data inside it
            for (uint32_t i = 0; i < mInputChannels.Length(); ++i) {
              inputBuffer->SetRawChannelContents(cx, i, mInputChannels[i]);
            }
          }

          // Ask content to produce data in the output buffer
          // Note that we always avoid creating the output buffer here, and we
          // try to avoid creating the input buffer as well. The
          // AudioProcessingEvent class knows how to lazily create them if
          // needed once the script tries to access them. Otherwise, we may be
          // able to get away without creating them!
          nsRefPtr<AudioProcessingEvent> event =
            new AudioProcessingEvent(node, nullptr, nullptr);
          event->InitEvent(inputBuffer,
                           mInputChannels.Length(),
                           mPlaybackTime);
          node->DispatchTrustedEvent(event);

          // Steal the output buffers if they have been set. Don't create a
          // buffer if it hasn't been used to return output;
          // FinishProducingOutputBuffer() will optimize output = null.
          // GetThreadSharedChannelsForRate() may also return null after OOM.
          nsRefPtr<ThreadSharedFloatArrayBufferList> output;
          if (event->HasOutputBuffer()) {
            ErrorResult rv;
            AudioBuffer* buffer = event->GetOutputBuffer(rv);
            // HasOutputBuffer() returning true means that GetOutputBuffer()
            // will not fail.
            MOZ_ASSERT(!rv.Failed());
            output = buffer->GetThreadSharedChannelsForRate(cx);
          }

          // Append it to our output buffer queue
          node->GetSharedBuffers()->FinishProducingOutputBuffer(output, node->BufferSize());
        }
        return NS_OK;
      }

    private:
      nsRefPtr<AudioNodeStream> mStream;
      InputChannels mInputChannels;
      double mPlaybackTime;
      bool mNullInput;
    };

    NS_DispatchToMainThread(new Command(aStream, mInputChannels,
                                        playbackTime,
                                        !mSeenNonSilenceInput));
  }

  friend class ScriptProcessorNode;

  SharedBuffers* mSharedBuffers;
  AudioNodeStream* mSource;
  AudioNodeStream* mDestination;
  InputChannels mInputChannels;
  const uint32_t mBufferSize;
  // The write index into the current input buffer
  uint32_t mInputWriteIndex;
  bool mSeenNonSilenceInput;
};

ScriptProcessorNode::ScriptProcessorNode(AudioContext* aContext,
                                         uint32_t aBufferSize,
                                         uint32_t aNumberOfInputChannels,
                                         uint32_t aNumberOfOutputChannels)
  : AudioNode(aContext,
              aNumberOfInputChannels,
              mozilla::dom::ChannelCountMode::Explicit,
              mozilla::dom::ChannelInterpretation::Speakers)
  , mSharedBuffers(new SharedBuffers(aContext->SampleRate()))
  , mBufferSize(aBufferSize ?
                  aBufferSize : // respect what the web developer requested
                  4096)         // choose our own buffer size -- 4096 frames for now
  , mNumberOfOutputChannels(aNumberOfOutputChannels)
{
  MOZ_ASSERT(BufferSize() % WEBAUDIO_BLOCK_SIZE == 0, "Invalid buffer size");
  ScriptProcessorNodeEngine* engine =
    new ScriptProcessorNodeEngine(this,
                                  aContext->Destination(),
                                  BufferSize(),
                                  aNumberOfInputChannels);
  mStream = aContext->Graph()->CreateAudioNodeStream(engine, MediaStreamGraph::INTERNAL_STREAM);
  engine->SetSourceStream(static_cast<AudioNodeStream*>(mStream.get()));
}

ScriptProcessorNode::~ScriptProcessorNode()
{
}

size_t
ScriptProcessorNode::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
  size_t amount = AudioNode::SizeOfExcludingThis(aMallocSizeOf);
  amount += mSharedBuffers->SizeOfIncludingThis(aMallocSizeOf);
  return amount;
}

size_t
ScriptProcessorNode::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
{
  return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
}

JSObject*
ScriptProcessorNode::WrapObject(JSContext* aCx)
{
  return ScriptProcessorNodeBinding::Wrap(aCx, this);
}

}
}

