Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "AudioDestinationNode.h"
#include "mozilla/dom/AudioDestinationNodeBinding.h"
#include "mozilla/Preferences.h"
#include "AudioChannelAgent.h"
#include "AudioChannelService.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "MediaStreamGraph.h"
#include "OfflineAudioCompletionEvent.h"
#include "nsIInterfaceRequestorUtils.h"
#include "nsIDocShell.h"
#include "nsIPermissionManager.h"
#include "nsIScriptObjectPrincipal.h"
#include "nsServiceManagerUtils.h"
#include "nsIAppShell.h"
#include "nsWidgetsCID.h"

namespace mozilla {
namespace dom {

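// Only the address of this variable is used: it serves as the key that
// identifies the Web Audio output added to the destination stream (see
// AddAudioOutput and SetAudioOutputVolume below).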
static uint8_t gWebAudioOutputKey;

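// Engine driving the destination node of an OfflineAudioContext. It records
// everything it receives into mInputChannels and, once mLength frames have
// been captured, marks the stream as finished so that the main thread can
// dispatch an OfflineAudioCompletionEvent with the rendered buffer.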
class OfflineDestinationNodeEngine : public AudioNodeEngine
{
public:
  typedef AutoFallibleTArray<nsAutoArrayPtr<float>, 2> InputChannels;

  OfflineDestinationNodeEngine(AudioDestinationNode* aNode,
                               uint32_t aNumberOfChannels,
                               uint32_t aLength,
                               float aSampleRate)
    : AudioNodeEngine(aNode)
    , mWriteIndex(0)
    , mLength(aLength)
    , mSampleRate(aSampleRate)
  {
    // These allocations might fail if content provides a huge number of
    // channels or size, but it's OK since we'll deal with the failure
    // gracefully.
    if (mInputChannels.SetLength(aNumberOfChannels)) {
      static const fallible_t fallible = fallible_t();
      for (uint32_t i = 0; i < aNumberOfChannels; ++i) {
        mInputChannels[i] = new(fallible) float[aLength];
        if (!mInputChannels[i]) {
          mInputChannels.Clear();
          break;
        }
      }
    }
  }

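  // Runs on the graph thread for every processing block: appends up to
  // WEBAUDIO_BLOCK_SIZE frames of the (volume-scaled) input to the recording
  // buffer and sets *aFinished once mLength frames have been stored.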
  virtual void ProcessBlock(AudioNodeStream* aStream,
                            const AudioChunk& aInput,
                            AudioChunk* aOutput,
                            bool* aFinished) MOZ_OVERRIDE
  {
    // Do this just for the sake of political correctness; this output
    // will not go anywhere.
    *aOutput = aInput;

    // Handle the case of allocation failure in the input buffer
    if (mInputChannels.IsEmpty()) {
      return;
    }

    if (mWriteIndex >= mLength) {
      NS_ASSERTION(mWriteIndex == mLength, "Overshot length");
      // Don't record any more.
      return;
    }

    // Record our input buffer
    MOZ_ASSERT(mWriteIndex < mLength, "How did this happen?");
    const uint32_t duration = std::min(WEBAUDIO_BLOCK_SIZE, mLength - mWriteIndex);
    const uint32_t commonChannelCount = std::min(mInputChannels.Length(),
                                                 aInput.mChannelData.Length());
    // First, copy as many channels in the input as we have
    for (uint32_t i = 0; i < commonChannelCount; ++i) {
      if (aInput.IsNull()) {
        PodZero(mInputChannels[i] + mWriteIndex, duration);
      } else {
        const float* inputBuffer = static_cast<const float*>(aInput.mChannelData[i]);
        if (duration == WEBAUDIO_BLOCK_SIZE) {
          // Use the optimized version of the copy with scale operation
          AudioBlockCopyChannelWithScale(inputBuffer, aInput.mVolume,
                                         mInputChannels[i] + mWriteIndex);
        } else {
          if (aInput.mVolume == 1.0f) {
            PodCopy(mInputChannels[i] + mWriteIndex, inputBuffer, duration);
          } else {
            for (uint32_t j = 0; j < duration; ++j) {
              mInputChannels[i][mWriteIndex + j] = aInput.mVolume * inputBuffer[j];
            }
          }
        }
      }
    }
    // Then, silence all of the remaining channels
    for (uint32_t i = commonChannelCount; i < mInputChannels.Length(); ++i) {
      PodZero(mInputChannels[i] + mWriteIndex, duration);
    }
    mWriteIndex += duration;

    if (mWriteIndex >= mLength) {
      NS_ASSERTION(mWriteIndex == mLength, "Overshot length");
      // Go to finished state. When the graph's current time eventually reaches
      // the end of the stream, then the main thread will be notified and we'll
      // shut down the AudioContext.
      *aFinished = true;
    }
  }

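  // Called on the main thread once rendering is done: shuts the context down,
  // wraps the recorded channels in an AudioBuffer, and dispatches the
  // OfflineAudioCompletionEvent on the context.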
  void FireOfflineCompletionEvent(AudioDestinationNode* aNode)
  {
    AudioContext* context = aNode->Context();
    context->Shutdown();
    // Shutdown drops self reference, but the context is still referenced by aNode,
    // which is strongly referenced by the runnable that called
    // AudioDestinationNode::FireOfflineCompletionEvent.

    AutoPushJSContext cx(context->GetJSContext());
    if (!cx) {
      return;
    }
    JSAutoRequest ar(cx);

    // Create the input buffer
    ErrorResult rv;
    nsRefPtr<AudioBuffer> renderedBuffer =
      AudioBuffer::Create(context, mInputChannels.Length(),
                          mLength, mSampleRate, cx, rv);
    if (rv.Failed()) {
      return;
    }
    for (uint32_t i = 0; i < mInputChannels.Length(); ++i) {
      renderedBuffer->SetRawChannelContents(cx, i, mInputChannels[i]);
    }

    nsRefPtr<OfflineAudioCompletionEvent> event =
      new OfflineAudioCompletionEvent(context, nullptr, nullptr);
    event->InitEvent(renderedBuffer);
    context->DispatchTrustedEvent(event);
  }

  virtual size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const MOZ_OVERRIDE
  {
    size_t amount = AudioNodeEngine::SizeOfExcludingThis(aMallocSizeOf);
    amount += mInputChannels.SizeOfExcludingThis(aMallocSizeOf);
    return amount;
  }

  virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const MOZ_OVERRIDE
  {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

private:
  // The input to the destination node is recorded in the mInputChannels buffer.
  // When this buffer fills up with mLength frames, the buffered input is sent
  // to the main thread in order to dispatch OfflineAudioCompletionEvent.
  InputChannels mInputChannels;
  // An index representing the next offset in mInputChannels to be written to.
  uint32_t mWriteIndex;
  // How many frames the OfflineAudioContext intends to produce.
  uint32_t mLength;
  float mSampleRate;
};

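// Engine driving the destination node of a realtime AudioContext: it simply
// passes its input through to the audio output, scaled by a volume parameter
// that Mute()/Unmute() set from the main thread.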
class DestinationNodeEngine : public AudioNodeEngine
{
public:
  explicit DestinationNodeEngine(AudioDestinationNode* aNode)
    : AudioNodeEngine(aNode)
    , mVolume(1.0f)
  {
  }

  virtual void ProcessBlock(AudioNodeStream* aStream,
                            const AudioChunk& aInput,
                            AudioChunk* aOutput,
                            bool* aFinished) MOZ_OVERRIDE
  {
    *aOutput = aInput;
    aOutput->mVolume *= mVolume;
  }

  virtual void SetDoubleParameter(uint32_t aIndex, double aParam) MOZ_OVERRIDE
  {
    if (aIndex == VOLUME) {
      mVolume = aParam;
    }
  }

  enum Parameters {
    VOLUME,
  };

  virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const MOZ_OVERRIDE
  {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

private:
  float mVolume;
};

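// Whether the AudioChannelService-based policy (window visibility, window
// volume, per-channel permissions) should be applied to realtime destinations.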
static bool UseAudioChannelService()
{
  return Preferences::GetBool("media.useAudioChannelService");
}

NS_IMPL_CYCLE_COLLECTION_INHERITED(AudioDestinationNode, AudioNode,
                                   mAudioChannelAgent)

NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(AudioDestinationNode)
  NS_INTERFACE_MAP_ENTRY(nsIDOMEventListener)
  NS_INTERFACE_MAP_ENTRY(nsIAudioChannelAgentCallback)
  NS_INTERFACE_MAP_ENTRY(nsISupportsWeakReference)
NS_INTERFACE_MAP_END_INHERITING(AudioNode)

NS_IMPL_ADDREF_INHERITED(AudioDestinationNode, AudioNode)
NS_IMPL_RELEASE_INHERITED(AudioDestinationNode, AudioNode)

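// Realtime contexts attach to the shared MediaStreamGraph and use
// DestinationNodeEngine; offline contexts get their own non-realtime graph
// instance and OfflineDestinationNodeEngine. Realtime nodes also hook into
// the audio channel machinery when the service is enabled.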
AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
                                           bool aIsOffline,
                                           AudioChannel aChannel,
                                           uint32_t aNumberOfChannels,
                                           uint32_t aLength,
                                           float aSampleRate)
  : AudioNode(aContext,
              aIsOffline ? aNumberOfChannels : 2,
              ChannelCountMode::Explicit,
              ChannelInterpretation::Speakers)
  , mFramesToProduce(aLength)
  , mAudioChannel(AudioChannel::Normal)
  , mIsOffline(aIsOffline)
  , mHasFinished(false)
  , mExtraCurrentTime(0)
  , mExtraCurrentTimeSinceLastStartedBlocking(0)
  , mExtraCurrentTimeUpdatedSinceLastStableState(false)
{
  MediaStreamGraph* graph = aIsOffline ?
                            MediaStreamGraph::CreateNonRealtimeInstance(aSampleRate) :
                            MediaStreamGraph::GetInstance();
  AudioNodeEngine* engine = aIsOffline ?
                            new OfflineDestinationNodeEngine(this, aNumberOfChannels,
                                                             aLength, aSampleRate) :
                            static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));

  mStream = graph->CreateAudioNodeStream(engine, MediaStreamGraph::EXTERNAL_STREAM);
  mStream->SetAudioChannelType(aChannel);
  mStream->AddMainThreadListener(this);
  mStream->AddAudioOutput(&gWebAudioOutputKey);

  if (aChannel != AudioChannel::Normal) {
    ErrorResult rv;
    SetMozAudioChannelType(aChannel, rv);
  }

  if (!aIsOffline && UseAudioChannelService()) {
    nsCOMPtr<nsIDOMEventTarget> target = do_QueryInterface(GetOwner());
    if (target) {
      target->AddSystemEventListener(NS_LITERAL_STRING("visibilitychange"), this,
                                     /* useCapture = */ true,
                                     /* wantsUntrusted = */ false);
    }

    CreateAudioChannelAgent();
  }
}

size_t
AudioDestinationNode::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
  size_t amount = AudioNode::SizeOfExcludingThis(aMallocSizeOf);
  // Might be useful in the future:
  // - mAudioChannelAgent
  return amount;
}

size_t
AudioDestinationNode::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
{
  return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
}

void
AudioDestinationNode::DestroyMediaStream()
{
  if (mAudioChannelAgent && !Context()->IsOffline()) {
    mAudioChannelAgent->StopPlaying();
    mAudioChannelAgent = nullptr;

    nsCOMPtr<nsIDOMEventTarget> target = do_QueryInterface(GetOwner());
    NS_ENSURE_TRUE_VOID(target);

    target->RemoveSystemEventListener(NS_LITERAL_STRING("visibilitychange"), this,
                                      /* useCapture = */ true);
  }

  if (!mStream)
    return;

  mStream->RemoveMainThreadListener(this);
  MediaStreamGraph* graph = mStream->Graph();
  if (graph->IsNonRealtime()) {
    MediaStreamGraph::DestroyNonRealtimeInstance(graph);
  }
  AudioNode::DestroyMediaStream();
}

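// Main-thread listener callback: once the stream reports that it has finished,
// dispatch the offline completion event (offline contexts only) exactly once.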
void
AudioDestinationNode::NotifyMainThreadStateChanged()
{
  if (mStream->IsFinished() && !mHasFinished) {
    mHasFinished = true;
    if (mIsOffline) {
      nsCOMPtr<nsIRunnable> runnable =
        NS_NewRunnableMethod(this, &AudioDestinationNode::FireOfflineCompletionEvent);
      NS_DispatchToCurrentThread(runnable);
    }
  }
}

void
AudioDestinationNode::FireOfflineCompletionEvent()
{
  AudioNodeStream* stream = static_cast<AudioNodeStream*>(Stream());
  OfflineDestinationNodeEngine* engine =
    static_cast<OfflineDestinationNodeEngine*>(stream->Engine());
  engine->FireOfflineCompletionEvent(this);
}

uint32_t
AudioDestinationNode::MaxChannelCount() const
{
  return Context()->MaxChannelCount();
}

void
AudioDestinationNode::SetChannelCount(uint32_t aChannelCount, ErrorResult& aRv)
{
  if (aChannelCount > MaxChannelCount()) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return;
  }

  AudioNode::SetChannelCount(aChannelCount, aRv);
}

void
AudioDestinationNode::Mute()
{
  MOZ_ASSERT(Context() && !Context()->IsOffline());
  SendDoubleParameterToStream(DestinationNodeEngine::VOLUME, 0.0f);
}

void
AudioDestinationNode::Unmute()
{
  MOZ_ASSERT(Context() && !Context()->IsOffline());
  SendDoubleParameterToStream(DestinationNodeEngine::VOLUME, 1.0f);
}

void
AudioDestinationNode::OfflineShutdown()
{
  MOZ_ASSERT(Context() && Context()->IsOffline(),
             "Should only be called on a valid OfflineAudioContext");

  MediaStreamGraph::DestroyNonRealtimeInstance(mStream->Graph());
  mOfflineRenderingRef.Drop(this);
}

JSObject*
AudioDestinationNode::WrapObject(JSContext* aCx)
{
  return AudioDestinationNodeBinding::Wrap(aCx, this);
}

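// Kicks off offline rendering. mOfflineRenderingRef keeps this node (and thus
// the OfflineAudioContext) alive until OfflineShutdown() drops the reference.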
void
AudioDestinationNode::StartRendering()
{
  mOfflineRenderingRef.Take(this);
  mStream->Graph()->StartNonRealtimeProcessing(TrackRate(Context()->SampleRate()), mFramesToProduce);
}

void
AudioDestinationNode::SetCanPlay(bool aCanPlay)
{
  mStream->SetTrackEnabled(AudioNodeStream::AUDIO_TRACK, aCanPlay);
}

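// nsIDOMEventListener: on a "visibilitychange" event, forward the owning
// docshell's active state to the audio channel agent.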
NS_IMETHODIMP
AudioDestinationNode::HandleEvent(nsIDOMEvent* aEvent)
{
  nsAutoString type;
  aEvent->GetType(type);

  if (!type.EqualsLiteral("visibilitychange")) {
    return NS_ERROR_FAILURE;
  }

  nsCOMPtr<nsIDocShell> docshell = do_GetInterface(GetOwner());
  NS_ENSURE_TRUE(docshell, NS_ERROR_FAILURE);

  bool isActive = false;
  docshell->GetIsActive(&isActive);

  mAudioChannelAgent->SetVisibilityState(isActive);
  return NS_OK;
}

NS_IMETHODIMP
AudioDestinationNode::CanPlayChanged(int32_t aCanPlay)
{
  SetCanPlay(aCanPlay == AudioChannelState::AUDIO_CHANNEL_STATE_NORMAL);
  return NS_OK;
}

NS_IMETHODIMP
AudioDestinationNode::WindowVolumeChanged()
{
  MOZ_ASSERT(mAudioChannelAgent);

  if (!mStream) {
    return NS_OK;
  }

  float volume;
  nsresult rv = mAudioChannelAgent->GetWindowVolume(&volume);
  NS_ENSURE_SUCCESS(rv, rv);

  mStream->SetAudioOutputVolume(&gWebAudioOutputKey, volume);
  return NS_OK;
}

AudioChannel
AudioDestinationNode::MozAudioChannelType() const
{
  return mAudioChannel;
}

void
AudioDestinationNode::SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv)
{
  if (Context()->IsOffline()) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }

  if (aValue != mAudioChannel &&
      CheckAudioChannelPermissions(aValue)) {
    mAudioChannel = aValue;

    if (mAudioChannelAgent) {
      CreateAudioChannelAgent();
    }
  }
}

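// A channel other than Normal or the service's default one may only be
// selected when the page's principal holds the matching
// "audio-channel-<name>" permission; everything else is allowed.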
bool
AudioDestinationNode::CheckAudioChannelPermissions(AudioChannel aValue)
{
  if (!Preferences::GetBool("media.useAudioChannelService")) {
    return true;
  }

  // Only normal channel doesn't need permission.
  if (aValue == AudioChannel::Normal) {
    return true;
  }

  // Maybe this audio channel is equal to the default one.
  if (aValue == AudioChannelService::GetDefaultAudioChannel()) {
    return true;
  }

  nsCOMPtr<nsIPermissionManager> permissionManager =
    do_GetService(NS_PERMISSIONMANAGER_CONTRACTID);
  if (!permissionManager) {
    return false;
  }

  nsCOMPtr<nsIScriptObjectPrincipal> sop = do_QueryInterface(GetOwner());
  NS_ASSERTION(sop, "Window didn't QI to nsIScriptObjectPrincipal!");
  nsCOMPtr<nsIPrincipal> principal = sop->GetPrincipal();

  uint32_t perm = nsIPermissionManager::UNKNOWN_ACTION;

  nsCString channel;
  channel.AssignASCII(AudioChannelValues::strings[uint32_t(aValue)].value,
                      AudioChannelValues::strings[uint32_t(aValue)].length);
  permissionManager->TestExactPermissionFromPrincipal(principal,
    nsCString(NS_LITERAL_CSTRING("audio-channel-") + channel).get(),
    &perm);

  return perm == nsIPermissionManager::ALLOW_ACTION;
}

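// (Re)creates the audio channel agent for the current mAudioChannel, seeds it
// with the docshell's visibility state, and applies the initial "can play"
// decision it reports.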
void
AudioDestinationNode::CreateAudioChannelAgent()
{
  if (mAudioChannelAgent) {
    mAudioChannelAgent->StopPlaying();
  }

  mAudioChannelAgent = new AudioChannelAgent();
  mAudioChannelAgent->InitWithWeakCallback(GetOwner(),
                                           static_cast<int32_t>(mAudioChannel),
                                           this);

  nsCOMPtr<nsIDocShell> docshell = do_GetInterface(GetOwner());
  if (docshell) {
    bool isActive = false;
    docshell->GetIsActive(&isActive);
    mAudioChannelAgent->SetVisibilityState(isActive);
  }

  int32_t state = 0;
  mAudioChannelAgent->StartPlaying(&state);
  SetCanPlay(state == AudioChannelState::AUDIO_CHANNEL_STATE_NORMAL);
}

void
AudioDestinationNode::NotifyStableState()
{
  mExtraCurrentTimeUpdatedSinceLastStableState = false;
}

static NS_DEFINE_CID(kAppShellCID, NS_APPSHELL_CID);

void
AudioDestinationNode::ScheduleStableStateNotification()
{
  nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID);
  if (appShell) {
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(this, &AudioDestinationNode::NotifyStableState);
    appShell->RunInStableState(event);
  }
}

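// While the stream is blocked because this is the only node in the context
// (see SetIsOnlyNodeForContext), this returns the wall-clock time accumulated
// since blocking started, on top of previously accumulated mExtraCurrentTime,
// so callers can compensate for the stalled stream time. The value is sampled
// at most once per stable state.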
double
AudioDestinationNode::ExtraCurrentTime()
{
  if (!mStartedBlockingDueToBeingOnlyNode.IsNull() &&
      !mExtraCurrentTimeUpdatedSinceLastStableState) {
    mExtraCurrentTimeUpdatedSinceLastStableState = true;
    mExtraCurrentTimeSinceLastStartedBlocking =
      (TimeStamp::Now() - mStartedBlockingDueToBeingOnlyNode).ToSeconds();
    ScheduleStableStateNotification();
  }
  return mExtraCurrentTime + mExtraCurrentTimeSinceLastStartedBlocking;
}

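// Blocks the destination stream of a realtime context while it is the only
// node (there is nothing to render), and unblocks it again when other nodes
// appear, folding the blocked interval into mExtraCurrentTime.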
void
AudioDestinationNode::SetIsOnlyNodeForContext(bool aIsOnlyNode)
{
  if (!mStartedBlockingDueToBeingOnlyNode.IsNull() == aIsOnlyNode) {
    // Nothing changed.
    return;
  }

  if (!mStream) {
    // DestroyMediaStream has been called, presumably during CC Unlink().
    return;
  }

  if (mIsOffline) {
    // Don't block the destination stream for offline AudioContexts, since
    // we expect the zero data produced when there are no other nodes to
    // show up in its result buffer. Also, we would get confused by adding
    // ExtraCurrentTime before StartRendering has even been called.
    return;
  }

  if (aIsOnlyNode) {
    mStream->ChangeExplicitBlockerCount(1);
    mStartedBlockingDueToBeingOnlyNode = TimeStamp::Now();
    // Don't do an update of mExtraCurrentTimeSinceLastStartedBlocking until the next stable state.
    mExtraCurrentTimeUpdatedSinceLastStableState = true;
    ScheduleStableStateNotification();
  } else {
    // Force update of mExtraCurrentTimeSinceLastStartedBlocking if necessary
    ExtraCurrentTime();
    mExtraCurrentTime += mExtraCurrentTimeSinceLastStartedBlocking;
    mExtraCurrentTimeSinceLastStartedBlocking = 0;
    mStream->ChangeExplicitBlockerCount(-1);
    mStartedBlockingDueToBeingOnlyNode = TimeStamp();
  }
}

} // namespace dom

} // namespace mozilla