Sat, 03 Jan 2015 20:18:00 +0100
Conditionally enable double key logic according to:
private browsing mode or privacy.thirdparty.isolate preference and
implement in GetCookieStringCommon and FindCookie where it counts...
with some reservations about how to convince FindCookie users to test the
condition and pass a nullptr when disabling the double-key logic.
michael@0 | 1 | /* This Source Code Form is subject to the terms of the Mozilla Public |
michael@0 | 2 | * License, v. 2.0. If a copy of the MPL was not distributed with this |
michael@0 | 3 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
michael@0 | 4 | "use strict"; |
michael@0 | 5 | |
michael@0 | 6 | const {Cc, Ci, Cu, Cr} = require("chrome"); |
michael@0 | 7 | |
michael@0 | 8 | const Services = require("Services"); |
michael@0 | 9 | |
michael@0 | 10 | const { Promise: promise } = Cu.import("resource://gre/modules/Promise.jsm", {}); |
michael@0 | 11 | const events = require("sdk/event/core"); |
michael@0 | 12 | const protocol = require("devtools/server/protocol"); |
michael@0 | 13 | const { CallWatcherActor, CallWatcherFront } = require("devtools/server/actors/call-watcher"); |
michael@0 | 14 | |
michael@0 | 15 | const { on, once, off, emit } = events; |
michael@0 | 16 | const { method, Arg, Option, RetVal } = protocol; |
michael@0 | 17 | |
exports.register = function(handle) {
  // Expose the web audio actor on each tab under the "webaudioActor"
  // form name so devtools clients can attach to it.
  handle.addTabActor(WebAudioActor, "webaudioActor");
};
michael@0 | 21 | |
exports.unregister = function(handle) {
  // Undo the registration performed in `register`.
  handle.removeTabActor(WebAudioActor);
};
michael@0 | 25 | |
// Content global constructors whose instances are instrumented by the
// call watcher (see WebAudioActor.setup).
const AUDIO_GLOBALS = [
  "AudioContext", "AudioNode"
];
michael@0 | 29 | |
// AudioContext factory methods whose calls signal the creation of a new
// AudioNode (handled in _handleCreationCall).
const NODE_CREATION_METHODS = [
  "createBufferSource", "createMediaElementSource", "createMediaStreamSource",
  "createMediaStreamDestination", "createScriptProcessor", "createAnalyser",
  "createGain", "createDelay", "createBiquadFilter", "createWaveShaper",
  "createPanner", "createConvolver", "createChannelSplitter", "createChannelMerger",
  "createDynamicsCompressor", "createOscillator"
];
michael@0 | 37 | |
// AudioNode methods that change the audio graph's routing
// (handled in _handleRoutingCall).
const NODE_ROUTING_METHODS = [
  "connect", "disconnect"
];
michael@0 | 41 | |
// Table of the AudioNode properties tracked per node type. Each entry maps
// a property name to a flag object consumed by the front end:
//   { "readonly": true }     -- property may be displayed but not set
//   { "Buffer": true }       -- value is an AudioBuffer
//   { "Float32Array": true } -- value is a Float32Array
// An empty flag object means a plain, writable primitive property.
const NODE_PROPERTIES = {
  "OscillatorNode": {
    "type": {},
    "frequency": {},
    "detune": {}
  },
  "GainNode": {
    "gain": {}
  },
  "DelayNode": {
    "delayTime": {}
  },
  "AudioBufferSourceNode": {
    "buffer": { "Buffer": true },
    "playbackRate": {},
    "loop": {},
    "loopStart": {},
    "loopEnd": {}
  },
  "ScriptProcessorNode": {
    "bufferSize": { "readonly": true }
  },
  "PannerNode": {
    "panningModel": {},
    "distanceModel": {},
    "refDistance": {},
    "maxDistance": {},
    "rolloffFactor": {},
    "coneInnerAngle": {},
    "coneOuterAngle": {},
    "coneOuterGain": {}
  },
  "ConvolverNode": {
    "buffer": { "Buffer": true },
    "normalize": {},
  },
  "DynamicsCompressorNode": {
    "threshold": {},
    "knee": {},
    "ratio": {},
    "reduction": {},
    "attack": {},
    "release": {}
  },
  "BiquadFilterNode": {
    "type": {},
    "frequency": {},
    "Q": {},
    "detune": {},
    "gain": {}
  },
  "WaveShaperNode": {
    "curve": { "Float32Array": true },
    "oversample": {}
  },
  "AnalyserNode": {
    "fftSize": {},
    "minDecibels": {},
    "maxDecibels": {},
    // Fixed typo: the actual AnalyserNode property is
    // `smoothingTimeConstant`, not "smoothingTimeConstraint" -- the
    // misspelled key could never be read off a real node.
    "smoothingTimeConstant": {},
    "frequencyBinCount": { "readonly": true },
  },
  "AudioDestinationNode": {},
  "ChannelSplitterNode": {},
  "ChannelMergerNode": {}
};
michael@0 | 108 | |
/**
 * Track an array of audio nodes.
 */

/**
 * An Audio Node actor allowing communication to a specific audio node in the
 * Audio Context graph.
 */
let AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
  typeName: "audionode",

  /**
   * Create the Audio Node actor.
   *
   * @param DebuggerServerConnection conn
   *        The server connection.
   * @param AudioNode node
   *        The AudioNode that was created.
   */
  initialize: function (conn, node) {
    protocol.Actor.prototype.initialize.call(this, conn);
    this.node = unwrap(node);
    try {
      // Derive the node's class name ("OscillatorNode", ...) from its
      // stringification, e.g. "[object OscillatorNode]".
      this.type = this.node.toString().match(/\[object (.*)\]$/)[1];
    } catch (e) {
      // toString() can fail (e.g. dead object wrappers); keep the actor
      // alive with an empty type rather than throwing.
      this.type = "";
    }
  },

  /**
   * Returns the name of the audio type.
   * Examples: "OscillatorNode", "MediaElementAudioSourceNode"
   */
  getType: method(function () {
    return this.type;
  }, {
    response: { type: RetVal("string") }
  }),

  /**
   * Returns a boolean indicating if the node is a source node,
   * like BufferSourceNode, MediaElementAudioSourceNode, OscillatorNode, etc.
   */
  isSource: method(function () {
    return !!~this.type.indexOf("Source") || this.type === "OscillatorNode";
  }, {
    response: { source: RetVal("boolean") }
  }),

  /**
   * Changes a param on the audio node. Responds with `undefined` on
   * success, or a JSON description of the error (see `constructError`)
   * upon param set failure.
   *
   * @param String param
   *        Name of the AudioParam to change.
   * @param String value
   *        Value to change AudioParam to.
   */
  setParam: method(function (param, value) {
    // Strip quotes because sometimes UIs include that for strings
    if (typeof value === "string") {
      value = value.replace(/[\'\"]*/g, "");
    }
    try {
      // AudioParams are set via their `value` property; anything else is
      // a plain property assignment (e.g. `type`, `loop`).
      if (isAudioParam(this.node, param))
        this.node[param].value = value;
      else
        this.node[param] = value;
      return undefined;
    } catch (e) {
      return constructError(e);
    }
  }, {
    request: {
      param: Arg(0, "string"),
      value: Arg(1, "nullable:primitive")
    },
    response: { error: RetVal("nullable:json") }
  }),

  /**
   * Gets a param on the audio node.
   *
   * @param String param
   *        Name of the AudioParam to fetch.
   */
  getParam: method(function (param) {
    // Bail out only when the property is genuinely absent or null:
    // falsy-but-valid values such as `false` (loop), `0` (detune) or ""
    // must be reported as-is, not collapsed to "undefined".
    if (this.node[param] == null)
      return undefined;
    // AudioParams carry their current value in `.value`; plain
    // properties are read directly.
    let value = isAudioParam(this.node, param) ? this.node[param].value : this.node[param];
    return value;
  }, {
    request: {
      param: Arg(0, "string")
    },
    response: { text: RetVal("nullable:primitive") }
  }),

  /**
   * Get an object containing key-value pairs of additional attributes
   * to be consumed by a front end, like if a property should be read only,
   * or is a special type (Float32Array, Buffer, etc.)
   *
   * @param String param
   *        Name of the AudioParam whose flags are desired.
   */
  getParamFlags: method(function (param) {
    return (NODE_PROPERTIES[this.type] || {})[param];
  }, {
    request: { param: Arg(0, "string") },
    response: { flags: RetVal("nullable:primitive") }
  }),

  /**
   * Get an array of objects each containing a `param` and `value` property,
   * corresponding to a property name and current value of the audio node.
   */
  getParams: method(function (param) {
    // Guard against unknown node types (`this.type` may be "" when
    // initialize failed to identify the node), mirroring the `|| {}`
    // fallback used in getParamFlags instead of throwing on a missing
    // table entry.
    let props = Object.keys(NODE_PROPERTIES[this.type] || {});
    return props.map(prop =>
      ({ param: prop, value: this.getParam(prop), flags: this.getParamFlags(prop) }));
  }, {
    response: { params: RetVal("json") }
  })
});
michael@0 | 235 | |
michael@0 | 236 | /** |
michael@0 | 237 | * The corresponding Front object for the AudioNodeActor. |
michael@0 | 238 | */ |
let AudioNodeFront = protocol.FrontClass(AudioNodeActor, {
  initialize: function (client, form) {
    protocol.Front.prototype.initialize.call(this, client, form);
    // Root this front in its own actor pool and have it manage itself so
    // it receives an actor ID.
    client.addActorPool(this);
    this.manage(this);
  }
});
michael@0 | 246 | |
michael@0 | 247 | /** |
michael@0 | 248 | * The Web Audio Actor handles simple interaction with an AudioContext |
michael@0 | 249 | * high-level methods. After instantiating this actor, you'll need to set it |
michael@0 | 250 | * up by calling setup(). |
michael@0 | 251 | */ |
let WebAudioActor = exports.WebAudioActor = protocol.ActorClass({
  typeName: "webaudio",

  /**
   * Create the Web Audio actor.
   *
   * @param DebuggerServerConnection conn
   *        The server connection.
   * @param TabActor tabActor
   *        The tab actor whose content this actor instruments.
   */
  initialize: function(conn, tabActor) {
    protocol.Actor.prototype.initialize.call(this, conn);
    this.tabActor = tabActor;
    // Bound once so the call watcher can invoke it as a plain callback.
    this._onContentFunctionCall = this._onContentFunctionCall.bind(this);
  },

  destroy: function(conn) {
    protocol.Actor.prototype.destroy.call(this, conn);
    this.finalize();
  },

  /**
   * Starts waiting for the current tab actor's document global to be
   * created, in order to instrument the Canvas context and become
   * aware of everything the content does with Web Audio.
   *
   * See ContentObserver and WebAudioInstrumenter for more details.
   */
  setup: method(function({ reload }) {
    // Idempotent: subsequent setup() calls are no-ops until finalize().
    if (this._initialized) {
      return;
    }
    this._initialized = true;

    // Map audio nodes to their corresponding actors.
    // NOTE(review): this is a strong Map (not a WeakMap as the original
    // comment claimed), so entries are retained until finalize().
    this._nodeActors = new Map();

    this._callWatcher = new CallWatcherActor(this.conn, this.tabActor);
    this._callWatcher.onCall = this._onContentFunctionCall;
    this._callWatcher.setup({
      tracedGlobals: AUDIO_GLOBALS,
      startRecording: true,
      performReload: reload
    });

    // Used to track when something is happening with the web audio API
    // the first time, to ultimately fire `start-context` event
    this._firstNodeCreated = false;
  }, {
    request: { reload: Option(0, "boolean") },
    oneway: true
  }),

  /**
   * Invoked whenever an instrumented function is called, like an AudioContext
   * method or an AudioNode method.
   */
  _onContentFunctionCall: function(functionCall) {
    let { name } = functionCall.details;

    // All Web Audio nodes inherit from AudioNode's prototype, so
    // hook into the `connect` and `disconnect` methods
    if (WebAudioFront.NODE_ROUTING_METHODS.has(name)) {
      this._handleRoutingCall(functionCall);
    }
    else if (WebAudioFront.NODE_CREATION_METHODS.has(name)) {
      this._handleCreationCall(functionCall);
    }
  },

  /**
   * Dispatches a `connect`/`disconnect` call to the appropriate
   * connection/disconnection handler.
   */
  _handleRoutingCall: function(functionCall) {
    let { caller, args, window, name } = functionCall.details;
    let source = unwrap(caller);
    let dest = unwrap(args[0]);
    let isAudioParam = dest instanceof unwrap(window.AudioParam);

    // audionode.connect(param)
    if (name === "connect" && isAudioParam) {
      this._onConnectParam(source, dest);
    }
    // audionode.connect(node)
    else if (name === "connect") {
      this._onConnectNode(source, dest);
    }
    // audionode.disconnect()
    else if (name === "disconnect") {
      this._onDisconnectNode(source);
    }
  },

  /**
   * Handles an AudioContext `create*` factory call by announcing the
   * resulting node (and, the first time, the context itself).
   */
  _handleCreationCall: function (functionCall) {
    let { caller, result } = functionCall.details;
    // Keep track of the first node created, so we can alert
    // the front end that an audio context is being used since
    // we're not hooking into the constructor itself, just its
    // instance's methods.
    if (!this._firstNodeCreated) {
      // Fire the start-up event if this is the first node created
      // and trigger a `create-node` event for the context destination
      this._onStartContext();
      this._onCreateNode(unwrap(caller.destination));
      this._firstNodeCreated = true;
    }
    this._onCreateNode(result);
  },

  /**
   * Stops listening for document global changes and puts this actor
   * to hibernation. This method is called automatically just before the
   * actor is destroyed.
   */
  finalize: method(function() {
    if (!this._initialized) {
      return;
    }
    this._initialized = false;
    this._callWatcher.eraseRecording();

    this._callWatcher.finalize();
    this._callWatcher = null;
  }, {
    oneway: true
  }),

  /**
   * Events emitted by this actor.
   */
  events: {
    "start-context": {
      type: "startContext"
    },
    "connect-node": {
      type: "connectNode",
      source: Option(0, "audionode"),
      dest: Option(0, "audionode")
    },
    "disconnect-node": {
      type: "disconnectNode",
      source: Arg(0, "audionode")
    },
    "connect-param": {
      type: "connectParam",
      source: Arg(0, "audionode"),
      param: Arg(1, "string")
    },
    "change-param": {
      type: "changeParam",
      source: Option(0, "audionode"),
      param: Option(0, "string"),
      value: Option(0, "string")
    },
    "create-node": {
      type: "createNode",
      source: Arg(0, "audionode")
    }
  },

  /**
   * Helper for constructing an AudioNodeActor, assigning to
   * internal weak map, and tracking via `manage` so it is assigned
   * an `actorID`.
   */
  _constructAudioNode: function (node) {
    let actor = new AudioNodeActor(this.conn, node);
    this.manage(actor);
    this._nodeActors.set(node, actor);
    return actor;
  },

  /**
   * Takes an AudioNode and returns the stored actor for it.
   * In some cases, we won't have an actor stored (for example,
   * connecting to an AudioDestinationNode, since it's implicitly
   * created), so make a new actor and store that.
   */
  _actorFor: function (node) {
    let actor = this._nodeActors.get(node);
    if (!actor) {
      actor = this._constructAudioNode(node);
    }
    return actor;
  },

  /**
   * Called on first audio node creation, signifying audio context usage
   */
  _onStartContext: function () {
    events.emit(this, "start-context");
  },

  /**
   * Called when one audio node is connected to another.
   */
  _onConnectNode: function (source, dest) {
    let sourceActor = this._actorFor(source);
    let destActor = this._actorFor(dest);
    events.emit(this, "connect-node", {
      source: sourceActor,
      dest: destActor
    });
  },

  /**
   * Called when an audio node is connected to an audio param.
   * Implement in bug 986705
   */
  _onConnectParam: function (source, dest) {
    // TODO bug 986705
  },

  /**
   * Called when an audio node is disconnected.
   */
  _onDisconnectNode: function (node) {
    let actor = this._actorFor(node);
    events.emit(this, "disconnect-node", actor);
  },

  /**
   * Called when a parameter changes on an audio node
   */
  _onParamChange: function (node, param, value) {
    let actor = this._actorFor(node);
    // Fixed event name: the events table declares "change-param"
    // ("param-change" was never registered, so the emission was silently
    // dropped and never reached the front).
    events.emit(this, "change-param", {
      source: actor,
      param: param,
      value: value
    });
  },

  /**
   * Called on node creation.
   */
  _onCreateNode: function (node) {
    let actor = this._constructAudioNode(node);
    events.emit(this, "create-node", actor);
  }
});
michael@0 | 482 | |
michael@0 | 483 | /** |
michael@0 | 484 | * The corresponding Front object for the WebAudioActor. |
michael@0 | 485 | */ |
let WebAudioFront = exports.WebAudioFront = protocol.FrontClass(WebAudioActor, {
  initialize: function(client, { webaudioActor }) {
    protocol.Front.prototype.initialize.call(this, client, { actor: webaudioActor });
    // Root this front in its own actor pool and have it manage itself so
    // it receives an actor ID.
    client.addActorPool(this);
    this.manage(this);
  }
});
michael@0 | 493 | |
// Expose the instrumented method name lists as Sets on the front for
// O(1) membership checks in _onContentFunctionCall.
WebAudioFront.NODE_CREATION_METHODS = new Set(NODE_CREATION_METHODS);
WebAudioFront.NODE_ROUTING_METHODS = new Set(NODE_ROUTING_METHODS);
michael@0 | 496 | |
michael@0 | 497 | /** |
michael@0 | 498 | * Determines whether or not property is an AudioParam. |
michael@0 | 499 | * |
michael@0 | 500 | * @param AudioNode node |
michael@0 | 501 | * An AudioNode. |
michael@0 | 502 | * @param String prop |
michael@0 | 503 | * Property of `node` to evaluate to see if it's an AudioParam. |
michael@0 | 504 | * @return Boolean |
michael@0 | 505 | */ |
/**
 * Determines whether or not property is an AudioParam.
 *
 * @param AudioNode node
 *        An AudioNode.
 * @param String prop
 *        Property of `node` to evaluate to see if it's an AudioParam.
 * @return Boolean
 */
function isAudioParam (node, prop) {
  // Guard against absent/null properties: calling toString() on
  // `undefined` would throw, and a nonexistent property is trivially
  // not an AudioParam.
  let value = node[prop];
  return value != null && /AudioParam/.test(value.toString());
}
michael@0 | 509 | |
michael@0 | 510 | /** |
michael@0 | 511 | * Takes an `Error` object and constructs a JSON-able response |
michael@0 | 512 | * |
michael@0 | 513 | * @param Error err |
michael@0 | 514 | * A TypeError, RangeError, etc. |
michael@0 | 515 | * @return Object |
michael@0 | 516 | */ |
/**
 * Takes an `Error` object and constructs a JSON-able response
 * containing the error's message and constructor name.
 *
 * @param Error err
 *        A TypeError, RangeError, etc.
 * @return Object
 */
function constructError (err) {
  let description = {};
  description.message = err.message;
  description.type = err.constructor.name;
  return description;
}
michael@0 | 523 | |
// Strips the chrome XPCNativeWrapper from a content object so its
// properties can be accessed directly from this privileged code.
function unwrap (obj) {
  return XPCNativeWrapper.unwrap(obj);
}