toolkit/devtools/server/actors/webaudio.js

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/toolkit/devtools/server/actors/webaudio.js	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,526 @@
     1.4 +/* This Source Code Form is subject to the terms of the Mozilla Public
     1.5 + * License, v. 2.0. If a copy of the MPL was not distributed with this
     1.6 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.7 +"use strict";
     1.8 +
     1.9 +const {Cc, Ci, Cu, Cr} = require("chrome");
    1.10 +
    1.11 +const Services = require("Services");
    1.12 +
    1.13 +const { Promise: promise } = Cu.import("resource://gre/modules/Promise.jsm", {});
    1.14 +const events = require("sdk/event/core");
    1.15 +const protocol = require("devtools/server/protocol");
    1.16 +const { CallWatcherActor, CallWatcherFront } = require("devtools/server/actors/call-watcher");
    1.17 +
    1.18 +const { on, once, off, emit } = events;
    1.19 +const { method, Arg, Option, RetVal } = protocol;
    1.20 +
    1.21 +exports.register = function(handle) {
    1.22 +  handle.addTabActor(WebAudioActor, "webaudioActor");
    1.23 +};
    1.24 +
    1.25 +exports.unregister = function(handle) {
    1.26 +  handle.removeTabActor(WebAudioActor);
    1.27 +};
    1.28 +
    1.29 +const AUDIO_GLOBALS = [
    1.30 +  "AudioContext", "AudioNode"
    1.31 +];
    1.32 +
    1.33 +const NODE_CREATION_METHODS = [
    1.34 +  "createBufferSource", "createMediaElementSource", "createMediaStreamSource",
    1.35 +  "createMediaStreamDestination", "createScriptProcessor", "createAnalyser",
    1.36 +  "createGain", "createDelay", "createBiquadFilter", "createWaveShaper",
    1.37 +  "createPanner", "createConvolver", "createChannelSplitter", "createChannelMerger",
    1.38 +  "createDynamicsCompressor", "createOscillator"
    1.39 +];
    1.40 +
    1.41 +const NODE_ROUTING_METHODS = [
    1.42 +  "connect", "disconnect"
    1.43 +];
    1.44 +
    1.45 +const NODE_PROPERTIES = {
    1.46 +  "OscillatorNode": {
    1.47 +    "type": {},
    1.48 +    "frequency": {},
    1.49 +    "detune": {}
    1.50 +  },
    1.51 +  "GainNode": {
    1.52 +    "gain": {}
    1.53 +  },
    1.54 +  "DelayNode": {
    1.55 +    "delayTime": {}
    1.56 +  },
    1.57 +  "AudioBufferSourceNode": {
    1.58 +    "buffer": { "Buffer": true },
    1.59 +    "playbackRate": {},
    1.60 +    "loop": {},
    1.61 +    "loopStart": {},
    1.62 +    "loopEnd": {}
    1.63 +  },
    1.64 +  "ScriptProcessorNode": {
    1.65 +    "bufferSize": { "readonly": true }
    1.66 +  },
    1.67 +  "PannerNode": {
    1.68 +    "panningModel": {},
    1.69 +    "distanceModel": {},
    1.70 +    "refDistance": {},
    1.71 +    "maxDistance": {},
    1.72 +    "rolloffFactor": {},
    1.73 +    "coneInnerAngle": {},
    1.74 +    "coneOuterAngle": {},
    1.75 +    "coneOuterGain": {}
    1.76 +  },
    1.77 +  "ConvolverNode": {
    1.78 +    "buffer": { "Buffer": true },
    1.79 +    "normalize": {},
    1.80 +  },
    1.81 +  "DynamicsCompressorNode": {
    1.82 +    "threshold": {},
    1.83 +    "knee": {},
    1.84 +    "ratio": {},
    1.85 +    "reduction": {},
    1.86 +    "attack": {},
    1.87 +    "release": {}
    1.88 +  },
    1.89 +  "BiquadFilterNode": {
    1.90 +    "type": {},
    1.91 +    "frequency": {},
    1.92 +    "Q": {},
    1.93 +    "detune": {},
    1.94 +    "gain": {}
    1.95 +  },
    1.96 +  "WaveShaperNode": {
    1.97 +    "curve": { "Float32Array": true },
    1.98 +    "oversample": {}
    1.99 +  },
   1.100 +  "AnalyserNode": {
   1.101 +    "fftSize": {},
   1.102 +    "minDecibels": {},
   1.103 +    "maxDecibels": {},
    1.104 +    "smoothingTimeConstant": {},
   1.105 +    "frequencyBinCount": { "readonly": true },
   1.106 +  },
   1.107 +  "AudioDestinationNode": {},
   1.108 +  "ChannelSplitterNode": {},
   1.109 +  "ChannelMergerNode": {}
   1.110 +};
   1.111 +
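The property table above drives `getParamFlags` and `getParams` further down. Purely as an illustration (not part of this patch), a front end consuming those flags might format a property roughly like this hypothetical helper, assuming the `readonly`, `Buffer`, and `Float32Array` keys used above:

function describeParam(info) {
  // `info` is one { param, value, flags } entry returned by getParams().
  let flags = info.flags || {};
  if (flags.Buffer || flags.Float32Array) {
    // Special types would be fetched separately rather than shown as raw values.
    return info.param + ": <" + (flags.Buffer ? "AudioBuffer" : "Float32Array") + ">";
  }
  return info.param + ": " + info.value + (flags.readonly ? " (read only)" : "");
}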
   1.115 +/**
   1.116 + * An Audio Node actor allowing communication to a specific audio node in the
   1.117 + * Audio Context graph.
   1.118 + */
   1.119 +let AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
   1.120 +  typeName: "audionode",
   1.121 +
   1.122 +  /**
   1.123 +   * Create the Audio Node actor.
   1.124 +   *
   1.125 +   * @param DebuggerServerConnection conn
   1.126 +   *        The server connection.
   1.127 +   * @param AudioNode node
   1.128 +   *        The AudioNode that was created.
   1.129 +   */
   1.130 +  initialize: function (conn, node) {
   1.131 +    protocol.Actor.prototype.initialize.call(this, conn);
   1.132 +    this.node = unwrap(node);
   1.133 +    try {
   1.134 +      this.type = this.node.toString().match(/\[object (.*)\]$/)[1];
   1.135 +    } catch (e) {
   1.136 +      this.type = "";
   1.137 +    }
   1.138 +  },
   1.139 +
   1.140 +  /**
    1.141 +   * Returns the name of the audio node type.
   1.142 +   * Examples: "OscillatorNode", "MediaElementAudioSourceNode"
   1.143 +   */
   1.144 +  getType: method(function () {
   1.145 +    return this.type;
   1.146 +  }, {
   1.147 +    response: { type: RetVal("string") }
   1.148 +  }),
   1.149 +
   1.150 +  /**
   1.151 +   * Returns a boolean indicating if the node is a source node,
   1.152 +   * like BufferSourceNode, MediaElementAudioSourceNode, OscillatorNode, etc.
   1.153 +   */
   1.154 +  isSource: method(function () {
   1.155 +    return !!~this.type.indexOf("Source") || this.type === "OscillatorNode";
   1.156 +  }, {
   1.157 +    response: { source: RetVal("boolean") }
   1.158 +  }),
   1.159 +
   1.160 +  /**
    1.161 +   * Changes a param on the audio node. Responds with `undefined` on
    1.162 +   * success, or with a JSON-able object describing the error if the
    1.163 +   * param could not be set.
   1.164 +   *
   1.165 +   * @param String param
   1.166 +   *        Name of the AudioParam to change.
   1.167 +   * @param String value
   1.168 +   *        Value to change AudioParam to.
   1.169 +   */
   1.170 +  setParam: method(function (param, value) {
    1.171 +    // Strip quotes because UIs sometimes include them for strings
   1.172 +    if (typeof value === "string") {
   1.173 +      value = value.replace(/[\'\"]*/g, "");
   1.174 +    }
   1.175 +    try {
   1.176 +      if (isAudioParam(this.node, param))
   1.177 +        this.node[param].value = value;
   1.178 +      else
   1.179 +        this.node[param] = value;
   1.180 +      return undefined;
   1.181 +    } catch (e) {
   1.182 +      return constructError(e);
   1.183 +    }
   1.184 +  }, {
   1.185 +    request: {
   1.186 +      param: Arg(0, "string"),
   1.187 +      value: Arg(1, "nullable:primitive")
   1.188 +    },
   1.189 +    response: { error: RetVal("nullable:json") }
   1.190 +  }),
   1.191 +
   1.192 +  /**
   1.193 +   * Gets a param on the audio node.
   1.194 +   *
   1.195 +   * @param String param
   1.196 +   *        Name of the AudioParam to fetch.
   1.197 +   */
   1.198 +  getParam: method(function (param) {
    1.199 +    // If the property does not exist, just return "undefined"
    1.200 +    if (!(param in this.node))
   1.201 +      return undefined;
   1.202 +    let value = isAudioParam(this.node, param) ? this.node[param].value : this.node[param];
   1.203 +    return value;
   1.204 +  }, {
   1.205 +    request: {
   1.206 +      param: Arg(0, "string")
   1.207 +    },
   1.208 +    response: { text: RetVal("nullable:primitive") }
   1.209 +  }),
   1.210 +
   1.211 +  /**
   1.212 +   * Get an object containing key-value pairs of additional attributes
    1.213 +   * to be consumed by a front end, such as whether a property should be read-only,
   1.214 +   * or is a special type (Float32Array, Buffer, etc.)
   1.215 +   *
   1.216 +   * @param String param
   1.217 +   *        Name of the AudioParam whose flags are desired.
   1.218 +   */
   1.219 +  getParamFlags: method(function (param) {
   1.220 +    return (NODE_PROPERTIES[this.type] || {})[param];
   1.221 +  }, {
   1.222 +    request: { param: Arg(0, "string") },
   1.223 +    response: { flags: RetVal("nullable:primitive") }
   1.224 +  }),
   1.225 +
   1.226 +  /**
   1.227 +   * Get an array of objects each containing a `param` and `value` property,
   1.228 +   * corresponding to a property name and current value of the audio node.
   1.229 +   */
    1.230 +  getParams: method(function () {
   1.231 +    let props = Object.keys(NODE_PROPERTIES[this.type]);
   1.232 +    return props.map(prop =>
   1.233 +      ({ param: prop, value: this.getParam(prop), flags: this.getParamFlags(prop) }));
   1.234 +  }, {
   1.235 +    response: { params: RetVal("json") }
   1.236 +  })
   1.237 +});
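For context, a client holding the matching front (AudioNodeFront, defined just below) drives these methods asynchronously. A minimal sketch, assuming Task.jsm is available and `nodeFront` was obtained from a `create-node` event:

Task.spawn(function* () {
  let type = yield nodeFront.getType();        // e.g. "OscillatorNode"
  let params = yield nodeFront.getParams();    // [{ param, value, flags }, ...]
  // setParam resolves to undefined on success, or an error description object.
  let error = yield nodeFront.setParam("frequency", 220);
  if (error) {
    console.error(error.type + ": " + error.message);
  }
});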
   1.238 +
   1.239 +/**
   1.240 + * The corresponding Front object for the AudioNodeActor.
   1.241 + */
   1.242 +let AudioNodeFront = protocol.FrontClass(AudioNodeActor, {
   1.243 +  initialize: function (client, form) {
   1.244 +    protocol.Front.prototype.initialize.call(this, client, form);
   1.245 +    client.addActorPool(this);
   1.246 +    this.manage(this);
   1.247 +  }
   1.248 +});
   1.249 +
   1.250 +/**
    1.251 + * The Web Audio Actor handles simple interaction with an AudioContext's
   1.252 + * high-level methods. After instantiating this actor, you'll need to set it
   1.253 + * up by calling setup().
   1.254 + */
   1.255 +let WebAudioActor = exports.WebAudioActor = protocol.ActorClass({
   1.256 +  typeName: "webaudio",
   1.257 +  initialize: function(conn, tabActor) {
   1.258 +    protocol.Actor.prototype.initialize.call(this, conn);
   1.259 +    this.tabActor = tabActor;
   1.260 +    this._onContentFunctionCall = this._onContentFunctionCall.bind(this);
   1.261 +  },
   1.262 +
   1.263 +  destroy: function(conn) {
   1.264 +    protocol.Actor.prototype.destroy.call(this, conn);
   1.265 +    this.finalize();
   1.266 +  },
   1.267 +
   1.268 +  /**
   1.269 +   * Starts waiting for the current tab actor's document global to be
    1.270 +   * created, in order to instrument the AudioContext and become
   1.271 +   * aware of everything the content does with Web Audio.
   1.272 +   *
   1.273 +   * See ContentObserver and WebAudioInstrumenter for more details.
   1.274 +   */
   1.275 +  setup: method(function({ reload }) {
   1.276 +    if (this._initialized) {
   1.277 +      return;
   1.278 +    }
   1.279 +    this._initialized = true;
   1.280 +
    1.281 +    // Map of audio nodes to their corresponding actors
   1.282 +    this._nodeActors = new Map();
   1.283 +
   1.284 +    this._callWatcher = new CallWatcherActor(this.conn, this.tabActor);
   1.285 +    this._callWatcher.onCall = this._onContentFunctionCall;
   1.286 +    this._callWatcher.setup({
   1.287 +      tracedGlobals: AUDIO_GLOBALS,
   1.288 +      startRecording: true,
   1.289 +      performReload: reload
   1.290 +    });
   1.291 +
    1.292 +    // Used to track the first time the Web Audio API is used,
    1.293 +    // in order to ultimately fire the `start-context` event
   1.294 +    this._firstNodeCreated = false;
   1.295 +  }, {
   1.296 +    request: { reload: Option(0, "boolean") },
   1.297 +    oneway: true
   1.298 +  }),
   1.299 +
   1.300 +  /**
   1.301 +   * Invoked whenever an instrumented function is called, like an AudioContext
   1.302 +   * method or an AudioNode method.
   1.303 +   */
   1.304 +  _onContentFunctionCall: function(functionCall) {
   1.305 +    let { name } = functionCall.details;
   1.306 +
   1.307 +    // All Web Audio nodes inherit from AudioNode's prototype, so
   1.308 +    // hook into the `connect` and `disconnect` methods
   1.309 +    if (WebAudioFront.NODE_ROUTING_METHODS.has(name)) {
   1.310 +      this._handleRoutingCall(functionCall);
   1.311 +    }
   1.312 +    else if (WebAudioFront.NODE_CREATION_METHODS.has(name)) {
   1.313 +      this._handleCreationCall(functionCall);
   1.314 +    }
   1.315 +  },
   1.316 +
   1.317 +  _handleRoutingCall: function(functionCall) {
   1.318 +    let { caller, args, window, name } = functionCall.details;
   1.319 +    let source = unwrap(caller);
   1.320 +    let dest = unwrap(args[0]);
   1.321 +    let isAudioParam = dest instanceof unwrap(window.AudioParam);
   1.322 +
   1.323 +    // audionode.connect(param)
   1.324 +    if (name === "connect" && isAudioParam) {
   1.325 +      this._onConnectParam(source, dest);
   1.326 +    }
   1.327 +    // audionode.connect(node)
   1.328 +    else if (name === "connect") {
   1.329 +      this._onConnectNode(source, dest);
   1.330 +    }
   1.331 +    // audionode.disconnect()
   1.332 +    else if (name === "disconnect") {
   1.333 +      this._onDisconnectNode(source);
   1.334 +    }
   1.335 +  },
   1.336 +
   1.337 +  _handleCreationCall: function (functionCall) {
   1.338 +    let { caller, result } = functionCall.details;
   1.339 +    // Keep track of the first node created, so we can alert
   1.340 +    // the front end that an audio context is being used since
   1.341 +    // we're not hooking into the constructor itself, just its
   1.342 +    // instance's methods.
   1.343 +    if (!this._firstNodeCreated) {
   1.344 +      // Fire the start-up event if this is the first node created
   1.345 +      // and trigger a `create-node` event for the context destination
   1.346 +      this._onStartContext();
   1.347 +      this._onCreateNode(unwrap(caller.destination));
   1.348 +      this._firstNodeCreated = true;
   1.349 +    }
   1.350 +    this._onCreateNode(result);
   1.351 +  },
   1.352 +
   1.353 +  /**
   1.354 +   * Stops listening for document global changes and puts this actor
   1.355 +   * to hibernation. This method is called automatically just before the
   1.356 +   * actor is destroyed.
   1.357 +   */
   1.358 +  finalize: method(function() {
   1.359 +    if (!this._initialized) {
   1.360 +      return;
   1.361 +    }
   1.362 +    this._initialized = false;
   1.363 +    this._callWatcher.eraseRecording();
   1.364 +
   1.365 +    this._callWatcher.finalize();
   1.366 +    this._callWatcher = null;
   1.367 +  }, {
    1.368 +    oneway: true
   1.369 +  }),
   1.370 +
   1.371 +  /**
   1.372 +   * Events emitted by this actor.
   1.373 +   */
   1.374 +  events: {
   1.375 +    "start-context": {
   1.376 +      type: "startContext"
   1.377 +    },
   1.378 +    "connect-node": {
   1.379 +      type: "connectNode",
   1.380 +      source: Option(0, "audionode"),
   1.381 +      dest: Option(0, "audionode")
   1.382 +    },
   1.383 +    "disconnect-node": {
   1.384 +      type: "disconnectNode",
   1.385 +      source: Arg(0, "audionode")
   1.386 +    },
   1.387 +    "connect-param": {
   1.388 +      type: "connectParam",
   1.389 +      source: Arg(0, "audionode"),
   1.390 +      param: Arg(1, "string")
   1.391 +    },
   1.392 +    "change-param": {
   1.393 +      type: "changeParam",
   1.394 +      source: Option(0, "audionode"),
   1.395 +      param: Option(0, "string"),
   1.396 +      value: Option(0, "string")
   1.397 +    },
   1.398 +    "create-node": {
   1.399 +      type: "createNode",
   1.400 +      source: Arg(0, "audionode")
   1.401 +    }
   1.402 +  },
   1.403 +
   1.404 +  /**
   1.405 +   * Helper for constructing an AudioNodeActor, assigning to
   1.406 +   * internal weak map, and tracking via `manage` so it is assigned
   1.407 +   * an `actorID`.
   1.408 +   */
   1.409 +  _constructAudioNode: function (node) {
   1.410 +    let actor = new AudioNodeActor(this.conn, node);
   1.411 +    this.manage(actor);
   1.412 +    this._nodeActors.set(node, actor);
   1.413 +    return actor;
   1.414 +  },
   1.415 +
   1.416 +  /**
   1.417 +   * Takes an AudioNode and returns the stored actor for it.
   1.418 +   * In some cases, we won't have an actor stored (for example,
   1.419 +   * connecting to an AudioDestinationNode, since it's implicitly
   1.420 +   * created), so make a new actor and store that.
   1.421 +   */
   1.422 +  _actorFor: function (node) {
   1.423 +    let actor = this._nodeActors.get(node);
   1.424 +    if (!actor) {
   1.425 +      actor = this._constructAudioNode(node);
   1.426 +    }
   1.427 +    return actor;
   1.428 +  },
   1.429 +
   1.430 +  /**
   1.431 +   * Called on first audio node creation, signifying audio context usage
   1.432 +   */
   1.433 +  _onStartContext: function () {
   1.434 +    events.emit(this, "start-context");
   1.435 +  },
   1.436 +
   1.437 +  /**
   1.438 +   * Called when one audio node is connected to another.
   1.439 +   */
   1.440 +  _onConnectNode: function (source, dest) {
   1.441 +    let sourceActor = this._actorFor(source);
   1.442 +    let destActor = this._actorFor(dest);
   1.443 +    events.emit(this, "connect-node", {
   1.444 +      source: sourceActor,
   1.445 +      dest: destActor
   1.446 +    });
   1.447 +  },
   1.448 +
   1.449 +  /**
   1.450 +   * Called when an audio node is connected to an audio param.
   1.451 +   * Implement in bug 986705
   1.452 +   */
   1.453 +  _onConnectParam: function (source, dest) {
   1.454 +    // TODO bug 986705
   1.455 +  },
   1.456 +
   1.457 +  /**
   1.458 +   * Called when an audio node is disconnected.
   1.459 +   */
   1.460 +  _onDisconnectNode: function (node) {
   1.461 +    let actor = this._actorFor(node);
   1.462 +    events.emit(this, "disconnect-node", actor);
   1.463 +  },
   1.464 +
   1.465 +  /**
   1.466 +   * Called when a parameter changes on an audio node
   1.467 +   */
   1.468 +  _onParamChange: function (node, param, value) {
   1.469 +    let actor = this._actorFor(node);
    1.470 +    events.emit(this, "change-param", {
   1.471 +      source: actor,
   1.472 +      param: param,
   1.473 +      value: value
   1.474 +    });
   1.475 +  },
   1.476 +
   1.477 +  /**
   1.478 +   * Called on node creation.
   1.479 +   */
   1.480 +  _onCreateNode: function (node) {
   1.481 +    let actor = this._constructAudioNode(node);
   1.482 +    events.emit(this, "create-node", actor);
   1.483 +  }
   1.484 +});
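As the class comment notes, the actor must be set up after construction. A minimal, hypothetical client-side flow using the WebAudioFront defined below (the `client` and `form` variables come from the connected target and are assumptions here, not part of this patch):

let front = new WebAudioFront(client, form);
front.setup({ reload: true });

front.on("start-context", () => console.log("the page started using an AudioContext"));
front.on("create-node", nodeFront => {
  nodeFront.getType().then(type => console.log("created a " + type));
});
front.on("connect-node", ({ source, dest }) => {
  console.log("connected " + source.actorID + " -> " + dest.actorID);
});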
   1.485 +
   1.486 +/**
   1.487 + * The corresponding Front object for the WebAudioActor.
   1.488 + */
   1.489 +let WebAudioFront = exports.WebAudioFront = protocol.FrontClass(WebAudioActor, {
   1.490 +  initialize: function(client, { webaudioActor }) {
   1.491 +    protocol.Front.prototype.initialize.call(this, client, { actor: webaudioActor });
   1.492 +    client.addActorPool(this);
   1.493 +    this.manage(this);
   1.494 +  }
   1.495 +});
   1.496 +
   1.497 +WebAudioFront.NODE_CREATION_METHODS = new Set(NODE_CREATION_METHODS);
   1.498 +WebAudioFront.NODE_ROUTING_METHODS = new Set(NODE_ROUTING_METHODS);
   1.499 +
   1.500 +/**
   1.501 + * Determines whether or not property is an AudioParam.
   1.502 + *
   1.503 + * @param AudioNode node
   1.504 + *        An AudioNode.
   1.505 + * @param String prop
   1.506 + *        Property of `node` to evaluate to see if it's an AudioParam.
   1.507 + * @return Boolean
   1.508 + */
   1.509 +function isAudioParam (node, prop) {
    1.510 +  return !!node[prop] && /AudioParam/.test(node[prop].toString());
   1.511 +}
   1.512 +
   1.513 +/**
   1.514 + * Takes an `Error` object and constructs a JSON-able response
   1.515 + *
   1.516 + * @param Error err
   1.517 + *        A TypeError, RangeError, etc.
   1.518 + * @return Object
   1.519 + */
   1.520 +function constructError (err) {
   1.521 +  return {
   1.522 +    message: err.message,
   1.523 +    type: err.constructor.name
   1.524 +  };
   1.525 +}
   1.526 +
   1.527 +function unwrap (obj) {
   1.528 +  return XPCNativeWrapper.unwrap(obj);
   1.529 +}
