browser/devtools/webaudioeditor/webaudioeditor-controller.js

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Wed, 31 Dec 2014 06:09:35 +0100
changeset    0:6474c204b198
permissions  -rw-r--r--

Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";

const { classes: Cc, interfaces: Ci, utils: Cu, results: Cr } = Components;

Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource:///modules/devtools/ViewHelpers.jsm");

// Override DOM promises with Promise.jsm helpers
const { defer, all } = Cu.import("resource://gre/modules/Promise.jsm", {}).Promise;

const { Task } = Cu.import("resource://gre/modules/Task.jsm", {});
const require = Cu.import("resource://gre/modules/devtools/Loader.jsm", {}).devtools.require;
const EventEmitter = require("devtools/toolkit/event-emitter");
const STRINGS_URI = "chrome://browser/locale/devtools/webaudioeditor.properties";
let { console } = Cu.import("resource://gre/modules/devtools/Console.jsm", {});
// The panel's window global is an EventEmitter firing the following events:
const EVENTS = {
  // Fired when the first AudioNode has been created, signifying
  // that the AudioContext is being used and should be tracked via the editor.
  START_CONTEXT: "WebAudioEditor:StartContext",

  // On node creation, connection, and disconnection.
  CREATE_NODE: "WebAudioEditor:CreateNode",
  CONNECT_NODE: "WebAudioEditor:ConnectNode",
  DISCONNECT_NODE: "WebAudioEditor:DisconnectNode",

  // When a node gets GC'd.
  DESTROY_NODE: "WebAudioEditor:DestroyNode",

  // When a node parameter changes.
  CHANGE_PARAM: "WebAudioEditor:ChangeParam",

  // When the UI is reset from tab navigation.
  UI_RESET: "WebAudioEditor:UIReset",

  // When a param has been changed via the UI and successfully
  // pushed via the actor to the raw audio node.
  UI_SET_PARAM: "WebAudioEditor:UISetParam",

  // When an audio node is added to the list pane.
  UI_ADD_NODE_LIST: "WebAudioEditor:UIAddNodeList",

  // When the Audio Context graph finishes rendering. Emitted with two
  // arguments: the number of nodes rendered and the number of edges rendered.
  UI_GRAPH_RENDERED: "WebAudioEditor:UIGraphRendered"
};
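
// Illustrative sketch, not part of the upstream file: the panel window is
// decorated as an EventEmitter (see EventEmitter.decorate(this) near the end
// of this file), so a view could subscribe to these events roughly like this.
// The function name and the logging are hypothetical, and the function is
// never invoked here.
function exampleListenForGraphRendered() {
  window.on(EVENTS.UI_GRAPH_RENDERED, function (eventName, nodeCount, edgeCount) {
    console.log("Graph rendered: " + nodeCount + " nodes, " + edgeCount + " edges");
  });
}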
/**
 * The current target and the Web Audio Editor front, set by this tool's host.
 */
let gToolbox, gTarget, gFront;

/**
 * Track an array of audio nodes.
 */
let AudioNodes = [];
let AudioNodeConnections = new WeakMap();
// Light representation wrapping an AudioNode actor with additional properties.
function AudioNodeView (actor) {
  this.actor = actor;
  this.id = actor.actorID;
}

// A proxy for the underlying AudioNodeActor to fetch its type
// and subsequently assign the type to the instance.
AudioNodeView.prototype.getType = Task.async(function* () {
  this.type = yield this.actor.getType();
  return this.type;
});

// Helper method to create connections in the AudioNodeConnections
// WeakMap for rendering.
AudioNodeView.prototype.connect = function (destination) {
  let connections = AudioNodeConnections.get(this);
  if (!connections) {
    connections = [];
    AudioNodeConnections.set(this, connections);
  }
  connections.push(destination);
};

// Helper method to remove audio connections from the current AudioNodeView.
AudioNodeView.prototype.disconnect = function () {
  AudioNodeConnections.set(this, []);
};

// Returns a promise that resolves to an array of objects containing
// both a `param` name property and a `value` property.
AudioNodeView.prototype.getParams = function () {
  return this.actor.getParams();
};
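
// Illustrative sketch, not part of the upstream file: the typical lifetime of
// an AudioNodeView as driven by the controller below. `nodeActor` is assumed
// to be an AudioNodeActor handed over by the web audio front; the function is
// hypothetical and never invoked here.
let exampleTrackNode = Task.async(function* (nodeActor) {
  let node = new AudioNodeView(nodeActor);
  yield node.getType();              // resolves to the node's type string
  AudioNodes.push(node);             // makes it visible to getViewNodeByActor()
  let params = yield node.getParams();
  return { node: node, params: params };
});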
/**
 * Initializes the web audio editor views.
 */
function startupWebAudioEditor() {
  return all([
    WebAudioEditorController.initialize(),
    WebAudioGraphView.initialize(),
    WebAudioParamView.initialize()
  ]);
}

/**
 * Destroys the web audio editor controller and views.
 */
function shutdownWebAudioEditor() {
  return all([
    WebAudioEditorController.destroy(),
    WebAudioGraphView.destroy(),
    WebAudioParamView.destroy()
  ]);
}
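
// Illustrative sketch, not part of the upstream file: both helpers return a
// promise that resolves once every sub-view has finished, so a host could
// sequence them with Task. The wrapper name is hypothetical and it is never
// invoked here.
let exampleRestartEditor = Task.async(function* () {
  yield shutdownWebAudioEditor();
  yield startupWebAudioEditor();
});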
/**
 * Functions handling target-related lifetime events.
 */
let WebAudioEditorController = {
  /**
   * Listen for events emitted by the current tab target.
   */
  initialize: function() {
    this._onTabNavigated = this._onTabNavigated.bind(this);
    gTarget.on("will-navigate", this._onTabNavigated);
    gTarget.on("navigate", this._onTabNavigated);
    gFront.on("start-context", this._onStartContext);
    gFront.on("create-node", this._onCreateNode);
    gFront.on("connect-node", this._onConnectNode);
    gFront.on("disconnect-node", this._onDisconnectNode);
    gFront.on("change-param", this._onChangeParam);

    // Set up events to refresh the Graph view.
    window.on(EVENTS.CREATE_NODE, this._onUpdatedContext);
    window.on(EVENTS.CONNECT_NODE, this._onUpdatedContext);
    window.on(EVENTS.DISCONNECT_NODE, this._onUpdatedContext);
  },

  /**
   * Stop listening for events emitted by the current tab target.
   */
  destroy: function() {
    gTarget.off("will-navigate", this._onTabNavigated);
    gTarget.off("navigate", this._onTabNavigated);
    gFront.off("start-context", this._onStartContext);
    gFront.off("create-node", this._onCreateNode);
    gFront.off("connect-node", this._onConnectNode);
    gFront.off("disconnect-node", this._onDisconnectNode);
    gFront.off("change-param", this._onChangeParam);
    window.off(EVENTS.CREATE_NODE, this._onUpdatedContext);
    window.off(EVENTS.CONNECT_NODE, this._onUpdatedContext);
    window.off(EVENTS.DISCONNECT_NODE, this._onUpdatedContext);
  },
  /**
   * Called when a new audio node is created, or the audio context
   * routing changes.
   */
  _onUpdatedContext: function () {
    WebAudioGraphView.draw();
  },

  /**
   * Called for each location change in the debugged tab.
   */
  _onTabNavigated: function(event) {
    switch (event) {
      case "will-navigate": {
        Task.spawn(function* () {
          // Make sure the backend is prepared to handle audio contexts.
          yield gFront.setup({ reload: false });

          // Reset UI to show "Waiting for Audio Context..." and clear out
          // the current UI.
          WebAudioGraphView.resetUI();
          WebAudioParamView.resetUI();

          // Clear out stored audio nodes and their connections. A fresh
          // WeakMap is created because WeakMap has no clear() method in
          // standard JS.
          AudioNodes.length = 0;
          AudioNodeConnections = new WeakMap();
        }).then(() => window.emit(EVENTS.UI_RESET));
        break;
      }
      case "navigate": {
        // TODO Case of bfcache, needs investigating
        // bug 994250
        break;
      }
    }
  },
  /**
   * Called after the first audio node is created in an audio context,
   * signaling that the audio context is being used.
   */
  _onStartContext: function() {
    WebAudioGraphView.showContent();
    window.emit(EVENTS.START_CONTEXT);
  },

  /**
   * Called when a new node is created. Creates an `AudioNodeView` instance
   * for tracking throughout the editor.
   */
  _onCreateNode: Task.async(function* (nodeActor) {
    let node = new AudioNodeView(nodeActor);
    yield node.getType();
    AudioNodes.push(node);
    window.emit(EVENTS.CREATE_NODE, node.id);
  }),
  /**
   * Called when a node is connected to another node.
   */
  _onConnectNode: Task.async(function* ({ source: sourceActor, dest: destActor }) {
    // Node creation and connection usually happen back to back, and the
    // controller's `_onCreateNode` has to look up the node's type before the
    // view node exists, so this connect handler can fire before the graph
    // node is actually created. Check first and, if needed, wait for the
    // CREATE_NODE event before adding the edge.
    let [source, dest] = yield waitForNodeCreation(sourceActor, destActor);

    source.connect(dest);
    window.emit(EVENTS.CONNECT_NODE, source.id, dest.id);

    function waitForNodeCreation (sourceActor, destActor) {
      let deferred = defer();
      let source = getViewNodeByActor(sourceActor);
      let dest = getViewNodeByActor(destActor);

      if (!source || !dest) {
        window.on(EVENTS.CREATE_NODE, function createNodeListener (_, id) {
          let createdNode = getViewNodeById(id);
          if (equalActors(sourceActor, createdNode.actor))
            source = createdNode;
          if (equalActors(destActor, createdNode.actor))
            dest = createdNode;
          if (source && dest) {
            window.off(EVENTS.CREATE_NODE, createNodeListener);
            deferred.resolve([source, dest]);
          }
        });
      }
      else {
        deferred.resolve([source, dest]);
      }
      return deferred.promise;
    }
  }),
  /**
   * Called when a node is disconnected.
   */
  _onDisconnectNode: function(nodeActor) {
    let node = getViewNodeByActor(nodeActor);
    node.disconnect();
    window.emit(EVENTS.DISCONNECT_NODE, node.id);
  },

  /**
   * Called when a node param is changed.
   */
  _onChangeParam: function({ actor, param, value }) {
    window.emit(EVENTS.CHANGE_PARAM, getViewNodeByActor(actor), param, value);
  }
};
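
// Illustrative sketch, not part of the upstream file: how a renderer could
// walk the connections recorded by the controller above. Each AudioNodeView
// maps to an array of destination views in the AudioNodeConnections WeakMap.
// The function is hypothetical and never invoked here.
function exampleCollectEdges() {
  let edges = [];
  for (let source of AudioNodes) {
    let destinations = AudioNodeConnections.get(source) || [];
    for (let dest of destinations) {
      edges.push({ source: source.id, dest: dest.id });
    }
  }
  return edges;
}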
/**
 * Convenient way of emitting events from the panel window.
 */
EventEmitter.decorate(this);

/**
 * DOM query helpers.
 */
function $(selector, target = document) { return target.querySelector(selector); }
function $$(selector, target = document) { return target.querySelectorAll(selector); }
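
// Illustrative usage, not part of the upstream file: the selectors below are
// hypothetical and only show how the helpers are meant to be called. The
// function is never invoked here.
function exampleQueryHelpers() {
  let contentPane = $("#example-content-pane");          // first match in the document
  let paramRows = $$(".example-param-row", contentPane); // all matches within contentPane
  return { contentPane: contentPane, paramRows: paramRows };
}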
/**
 * Compares the `actorID` of two actors to determine whether they correspond
 * to the same underlying actor.
 */
function equalActors (actor1, actor2) {
  return actor1.actorID === actor2.actorID;
}

/**
 * Returns the corresponding ViewNode by actor.
 */
function getViewNodeByActor (actor) {
  for (let i = 0; i < AudioNodes.length; i++) {
    if (equalActors(AudioNodes[i].actor, actor))
      return AudioNodes[i];
  }
  return null;
}

/**
 * Returns the corresponding ViewNode by actorID.
 */
function getViewNodeById (id) {
  return getViewNodeByActor({ actorID: id });
}
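
// Illustrative sketch, not part of the upstream file: both lookups resolve to
// the same AudioNodeView because the comparison is done on `actorID` alone.
// The function is hypothetical and never invoked here.
function exampleLookup(nodeActor) {
  let byActor = getViewNodeByActor(nodeActor);
  let byId = getViewNodeById(nodeActor.actorID);
  return byActor === byId;  // true once the node has been registered in AudioNodes
}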
