/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";

const { classes: Cc, interfaces: Ci, utils: Cu, results: Cr } = Components;

Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource:///modules/devtools/ViewHelpers.jsm");

// Override DOM promises with Promise.jsm helpers
const { defer, all } = Cu.import("resource://gre/modules/Promise.jsm", {}).Promise;

const { Task } = Cu.import("resource://gre/modules/Task.jsm", {});
const require = Cu.import("resource://gre/modules/devtools/Loader.jsm", {}).devtools.require;
const EventEmitter = require("devtools/toolkit/event-emitter");
const STRINGS_URI = "chrome://browser/locale/devtools/webaudioeditor.properties";
let { console } = Cu.import("resource://gre/modules/devtools/Console.jsm", {});

// The panel's window global is an EventEmitter firing the following events:
const EVENTS = {
  // Fired when the first AudioNode has been created, signifying
  // that the AudioContext is being used and should be tracked via the editor.
  START_CONTEXT: "WebAudioEditor:StartContext",

  // Fired on node creation, connection and disconnection.
  CREATE_NODE: "WebAudioEditor:CreateNode",
  CONNECT_NODE: "WebAudioEditor:ConnectNode",
  DISCONNECT_NODE: "WebAudioEditor:DisconnectNode",

  // Fired when a node gets GC'd.
  DESTROY_NODE: "WebAudioEditor:DestroyNode",

  // Fired when a node parameter changes.
  CHANGE_PARAM: "WebAudioEditor:ChangeParam",

  // Fired when the UI is reset from tab navigation.
  UI_RESET: "WebAudioEditor:UIReset",

  // Fired when a param has been changed via the UI and successfully
  // pushed via the actor to the raw audio node.
  UI_SET_PARAM: "WebAudioEditor:UISetParam",

  // Fired when an audio node is added to the list pane.
  UI_ADD_NODE_LIST: "WebAudioEditor:UIAddNodeList",

  // Fired when the Audio Context graph finishes rendering, with two
  // arguments: the number of nodes rendered and the number of edges rendered.
  UI_GRAPH_RENDERED: "WebAudioEditor:UIGraphRendered"
};

/**
 * The current target and the Web Audio Editor front, set by this tool's host.
 */
let gToolbox, gTarget, gFront;

/**
 * Tracks the audio node views for the current context and their connections.
 */
let AudioNodes = [];
let AudioNodeConnections = new WeakMap();


// Light representation wrapping an AudioNode actor with additional properties
function AudioNodeView (actor) {
  this.actor = actor;
  this.id = actor.actorID;
}

// A proxy for the underlying AudioNodeActor to fetch its type
// and subsequently assign the type to the instance.
AudioNodeView.prototype.getType = Task.async(function* () {
  this.type = yield this.actor.getType();
  return this.type;
});

// Helper method to create connections in the AudioNodeConnections
// WeakMap for rendering
AudioNodeView.prototype.connect = function (destination) {
  let connections = AudioNodeConnections.get(this);
  if (!connections) {
    connections = [];
    AudioNodeConnections.set(this, connections);
  }
  connections.push(destination);
};

// Helper method to remove audio connections from the current AudioNodeView
AudioNodeView.prototype.disconnect = function () {
  AudioNodeConnections.set(this, []);
};

// Returns a promise that resolves to an array of objects containing
// both a `param` name property and a `value` property.
AudioNodeView.prototype.getParams = function () {
  return this.actor.getParams();
};


/**
 * Initializes the web audio editor views
 */
function startupWebAudioEditor() {
  return all([
    WebAudioEditorController.initialize(),
    WebAudioGraphView.initialize(),
    WebAudioParamView.initialize()
  ]);
}

/**
 * Destroys the web audio editor controller and views.
 */
function shutdownWebAudioEditor() {
  return all([
    WebAudioEditorController.destroy(),
    WebAudioGraphView.destroy(),
    WebAudioParamView.destroy()
  ]);
}

/**
 * Functions handling target-related lifetime events.
 */
let WebAudioEditorController = {
  /**
   * Listen for events emitted by the current tab target.
   */
  initialize: function() {
    this._onTabNavigated = this._onTabNavigated.bind(this);
    gTarget.on("will-navigate", this._onTabNavigated);
    gTarget.on("navigate", this._onTabNavigated);
    gFront.on("start-context", this._onStartContext);
    gFront.on("create-node", this._onCreateNode);
    gFront.on("connect-node", this._onConnectNode);
    gFront.on("disconnect-node", this._onDisconnectNode);
    gFront.on("change-param", this._onChangeParam);

    // Set up events to refresh the Graph view
    window.on(EVENTS.CREATE_NODE, this._onUpdatedContext);
    window.on(EVENTS.CONNECT_NODE, this._onUpdatedContext);
    window.on(EVENTS.DISCONNECT_NODE, this._onUpdatedContext);
  },

  /**
   * Remove events emitted by the current tab target.
   */
  destroy: function() {
    gTarget.off("will-navigate", this._onTabNavigated);
    gTarget.off("navigate", this._onTabNavigated);
    gFront.off("start-context", this._onStartContext);
    gFront.off("create-node", this._onCreateNode);
    gFront.off("connect-node", this._onConnectNode);
    gFront.off("disconnect-node", this._onDisconnectNode);
    gFront.off("change-param", this._onChangeParam);
    window.off(EVENTS.CREATE_NODE, this._onUpdatedContext);
    window.off(EVENTS.CONNECT_NODE, this._onUpdatedContext);
    window.off(EVENTS.DISCONNECT_NODE, this._onUpdatedContext);
  },

  /**
   * Called when a new audio node is created, or the audio context
   * routing changes.
   */
  _onUpdatedContext: function () {
    WebAudioGraphView.draw();
  },

  /**
   * Called for each location change in the debugged tab.
   */
  _onTabNavigated: function(event) {
    switch (event) {
      case "will-navigate": {
        Task.spawn(function* () {
          // Make sure the backend is prepared to handle audio contexts.
          yield gFront.setup({ reload: false });

          // Reset UI to show "Waiting for Audio Context..." and clear out
          // current UI.
          WebAudioGraphView.resetUI();
          WebAudioParamView.resetUI();

          // Clear out stored audio nodes
          AudioNodes.length = 0;
          AudioNodeConnections.clear();
        }).then(() => window.emit(EVENTS.UI_RESET));
        break;
      }
      case "navigate": {
        // TODO Case of bfcache, needs investigating
        // bug 994250
        break;
      }
    }
  },

  /**
   * Called after the first audio node is created in an audio context,
   * signaling that the audio context is being used.
   */
  _onStartContext: function() {
    WebAudioGraphView.showContent();
    window.emit(EVENTS.START_CONTEXT);
  },

  /**
   * Called when a new node is created. Creates an `AudioNodeView` instance
   * for tracking throughout the editor.
   */
  _onCreateNode: Task.async(function* (nodeActor) {
    let node = new AudioNodeView(nodeActor);
    yield node.getType();
    AudioNodes.push(node);
    window.emit(EVENTS.CREATE_NODE, node.id);
  }),

  /**
   * Called when a node is connected to another node.
   */
  _onConnectNode: Task.async(function* ({ source: sourceActor, dest: destActor }) {
    // Since node create and connect are probably executed back to back,
    // and the controller's `_onCreateNode` needs to look up type,
    // the edge creation could be called before the graph node is actually
    // created. This way, we can check and listen for the event before
    // adding an edge.
    let [source, dest] = yield waitForNodeCreation(sourceActor, destActor);

    source.connect(dest);
    window.emit(EVENTS.CONNECT_NODE, source.id, dest.id);

    function waitForNodeCreation (sourceActor, destActor) {
      let deferred = defer();
      let source = getViewNodeByActor(sourceActor);
      let dest = getViewNodeByActor(destActor);

      if (!source || !dest) {
        window.on(EVENTS.CREATE_NODE, function createNodeListener (_, id) {
          let createdNode = getViewNodeById(id);
          if (equalActors(sourceActor, createdNode.actor))
            source = createdNode;
          if (equalActors(destActor, createdNode.actor))
            dest = createdNode;
          if (source && dest) {
            window.off(EVENTS.CREATE_NODE, createNodeListener);
            deferred.resolve([source, dest]);
          }
        });
      }
      else {
        deferred.resolve([source, dest]);
      }
      return deferred.promise;
    }
  }),

  /**
   * Called when a node is disconnected.
   */
  _onDisconnectNode: function(nodeActor) {
    let node = getViewNodeByActor(nodeActor);
    node.disconnect();
    window.emit(EVENTS.DISCONNECT_NODE, node.id);
  },

  /**
   * Called when a node param is changed.
   */
  _onChangeParam: function({ actor, param, value }) {
    window.emit(EVENTS.CHANGE_PARAM, getViewNodeByActor(actor), param, value);
  }
};

/**
 * Convenient way of emitting events from the panel window.
 */
EventEmitter.decorate(this);

/**
 * DOM query helpers.
 */
function $(selector, target = document) { return target.querySelector(selector); }
function $$(selector, target = document) { return target.querySelectorAll(selector); }

/**
 * Compares the `actorID` of two actors to determine whether they correspond
 * to the same underlying actor.
 */
function equalActors (actor1, actor2) {
  return actor1.actorID === actor2.actorID;
}

/**
 * Returns the corresponding ViewNode by actor.
 */
function getViewNodeByActor (actor) {
  for (let i = 0; i < AudioNodes.length; i++) {
    if (equalActors(AudioNodes[i].actor, actor))
      return AudioNodes[i];
  }
  return null;
}

/**
 * Returns the corresponding ViewNode by actorID.
 */
function getViewNodeById (id) {
  return getViewNodeByActor({ actorID: id });
}
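
/**
 * Illustrative sketch, not part of the original module: shows how the
 * `AudioNodes` array and the `AudioNodeConnections` WeakMap above are meant
 * to be read together when rebuilding the graph's edge list. The function
 * name `collectGraphEdges` is hypothetical and only documents the intended
 * data flow; the actual rendering is performed by `WebAudioGraphView`.
 */
function collectGraphEdges () {
  let edges = [];
  // Each tracked source node maps to an array of destination AudioNodeViews,
  // populated by `AudioNodeView.prototype.connect`.
  for (let source of AudioNodes) {
    let connections = AudioNodeConnections.get(source) || [];
    for (let dest of connections) {
      edges.push({ source: source.id, dest: dest.id });
    }
  }
  return edges;
}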