Bug 1265732 - Decouple the AudioNodeFront and WebAudioFront from the AudioNodeActor and WebAudioActor respectively; r=ejpbruel

Nick Fitzgerald 2016-06-03 10:45:10 -07:00
parent 84bd208e23
commit 644e1b42f8
9 changed files with 292 additions and 221 deletions
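The patch follows the protocol.js spec/actor/front split: the request/response metadata that previously lived inline in method() wrappers moves into a shared spec module under devtools/shared/specs, the actor keeps only the method implementations via ActorClassWithSpec, and the front is generated from the same spec via FrontClassWithSpec and relocated to devtools/shared/fronts. A minimal sketch of the pattern with one simplified method follows; the Example* names are illustrative and not part of this patch.

const protocol = require("devtools/shared/protocol");
const { generateActorSpec, RetVal } = protocol;

// Shared spec: declares the wire format once, for both server and client.
const exampleSpec = generateActorSpec({
  typeName: "example",
  methods: {
    getType: { response: { type: RetVal("string") } }
  }
});

// Server side: the actor supplies only the implementation, with no inline
// protocol metadata.
const ExampleActor = protocol.ActorClassWithSpec(exampleSpec, {
  getType: function () {
    return this.type;
  }
});

// Client side: the front is built from the spec, not from the actor module.
const ExampleFront = protocol.FrontClassWithSpec(exampleSpec, {
  initialize: function (client, form) {
    protocol.Front.prototype.initialize.call(this, client, form);
  }
});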

View File

@ -7,7 +7,7 @@
// `actorHasMethod` and `getTrait`.
var { WebAudioFront } =
require("devtools/server/actors/webaudio");
require("devtools/shared/fronts/webaudio");
function* testTarget(client, target) {
yield target.makeRemote();

View File

@ -7,7 +7,7 @@
const { Cc, Ci, Cu, Cr } = require("chrome");
const EventEmitter = require("devtools/shared/event-emitter");
const { WebAudioFront } = require("devtools/server/actors/webaudio");
const { WebAudioFront } = require("devtools/shared/fronts/webaudio");
var Promise = require("promise");
function WebAudioEditorPanel(iframeWindow, toolbox) {

View File

@ -14,7 +14,7 @@ var { generateUUID } = Cc["@mozilla.org/uuid-generator;1"].getService(Ci.nsIUUID
var Promise = require("promise");
var Services = require("Services");
var { WebAudioFront } = require("devtools/server/actors/webaudio");
var { WebAudioFront } = require("devtools/shared/fronts/webaudio");
var DevToolsUtils = require("devtools/shared/DevToolsUtils");
var audioNodes = require("devtools/server/actors/utils/audionodes.json");
var mm = null;

View File

@ -17,6 +17,14 @@ const { createValueGrip } = require("devtools/server/actors/object");
const AutomationTimeline = require("./utils/automation-timeline");
const { on, once, off, emit } = events;
const { types, method, Arg, Option, RetVal, preEvent } = protocol;
const {
audionodeSpec,
webAudioSpec,
AUTOMATION_METHODS,
NODE_CREATION_METHODS,
NODE_ROUTING_METHODS,
} = require("devtools/shared/specs/webaudio");
const { WebAudioFront } = require("devtools/shared/fronts/webaudio");
const AUDIO_NODE_DEFINITION = require("devtools/server/actors/utils/audionodes.json");
const ENABLE_AUTOMATION = false;
const AUTOMATION_GRANULARITY = 2000;
@ -26,31 +34,11 @@ const AUDIO_GLOBALS = [
"AudioContext", "AudioNode", "AudioParam"
];
const NODE_CREATION_METHODS = [
"createBufferSource", "createMediaElementSource", "createMediaStreamSource",
"createMediaStreamDestination", "createScriptProcessor", "createAnalyser",
"createGain", "createDelay", "createBiquadFilter", "createWaveShaper",
"createPanner", "createConvolver", "createChannelSplitter", "createChannelMerger",
"createDynamicsCompressor", "createOscillator", "createStereoPanner"
];
const AUTOMATION_METHODS = [
"setValueAtTime", "linearRampToValueAtTime", "exponentialRampToValueAtTime",
"setTargetAtTime", "setValueCurveAtTime", "cancelScheduledValues"
];
const NODE_ROUTING_METHODS = [
"connect", "disconnect"
];
/**
* An Audio Node actor allowing communication to a specific audio node in the
* Audio Context graph.
*/
types.addActorType("audionode");
var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
typeName: "audionode",
var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClassWithSpec(audionodeSpec, {
form: function (detail) {
if (detail === "actorid") {
return this.actorID;
@ -106,9 +94,9 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
*
* DEPRECATED: Use `audionode.type` instead, left here for legacy reasons.
*/
getType: method(function () {
getType: function () {
return this.type;
}, { response: { type: RetVal("string") }}),
},
/**
* Returns a boolean indicating if the AudioNode has been "bypassed",
@ -116,7 +104,7 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
*
* @return Boolean
*/
isBypassed: method(function () {
isBypassed: function () {
let node = this.node.get();
if (node === null) {
return false;
@ -125,9 +113,7 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
// Cast to boolean in case `passThrough` is undefined,
// like for AudioDestinationNode
return !!node.passThrough;
}, {
response: { bypassed: RetVal("boolean") }
}),
},
/**
* Takes a boolean, either enabling or disabling the "passThrough" option
@ -139,7 +125,7 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
* Whether the bypass value should be set on or off.
* @return Boolean
*/
bypass: method(function (enable) {
bypass: function (enable) {
let node = this.node.get();
if (node === null) {
@ -151,10 +137,7 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
}
return this.isBypassed();
}, {
request: { enable: Arg(0, "boolean") },
response: { bypassed: RetVal("boolean") }
}),
},
/**
* Changes a param on the audio node. Responds with either `undefined`
@ -165,7 +148,7 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
* @param String value
* Value to change AudioParam to.
*/
setParam: method(function (param, value) {
setParam: function (param, value) {
let node = this.node.get();
if (node === null) {
@ -184,13 +167,7 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
} catch (e) {
return constructError(e);
}
}, {
request: {
param: Arg(0, "string"),
value: Arg(1, "nullable:primitive")
},
response: { error: RetVal("nullable:json") }
}),
},
/**
* Gets a param on the audio node.
@ -198,7 +175,7 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
* @param String param
* Name of the AudioParam to fetch.
*/
getParam: method(function (param) {
getParam: function (param) {
let node = this.node.get();
if (node === null) {
@ -217,12 +194,7 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
let grip = createValueGrip(value, null, createObjectGrip);
return grip;
}, {
request: {
param: Arg(0, "string")
},
response: { text: RetVal("nullable:primitive") }
}),
},
/**
* Get an object containing key-value pairs of additional attributes
@ -232,29 +204,24 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
* @param String param
* Name of the AudioParam whose flags are desired.
*/
getParamFlags: method(function (param) {
getParamFlags: function (param) {
return ((AUDIO_NODE_DEFINITION[this.type] || {}).properties || {})[param];
}, {
request: { param: Arg(0, "string") },
response: { flags: RetVal("nullable:primitive") }
}),
},
/**
* Get an array of objects each containing a `param` and `value` property,
* corresponding to a property name and current value of the audio node.
*/
getParams: method(function (param) {
getParams: function (param) {
let props = Object.keys(AUDIO_NODE_DEFINITION[this.type].properties || {});
return props.map(prop =>
({ param: prop, value: this.getParam(prop), flags: this.getParamFlags(prop) }));
}, {
response: { params: RetVal("json") }
}),
},
/**
* Connects this audionode to an AudioParam via `node.connect(param)`.
*/
connectParam: method(function (destActor, paramName, output) {
connectParam: function (destActor, paramName, output) {
let srcNode = this.node.get();
let destNode = destActor.node.get();
@ -272,19 +239,12 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
} catch (e) {
return constructError(e);
}
}, {
request: {
destActor: Arg(0, "audionode"),
paramName: Arg(1, "string"),
output: Arg(2, "nullable:number")
},
response: { error: RetVal("nullable:json") }
}),
},
/**
* Connects this audionode to another via `node.connect(dest)`.
*/
connectNode: method(function (destActor, output, input) {
connectNode: function (destActor, output, input) {
let srcNode = this.node.get();
let destNode = destActor.node.get();
@ -302,19 +262,12 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
} catch (e) {
return constructError(e);
}
}, {
request: {
destActor: Arg(0, "audionode"),
output: Arg(1, "nullable:number"),
input: Arg(2, "nullable:number")
},
response: { error: RetVal("nullable:json") }
}),
},
/**
* Disconnects this audionode from all connections via `node.disconnect()`.
*/
disconnect: method(function (destActor, output) {
disconnect: function (destActor, output) {
let node = this.node.get();
if (node === null) {
@ -328,12 +281,9 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
} catch (e) {
return constructError(e);
}
}, {
request: { output: Arg(0, "nullable:number") },
response: { error: RetVal("nullable:json") }
}),
},
getAutomationData: method(function (paramName) {
getAutomationData: function (paramName) {
let timeline = this.automation[paramName];
if (!timeline) {
return null;
@ -373,10 +323,7 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
}
return { events, values };
}, {
request: { paramName: Arg(0, "string") },
response: { values: RetVal("nullable:json") }
}),
},
/**
* Called via WebAudioActor, registers an automation event
@ -389,7 +336,7 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
* @param Array args
* Arguments passed into the automation call.
*/
addAutomationEvent: method(function (paramName, eventName, args = []) {
addAutomationEvent: function (paramName, eventName, args = []) {
let node = this.node.get();
let timeline = this.automation[paramName];
@ -431,14 +378,7 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
} catch (e) {
return constructError(e);
}
}, {
request: {
paramName: Arg(0, "string"),
eventName: Arg(1, "string"),
args: Arg(2, "nullable:json")
},
response: { error: RetVal("nullable:json") }
}),
},
/**
* Registers the automation event in the AudioNodeActor's
@ -459,48 +399,12 @@ var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
}
});
/**
* The corresponding Front object for the AudioNodeActor.
*
* @attribute {String} type
* The type of audio node, like "OscillatorNode", "MediaElementAudioSourceNode"
* @attribute {Boolean} source
* Boolean indicating if the node is a source node, like BufferSourceNode,
* MediaElementAudioSourceNode, OscillatorNode, etc.
* @attribute {Boolean} bypassable
* Boolean indicating if the audio node is bypassable (splitter,
* merger and destination nodes, for example, are not)
*/
var AudioNodeFront = protocol.FrontClass(AudioNodeActor, {
form: function (form, detail) {
if (detail === "actorid") {
this.actorID = form;
return;
}
this.actorID = form.actor;
this.type = form.type;
this.source = form.source;
this.bypassable = form.bypassable;
},
initialize: function (client, form) {
protocol.Front.prototype.initialize.call(this, client, form);
// if we were manually passed a form, this was created manually and
// needs to own itself for now.
if (form) {
this.manage(this);
}
}
});
/**
* The Web Audio Actor handles simple interaction with an AudioContext
* high-level methods. After instantiating this actor, you'll need to set it
* up by calling setup().
*/
var WebAudioActor = exports.WebAudioActor = protocol.ActorClass({
typeName: "webaudio",
var WebAudioActor = exports.WebAudioActor = protocol.ActorClassWithSpec(webAudioSpec, {
initialize: function (conn, tabActor) {
protocol.Actor.prototype.initialize.call(this, conn);
this.tabActor = tabActor;
@ -526,11 +430,9 @@ var WebAudioActor = exports.WebAudioActor = protocol.ActorClass({
* Returns the definition of all AudioNodes, including their AudioParams and
* flags.
*/
getDefinition: method(function () {
getDefinition: function () {
return AUDIO_NODE_DEFINITION;
}, {
response: { definition: RetVal("json") }
}),
},
/**
* Starts waiting for the current tab actor's document global to be
@ -539,7 +441,7 @@ var WebAudioActor = exports.WebAudioActor = protocol.ActorClass({
*
* See ContentObserver and WebAudioInstrumenter for more details.
*/
setup: method(function ({ reload }) {
setup: function ({ reload }) {
// Used to track the first time something happens with the Web Audio API,
// so we can ultimately fire the `start-context` event
this._firstNodeCreated = false;
@ -569,10 +471,7 @@ var WebAudioActor = exports.WebAudioActor = protocol.ActorClass({
// Bind to the `window-destroyed` event so we can unbind events between
// the global destruction and the `finalize` cleanup method on the actor.
on(this.tabActor, "window-destroyed", this._onGlobalDestroyed);
}, {
request: { reload: Option(0, "boolean") },
oneway: true
}),
},
/**
* Invoked whenever an instrumented function is called, like an AudioContext
@ -655,7 +554,7 @@ var WebAudioActor = exports.WebAudioActor = protocol.ActorClass({
* to hibernation. This method is called automatically just before the
* actor is destroyed.
*/
finalize: method(function () {
finalize: function () {
if (!this._initialized) {
return;
}
@ -669,53 +568,6 @@ var WebAudioActor = exports.WebAudioActor = protocol.ActorClass({
this._callWatcher.eraseRecording();
this._callWatcher.finalize();
this._callWatcher = null;
}, {
oneway: true
}),
/**
* Events emitted by this actor.
*/
events: {
"start-context": {
type: "startContext"
},
"connect-node": {
type: "connectNode",
source: Option(0, "audionode"),
dest: Option(0, "audionode")
},
"disconnect-node": {
type: "disconnectNode",
source: Arg(0, "audionode")
},
"connect-param": {
type: "connectParam",
source: Option(0, "audionode"),
dest: Option(0, "audionode"),
param: Option(0, "string")
},
"change-param": {
type: "changeParam",
source: Option(0, "audionode"),
param: Option(0, "string"),
value: Option(0, "string")
},
"create-node": {
type: "createNode",
source: Arg(0, "audionode")
},
"destroy-node": {
type: "destroyNode",
source: Arg(0, "audionode")
},
"automation-event": {
type: "automationEvent",
node: Option(0, "audionode"),
paramName: Option(0, "string"),
eventName: Option(0, "string"),
args: Option(0, "json")
}
},
/**
@ -884,35 +736,6 @@ var WebAudioActor = exports.WebAudioActor = protocol.ActorClass({
}
});
/**
* The corresponding Front object for the WebAudioActor.
*/
var WebAudioFront = exports.WebAudioFront = protocol.FrontClass(WebAudioActor, {
initialize: function (client, { webaudioActor }) {
protocol.Front.prototype.initialize.call(this, client, { actor: webaudioActor });
this.manage(this);
},
/**
* If connecting to older geckos (<Fx43), where audio node actors do not
* contain `type`, `source` and `bypassable` properties, fetch
* them manually here.
*/
_onCreateNode: preEvent("create-node", function (audionode) {
if (!audionode.type) {
return audionode.getType().then(type => {
audionode.type = type;
audionode.source = !!AUDIO_NODE_DEFINITION[type].source;
audionode.bypassable = !AUDIO_NODE_DEFINITION[type].unbypassable;
});
}
}),
});
WebAudioFront.AUTOMATION_METHODS = new Set(AUTOMATION_METHODS);
WebAudioFront.NODE_CREATION_METHODS = new Set(NODE_CREATION_METHODS);
WebAudioFront.NODE_ROUTING_METHODS = new Set(NODE_ROUTING_METHODS);
/**
* Determines whether or not property is an AudioParam.
*

View File

@ -14,5 +14,6 @@ DevToolsModules(
'inspector.js',
'storage.js',
'styles.js',
'stylesheets.js'
'stylesheets.js',
'webaudio.js'
)

View File

@ -0,0 +1,83 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const {
audionodeSpec,
webAudioSpec,
AUTOMATION_METHODS,
NODE_CREATION_METHODS,
NODE_ROUTING_METHODS,
} = require("devtools/shared/specs/webaudio");
const protocol = require("devtools/shared/protocol");
const AUDIO_NODE_DEFINITION = require("devtools/server/actors/utils/audionodes.json");
/**
* The corresponding Front object for the AudioNodeActor.
*
* @attribute {String} type
* The type of audio node, like "OscillatorNode", "MediaElementAudioSourceNode"
* @attribute {Boolean} source
* Boolean indicating if the node is a source node, like BufferSourceNode,
* MediaElementAudioSourceNode, OscillatorNode, etc.
* @attribute {Boolean} bypassable
* Boolean indicating if the audio node is bypassable (splitter,
* merger and destination nodes, for example, are not)
*/
const AudioNodeFront = protocol.FrontClassWithSpec(audionodeSpec, {
form: function (form, detail) {
if (detail === "actorid") {
this.actorID = form;
return;
}
this.actorID = form.actor;
this.type = form.type;
this.source = form.source;
this.bypassable = form.bypassable;
},
initialize: function (client, form) {
protocol.Front.prototype.initialize.call(this, client, form);
// if we were manually passed a form, this was created manually and
// needs to own itself for now.
if (form) {
this.manage(this);
}
}
});
exports.AudioNodeFront = AudioNodeFront;
/**
* The corresponding Front object for the WebAudioActor.
*/
const WebAudioFront = protocol.FrontClassWithSpec(webAudioSpec, {
initialize: function (client, { webaudioActor }) {
protocol.Front.prototype.initialize.call(this, client, { actor: webaudioActor });
this.manage(this);
},
/**
* If connecting to older geckos (<Fx43), where audio node actors do not
* contain `type`, `source` and `bypassable` properties, fetch
* them manually here.
*/
_onCreateNode: protocol.preEvent("create-node", function (audionode) {
if (!audionode.type) {
return audionode.getType().then(type => {
audionode.type = type;
audionode.source = !!AUDIO_NODE_DEFINITION[type].source;
audionode.bypassable = !AUDIO_NODE_DEFINITION[type].unbypassable;
});
}
return null;
}),
});
WebAudioFront.AUTOMATION_METHODS = new Set(AUTOMATION_METHODS);
WebAudioFront.NODE_CREATION_METHODS = new Set(NODE_CREATION_METHODS);
WebAudioFront.NODE_ROUTING_METHODS = new Set(NODE_ROUTING_METHODS);
exports.WebAudioFront = WebAudioFront;
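Consumers now require the front from devtools/shared/fronts/webaudio instead of the actor module, as the test and panel hunks above show. A hedged sketch of how a tool might wire the front up, assuming an already-connected target whose form advertises a webaudioActor; the target plumbing here is illustrative and not taken from this patch.

const { WebAudioFront } = require("devtools/shared/fronts/webaudio");

// Assumes `target` is a connected tab target; its form carries the
// `webaudioActor` id that WebAudioFront's initialize destructures.
const front = new WebAudioFront(target.client, target.form);

// Start instrumenting the page's AudioContext and react to graph events.
front.setup({ reload: false });
front.on("create-node", audionode => {
  console.log(audionode.type, audionode.source, audionode.bypassable);
});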

View File

@ -17,5 +17,6 @@ DevToolsModules(
'storage.js',
'styleeditor.js',
'styles.js',
'stylesheets.js'
'stylesheets.js',
'webaudio.js'
)

View File

@ -0,0 +1,163 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const {
Arg,
Option,
RetVal,
generateActorSpec,
types,
} = require("devtools/shared/protocol");
exports.NODE_CREATION_METHODS = [
"createBufferSource", "createMediaElementSource", "createMediaStreamSource",
"createMediaStreamDestination", "createScriptProcessor", "createAnalyser",
"createGain", "createDelay", "createBiquadFilter", "createWaveShaper",
"createPanner", "createConvolver", "createChannelSplitter", "createChannelMerger",
"createDynamicsCompressor", "createOscillator", "createStereoPanner"
];
exports.AUTOMATION_METHODS = [
"setValueAtTime", "linearRampToValueAtTime", "exponentialRampToValueAtTime",
"setTargetAtTime", "setValueCurveAtTime", "cancelScheduledValues"
];
exports.NODE_ROUTING_METHODS = [
"connect", "disconnect"
];
types.addActorType("audionode");
const audionodeSpec = generateActorSpec({
typeName: "audionode",
methods: {
getType: { response: { type: RetVal("string") }},
isBypassed: {
response: { bypassed: RetVal("boolean") }
},
bypass: {
request: { enable: Arg(0, "boolean") },
response: { bypassed: RetVal("boolean") }
},
setParam: {
request: {
param: Arg(0, "string"),
value: Arg(1, "nullable:primitive")
},
response: { error: RetVal("nullable:json") }
},
getParam: {
request: {
param: Arg(0, "string")
},
response: { text: RetVal("nullable:primitive") }
},
getParamFlags: {
request: { param: Arg(0, "string") },
response: { flags: RetVal("nullable:primitive") }
},
getParams: {
response: { params: RetVal("json") }
},
connectParam: {
request: {
destActor: Arg(0, "audionode"),
paramName: Arg(1, "string"),
output: Arg(2, "nullable:number")
},
response: { error: RetVal("nullable:json") }
},
connectNode: {
request: {
destActor: Arg(0, "audionode"),
output: Arg(1, "nullable:number"),
input: Arg(2, "nullable:number")
},
response: { error: RetVal("nullable:json") }
},
disconnect: {
request: { output: Arg(0, "nullable:number") },
response: { error: RetVal("nullable:json") }
},
getAutomationData: {
request: { paramName: Arg(0, "string") },
response: { values: RetVal("nullable:json") }
},
addAutomationEvent: {
request: {
paramName: Arg(0, "string"),
eventName: Arg(1, "string"),
args: Arg(2, "nullable:json")
},
response: { error: RetVal("nullable:json") }
},
}
});
exports.audionodeSpec = audionodeSpec;
const webAudioSpec = generateActorSpec({
typeName: "webaudio",
/**
* Events emitted by this actor.
*/
events: {
"start-context": {
type: "startContext"
},
"connect-node": {
type: "connectNode",
source: Option(0, "audionode"),
dest: Option(0, "audionode")
},
"disconnect-node": {
type: "disconnectNode",
source: Arg(0, "audionode")
},
"connect-param": {
type: "connectParam",
source: Option(0, "audionode"),
dest: Option(0, "audionode"),
param: Option(0, "string")
},
"change-param": {
type: "changeParam",
source: Option(0, "audionode"),
param: Option(0, "string"),
value: Option(0, "string")
},
"create-node": {
type: "createNode",
source: Arg(0, "audionode")
},
"destroy-node": {
type: "destroyNode",
source: Arg(0, "audionode")
},
"automation-event": {
type: "automationEvent",
node: Option(0, "audionode"),
paramName: Option(0, "string"),
eventName: Option(0, "string"),
args: Option(0, "json")
}
},
methods: {
getDefinition: {
response: { definition: RetVal("json") }
},
setup: {
request: { reload: Option(0, "boolean") },
oneway: true
},
finalize: {
oneway: true
}
}
});
exports.webAudioSpec = webAudioSpec;
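The spec module also becomes the single owner of the instrumented method-name lists, which the front re-exposes as Sets (WebAudioFront.AUTOMATION_METHODS and friends); that is presumably why the actor file above still requires the front. A hedged example of how a caller might classify an intercepted call name; the `name` value here is illustrative.

const { WebAudioFront } = require("devtools/shared/fronts/webaudio");

// `name` stands in for a function name captured by a call watcher.
const name = "setValueAtTime";

if (WebAudioFront.AUTOMATION_METHODS.has(name)) {
  console.log("AudioParam automation call");
} else if (WebAudioFront.NODE_CREATION_METHODS.has(name)) {
  console.log("AudioNode creation call");
} else if (WebAudioFront.NODE_ROUTING_METHODS.has(name)) {
  console.log("connect/disconnect call");
}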
