Mirror of https://github.com/mozilla/gecko-dev.git

Bug 1524026 - part 2 : add test. r=padenot,smaug

Differential Revision: https://phabricator.services.mozilla.com/D18408
--HG--
extra : moz-landing-system : lando

parent 0d138f528c
commit 9b9c5c38d9

@@ -728,6 +728,8 @@ skip-if = android_version >= '23' # bug 1424903
 skip-if = android_version >= '23' # bug 1424903
 [test_autoplay_policy_web_audio_mediaElementAudioSourceNode.html]
 skip-if = android_version >= '23' # bug 1424903
+[test_autoplay_policy_web_audio_AudioParamStream.html]
+skip-if = android_version >= '23' # bug 1424903
 [test_buffered.html]
 skip-if = android_version == '22' # bug 1308388, android(bug 1232305)
 [test_bug448534.html]

@@ -0,0 +1,170 @@
<!DOCTYPE HTML>
<html>
<head>
  <title>Autoplay policy test : suspend/resume the AudioParam's stream</title>
  <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
  <script type="text/javascript" src="manifest.js"></script>
</head>
<body>
<script>

/**
 * This test ensures that an AudioParam's stream can be suspended and resumed
 * by its AudioContext.
 */

SimpleTest.waitForExplicitFinish();

(async function testSuspendAndResumeAudioParamStreams() {
  await setupTestPreferences();

  info(`- create the AudioContext -`);
  createAudioContext();

  info(`- the AudioContext is not allowed to start in the beginning -`);
  await audioContextShouldBeBlocked();

  info(`- connect an AudioScheduledSourceNode to each AudioParam and start it; each AudioParam's stream should be suspended in the beginning -`);
  let audioParamsArr = await connectAudioScheduledSourceNodeToAudioParams();

  info(`- the AudioContext and the AudioParam's stream should be resumed -`);
  await audioContextAndAudioParamStreamsShouldBeResumed(audioParamsArr);

  info(`- suspend the AudioContext which should also suspend the AudioParam's stream -`);
  await suspendAudioContextAndAudioParamStreams(audioParamsArr);

  endTest();
})();

/**
 * Test utility functions
 */

function setupTestPreferences() {
  return SpecialPowers.pushPrefEnv({"set": [
    ["media.autoplay.default", SpecialPowers.Ci.nsIAutoplay.BLOCKED],
    ["media.autoplay.enabled.user-gestures-needed", true],
    ["media.autoplay.block-webaudio", true],
    ["media.autoplay.block-event.enabled", true],
  ]});
}

function createAudioContext() {
  window.ac = new AudioContext();

  ac.allowedToStart = new Promise(resolve => {
    ac.addEventListener("statechange", function() {
      if (ac.state === "running") {
        resolve();
      }
    }, {once: true});
  });

  ac.notAllowedToStart = new Promise(resolve => {
    ac.addEventListener("blocked", async function() {
      resolve();
    }, {once: true});
  });
}

async function audioContextShouldBeBlocked() {
  await ac.notAllowedToStart;
  is(ac.state, "suspended", `AudioContext is blocked.`);
}

function createAudioParams(nodeType) {
  switch (nodeType) {
    case "audioBufferSource":
      let buffer = ac.createBufferSource();
      return [buffer.playbackRate, buffer.detune];
    case "biquadFilter":
      let bf = ac.createBiquadFilter();
      return [bf.frequency, bf.detune, bf.Q, bf.gain];
    case "constantSource":
      return [ac.createConstantSource().offset];
    case "dynamicsCompressor":
      let dc = ac.createDynamicsCompressor();
      return [dc.threshold, dc.knee, dc.ratio, dc.attack, dc.release];
    case "delay":
      return [ac.createDelay(5.0).delayTime];
    case "gain":
      return [ac.createGain().gain];
    case "oscillator":
      let osc = ac.createOscillator();
      return [osc.frequency, osc.detune];
    case "panner":
      let panner = ac.createPanner();
      return [panner.positionX, panner.positionY, panner.positionZ,
              panner.orientationX, panner.orientationY, panner.orientationZ];
    case "stereoPanner":
      return [ac.createStereoPanner().pan];
    default:
      ok(false, `unknown node type ${nodeType}.`);
      return [];
  }
}

function createAudioParamArrWithName(nodeType) {
  let audioParamsArr = createAudioParams(nodeType);
  for (let audioParam of audioParamsArr) {
    audioParam.name = nodeType;
  }
  return audioParamsArr;
}

function createAllAudioParamsFromDifferentAudioNode() {
  const NodesWithAudioParam =
    ["audioBufferSource", "biquadFilter", "constantSource", "delay",
     "dynamicsCompressor", "gain", "oscillator", "panner", "stereoPanner"];
  let audioParamsArr = [];
  for (let nodeType of NodesWithAudioParam) {
    audioParamsArr = audioParamsArr.concat(createAudioParamArrWithName(nodeType));
  }
  ok(audioParamsArr.length >= NodesWithAudioParam.length,
     `Length of AudioParam array (${audioParamsArr.length}) is at least the ` +
     `length of node type array (${NodesWithAudioParam.length}).`);
  return audioParamsArr;
}

function connectAudioScheduledSourceNodeToAudioParams() {
  let osc = ac.createOscillator();
  let audioParamsArr = createAllAudioParamsFromDifferentAudioNode();
  for (let audioParam of audioParamsArr) {
    osc.connect(audioParam);
    ok(SpecialPowers.wrap(audioParam).isStreamSuspended,
       `(${audioParam.name}) audioParam's stream has been suspended.`);
  }

  // Simulate a user gesture so the AudioContext is allowed to start.
  SpecialPowers.wrap(document).notifyUserGestureActivation();
  osc.start();
  return audioParamsArr;
}

async function audioContextAndAudioParamStreamsShouldBeResumed(audioParamsArr) {
  await ac.allowedToStart;
  is(ac.state, "running", `AudioContext is allowed to start.`);
  for (let audioParam of audioParamsArr) {
    ok(!SpecialPowers.wrap(audioParam).isStreamSuspended,
       `(${audioParam.name}) audioParam's stream has been resumed.`);
  }
}

async function suspendAudioContextAndAudioParamStreams(audioParamsArr) {
  await ac.suspend();
  is(ac.state, "suspended", `AudioContext is suspended.`);
  for (let audioParam of audioParamsArr) {
    ok(SpecialPowers.wrap(audioParam).isStreamSuspended,
       `(${audioParam.name}) audioParam's stream has been suspended.`);
  }
}

function endTest() {
  // Reset the activation flag so that this test does not interfere with the
  // following runs in verify mode, which executes the test several times in
  // the same document.
  SpecialPowers.wrap(document).clearUserGestureActivation();
  SimpleTest.finish();
}

</script>
</body>
</html>

@@ -148,6 +148,10 @@ class AudioParam final : public nsWrapperCache, public AudioParamTimeline {
 
   float MaxValue() const { return mMaxValue; }
 
+  bool IsStreamSuspended() const {
+    return mStream ? mStream->IsSuspended() : false;
+  }
+
   const nsTArray<AudioNode::InputNode>& InputNodes() const {
     return mInputNodes;
   }

@@ -55,3 +55,9 @@ partial interface AudioParam {
   [ChromeOnly]
   readonly attribute DOMString name;
 };
+
+partial interface AudioParam {
+  // This attribute is used for mochitest only.
+  [ChromeOnly]
+  readonly attribute boolean isStreamSuspended;
+};
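
For reference, the chrome-only isStreamSuspended attribute added above is only reachable from mochitests through SpecialPowers, which is how the new test reads it. A minimal usage sketch (assuming a mochitest page where ac is an existing AudioContext and SimpleTest's ok() is available):

  // Content JS cannot see [ChromeOnly] attributes directly; wrap the
  // AudioParam with SpecialPowers first, then read the test-only flag.
  let gainParam = ac.createGain().gain;
  let suspended = SpecialPowers.wrap(gainParam).isStreamSuspended;
  ok(typeof suspended === "boolean",
     "isStreamSuspended reports whether the AudioParam's stream is suspended.");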