Bug 1358061 - remove moz-audiochannel codes for media element and web audio. r=baku

MozReview-Commit-ID: KPuhxCVezOZ

--HG--
extra : rebase_source : 74a9c9a9746d0add26f279c6dd5ddc30681be901
Alastor Wu 2017-04-26 12:02:32 +08:00
parent 6b3d58414b
commit c03792963c
14 changed files with 1 addition and 481 deletions


@@ -627,7 +627,6 @@ GK_ATOM(mouseout, "mouseout")
GK_ATOM(mouseover, "mouseover")
GK_ATOM(mousethrough, "mousethrough")
GK_ATOM(mouseup, "mouseup")
GK_ATOM(mozaudiochannel, "mozaudiochannel")
GK_ATOM(mozfullscreenchange, "mozfullscreenchange")
GK_ATOM(mozfullscreenerror, "mozfullscreenerror")
GK_ATOM(mozpointerlockchange, "mozpointerlockchange")
@@ -1959,10 +1958,6 @@ GK_ATOM(onuserproximity, "onuserproximity")
// light sensor support
GK_ATOM(ondevicelight, "ondevicelight")
// Audio channel events
GK_ATOM(onmozinterruptbegin, "onmozinterruptbegin")
GK_ATOM(onmozinterruptend, "onmozinterruptend")
// MediaDevices device change event
GK_ATOM(ondevicechange, "ondevicechange")


@@ -1444,23 +1444,6 @@ NS_IMPL_BOOL_ATTR(HTMLMediaElement, Loop, loop)
NS_IMPL_BOOL_ATTR(HTMLMediaElement, DefaultMuted, muted)
NS_IMPL_ENUM_ATTR_DEFAULT_VALUE(HTMLMediaElement, Preload, preload, nullptr)
NS_IMETHODIMP
HTMLMediaElement::GetMozAudioChannelType(nsAString& aValue)
{
nsString defaultValue;
AudioChannelService::GetDefaultAudioChannelString(defaultValue);
NS_ConvertUTF16toUTF8 str(defaultValue);
GetEnumAttr(nsGkAtoms::mozaudiochannel, str.get(), aValue);
return NS_OK;
}
NS_IMETHODIMP
HTMLMediaElement::SetMozAudioChannelType(const nsAString& aValue)
{
return SetAttrHelper(nsGkAtoms::mozaudiochannel, aValue);
}
NS_IMETHODIMP_(bool)
HTMLMediaElement::IsVideo()
{
@@ -4142,77 +4125,12 @@ bool HTMLMediaElement::ParseAttribute(int32_t aNamespaceID,
if (aAttribute == nsGkAtoms::preload) {
return aResult.ParseEnumValue(aValue, kPreloadTable, false);
}
// Remove the b2g-specific audio channel setting in bug1299390.
if (aAttribute == nsGkAtoms::mozaudiochannel) {
const nsAttrValue::EnumTable* table =
AudioChannelService::GetAudioChannelTable();
MOZ_ASSERT(table);
bool parsed = aResult.ParseEnumValue(aValue, table, false, &table[0]);
if (!parsed) {
return false;
}
AudioChannel audioChannel = static_cast<AudioChannel>(aResult.GetEnumValue());
if (audioChannel == mAudioChannel ||
!CheckAudioChannelPermissions(aValue)) {
return true;
}
// We cannot change the AudioChannel of a decoder.
if (mDecoder) {
return true;
}
mAudioChannel = audioChannel;
if (mSrcStream) {
RefPtr<MediaStream> stream = GetSrcMediaStream();
if (stream) {
stream->SetAudioChannelType(mAudioChannel);
}
}
return true;
}
}
return nsGenericHTMLElement::ParseAttribute(aNamespaceID, aAttribute, aValue,
aResult);
}
bool HTMLMediaElement::CheckAudioChannelPermissions(const nsAString& aString)
{
// Only normal channel doesn't need permission.
if (aString.EqualsASCII("normal")) {
return true;
}
// Maybe this audio channel is equal to the default value from the pref.
nsString audioChannel;
AudioChannelService::GetDefaultAudioChannelString(audioChannel);
if (audioChannel.Equals(aString)) {
return true;
}
nsCOMPtr<nsIPermissionManager> permissionManager =
services::GetPermissionManager();
if (!permissionManager) {
return false;
}
uint32_t perm = nsIPermissionManager::UNKNOWN_ACTION;
permissionManager->TestExactPermissionFromPrincipal(NodePrincipal(),
nsCString(NS_LITERAL_CSTRING("audio-channel-") + NS_ConvertUTF16toUTF8(aString)).get(), &perm);
if (perm != nsIPermissionManager::ALLOW_ACTION) {
return false;
}
return true;
}
void HTMLMediaElement::DoneCreatingElement()
{
if (HasAttr(kNameSpaceID_None, nsGkAtoms::muted)) {
@@ -7063,15 +6981,6 @@ HTMLMediaElement::GetOrCreateTextTrackManager()
return mTextTrackManager;
}
void
HTMLMediaElement::SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv)
{
nsString channel;
channel.AssignASCII(AudioChannelValues::strings[uint32_t(aValue)].value,
AudioChannelValues::strings[uint32_t(aValue)].length);
SetHTMLAttr(nsGkAtoms::mozaudiochannel, channel, aRv);
}
MediaDecoderOwner::NextFrameStatus
HTMLMediaElement::NextFrameStatus()
{


@@ -693,13 +693,6 @@ public:
double MozFragmentEnd();
AudioChannel MozAudioChannelType() const
{
return mAudioChannel;
}
void SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv);
AudioTrackList* AudioTracks();
VideoTrackList* VideoTracks();
@@ -751,9 +744,6 @@ public:
// that will soon be gone.
bool IsBeingDestroyed();
IMPL_EVENT_HANDLER(mozinterruptbegin)
IMPL_EVENT_HANDLER(mozinterruptend)
// These are used for testing only
float ComputedVolume() const;
bool ComputedMuted() const;
@@ -1250,9 +1240,6 @@ protected:
void ReportTelemetry();
// Check the permissions for audiochannel.
bool CheckAudioChannelPermissions(const nsAString& aType);
// Seeks to aTime seconds. aSeekType can be Exact to seek to exactly the
// seek target, or PrevSyncPoint if a quicker but less precise seek is
// desired, and we'll seek to the sync point (keyframe and/or start of the


@@ -1,91 +0,0 @@
<!DOCTYPE HTML>
<html>
<body>
<div id="content" style="display: none">
<audio id="audio1" />
<audio id="audio2" mozaudiochannel="foo" />
</div>
<script type="application/javascript">
function is(a, b, msg) {
parent.postMessage({ status: a === b, msg: msg }, '*');
}
function ok(a, msg) {
parent.postMessage({ status: !!a, msg: msg }, '*');
}
function finish() {
parent.postMessage({ finish: true }, '*');
}
function test_basic() {
var audio1 = document.getElementById("audio1");
ok(audio1, "Audio Element exists");
is(audio1.mozAudioChannelType, "normal", "Default audio1 channel == 'normal'");
try {
audio1.mozAudioChannelType = "foo";
} catch(e) {}
is(audio1.mozAudioChannelType, "normal", "Default audio1 channel == 'normal'");
var audio2 = document.getElementById("audio2");
ok(audio2, "Audio Element exists");
is(audio2.mozAudioChannelType, "normal", "Default audio2 channel == 'normal'");
try {
audio2.mozAudioChannelType = "foo";
} catch(e) {}
is(audio2.mozAudioChannelType, "normal", "Default audio2 channel == 'normal'");
runTest();
}
function test_preferences(aChannel) {
SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", aChannel ]]},
function() {
var audio = document.createElement('audio');
ok(audio, "Audio Element created");
is(audio.mozAudioChannelType, aChannel, "Default audio channel == '" + aChannel + "'");
runTest();
}
);
}
function test_wrong_preferences() {
SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", 'foobar' ]]},
function() {
var audio = document.createElement('audio');
ok(audio, "Audio Element created");
is(audio.mozAudioChannelType, 'normal', "Default audio channel == 'normal'");
runTest();
}
);
}
var tests = [
test_basic,
function() { test_preferences("content"); },
function() { test_preferences("notification"); },
function() { test_preferences("alarm"); },
function() { test_preferences("telephony"); },
function() { test_preferences("ringer"); },
function() { test_preferences("publicnotification"); },
test_wrong_preferences,
];
function runTest() {
if (!tests.length) {
finish();
return;
}
var test = tests.shift();
test();
}
runTest();
</script>
</body>
</html>


@@ -183,7 +183,6 @@ support-files =
reflect.js
file_ignoreuserfocus.html
simpleFileOpener.js
file_mozaudiochannel.html
file_bug1166138_1x.png
file_bug1166138_2x.png
file_bug1166138_def.png
@@ -506,7 +505,6 @@ skip-if = toolkit == 'android' # bug 939642
[test_map_attributes_reflection.html]
[test_meta_attributes_reflection.html]
[test_mod_attributes_reflection.html]
[test_mozaudiochannel.html]
[test_named_options.html]
[test_nested_invalid_fieldsets.html]
[test_object_attributes_reflection.html]


@@ -1,31 +0,0 @@
<!DOCTYPE HTML>
<html>
<head>
<title>Test for mozaudiochannel</title>
<script type="application/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="text/javascript" src="/tests/SimpleTest/EventUtils.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<pre id="test">
<script type="application/javascript">
SimpleTest.waitForExplicitFinish();
SpecialPowers.pushPrefEnv({"set": [["media.useAudioChannelAPI", true]]}, function() {
var ifr = document.createElement('iframe');
ifr.src = 'file_mozaudiochannel.html';
onmessage = function(e) {
if ("finish" in e.data) {
SimpleTest.finish();
} else {
ok(e.data.status, e.data.msg);
}
}
document.body.appendChild(ifr);
});
</script>
</pre>
</body>
</html>


@@ -87,40 +87,6 @@ interface nsIDOMHTMLMediaElement : nsISupports
// it is equal to the media duration.
readonly attribute double mozFragmentEnd;
// Mozilla extension: an audio channel type for media elements.
// An exception is thrown if the app tries to change the audio channel type
// without the permission (manifest file for B2G apps).
// The supported values are:
// * normal (default value)
// Automatically paused if "notification" or higher priority channel
// is played
// Use case: normal applications
// * content
// Automatically paused if "notification" or higher priority channel
// is played. Also paused if another app starts using "content"
// channel. Using this channel never affects applications using
// the "normal" channel.
// Use case: video/audio players
// * notification
// Automatically paused if "alarm" or higher priority channel is played.
// Use case: New email, incoming SMS
// * alarm
// Automatically paused if "telephony" or higher priority channel is
// played.
// Use case: Alarm clock, calendar alarms
// * telephony
// Automatically paused if "ringer" or higher priority
// channel is played.
// Use case: dialer, voip
// * ringer
// Automatically paused if "publicnotification" or higher priority
// channel is played.
// Use case: dialer, voip
// * publicnotification
// Always plays in speaker, even when headphones are plugged in.
// Use case: Camera shutter sound.
attribute DOMString mozAudioChannelType;
// In addition the media element has these new events:
// * onmozinterruptbegin - called when the media element is interrupted
// because of the audiochannel manager.
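For reference, a brief illustrative sketch (not part of the diff) of how a page used the extension described in the comment above, assuming the media.useAudioChannelAPI pref was enabled; the element id and log messages are hypothetical:

// Illustrative only: this is the API the present commit removes.
var player = document.getElementById("player"); // hypothetical <audio> element
// Reads back "normal" by default, or the media.defaultAudioChannel pref value.
console.log(player.mozAudioChannelType);
// Per the interface comment above, a permission check guards channel changes;
// the deleted tests wrap the assignment in try/catch and expect the value to
// remain "normal" when the change is not allowed.
try {
  player.mozAudioChannelType = "content";
} catch (e) {
  console.log("channel change rejected: " + e.message);
}
// Interruptions from the AudioChannelService were surfaced via these events.
player.onmozinterruptbegin = function() { console.log("playback interrupted"); };
player.onmozinterruptend = function() { console.log("interruption ended"); };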


@@ -1066,21 +1066,6 @@ AudioContext::Unmute() const
}
}
AudioChannel
AudioContext::MozAudioChannelType() const
{
return mDestination->MozAudioChannelType();
}
AudioChannel
AudioContext::TestAudioChannelInAudioNodeStream()
{
MediaStream* stream = mDestination->Stream();
MOZ_ASSERT(stream);
return stream->AudioChannelType();
}
size_t
AudioContext::SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const
{

View File

@@ -312,10 +312,6 @@ public:
JSObject* GetGlobalJSObject() const;
AudioChannel MozAudioChannelType() const;
AudioChannel TestAudioChannelInAudioNodeStream();
void RegisterNode(AudioNode* aNode);
void UnregisterNode(AudioNode* aNode);
@@ -323,9 +319,6 @@ public:
BasicWaveFormCache* GetBasicWaveFormCache();
IMPL_EVENT_HANDLER(mozinterruptbegin)
IMPL_EVENT_HANDLER(mozinterruptend)
bool CheckClosed(ErrorResult& aRv);
void Dispatch(already_AddRefed<nsIRunnable>&& aRunnable);

View File

@@ -545,9 +545,6 @@ AudioDestinationNode::WindowSuspendChanged(nsSuspendedTypes aSuspend)
"this = %p, aSuspend = %s\n", this, SuspendTypeToStr(aSuspend)));
mAudioChannelSuspended = suspended;
Context()->DispatchTrustedEvent(!suspended ?
NS_LITERAL_STRING("mozinterruptend") :
NS_LITERAL_STRING("mozinterruptbegin"));
DisabledTrackMode disabledMode = suspended ? DisabledTrackMode::SILENCE_BLACK
: DisabledTrackMode::ENABLED;


@@ -171,9 +171,6 @@ tags=capturestream
tags=capturestream
[test_mixingRules.html]
skip-if = toolkit == 'android' # bug 1091965
[test_mozaudiochannel.html]
# Android: bug 1061675; OSX 10.6: bug 1097721
skip-if = (toolkit == 'android') || (os == 'mac' && os_version == '10.6')
[test_nodeToParamConnection.html]
[test_nodeCreationDocumentGone.html]
[test_OfflineAudioContext.html]


@@ -1,151 +0,0 @@
<!DOCTYPE HTML>
<html>
<head>
<title>Test for mozaudiochannel</title>
<script type="application/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="text/javascript" src="/tests/SimpleTest/EventUtils.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<p id="display"></p>
<pre id="test">
<script type="application/javascript">
function test_basic() {
var ac = new AudioContext();
ok(ac, "AudioContext created");
// Default
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
// Unpermitted channels
ac = new AudioContext("content");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
ac = new AudioContext("notification");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
ac = new AudioContext("alarm");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
ac = new AudioContext("telephony");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
ac = new AudioContext("ringer");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
ac = new AudioContext("publicnotification");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
runTest();
}
function test_permission(aChannel) {
var ac = new AudioContext();
ok(ac, "AudioContext created");
is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
var channel = SpecialPowers.wrap(ac).testAudioChannelInAudioNodeStream();
is(channel, "normal", "AudioNodeStream is using the correct default audio channel.");
SpecialPowers.pushPermissions(
[{ "type": "audio-channel-" + aChannel, "allow": true, "context": document }],
function() {
var ac = new AudioContext(aChannel);
is(ac.mozAudioChannelType, aChannel, "Default ac channel == '" + aChannel + "'");
var channel = SpecialPowers.wrap(ac).testAudioChannelInAudioNodeStream();
is(channel, aChannel, "AudioNodeStream is using the correct new audio channel.");
runTest();
}
);
}
function test_preferences(aChannel) {
SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", aChannel ]]},
function() {
SpecialPowers.pushPermissions(
[{ "type": "audio-channel-" + aChannel, "allow": false, "context": document }],
function() {
var ac = new AudioContext(aChannel);
ok(ac, "AudioContext created");
is(ac.mozAudioChannelType, aChannel, "Default ac channel == '" + aChannel + "'");
var channel = SpecialPowers.wrap(ac).testAudioChannelInAudioNodeStream();
is(channel, aChannel, "AudioNodeStream is using the correct audio channel.");
runTest();
}
);
}
);
}
function test_wrong_preferences() {
SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", 'foobar' ]]},
function() {
var ac = new AudioContext();
ok(ac, "AudioContext created");
is(ac.mozAudioChannelType, 'normal', "Default ac channel == 'normal'");
runTest();
}
);
}
function test_testAudioChannelInAudioNodeStream() {
var ac = new AudioContext();
ok(ac, "AudioContext created");
var status = false;
try {
ac.testAudioChannelInAudioNodeStream();
} catch(e) {
status = true;
}
ok(status, "testAudioChannelInAudioNodeStream() should not exist in content.");
runTest();
}
var tests = [
test_basic,
function() { test_permission("content"); },
function() { test_permission("notification"); },
function() { test_permission("alarm"); },
function() { test_permission("telephony"); },
function() { test_permission("ringer"); },
function() { test_permission("publicnotification"); },
function() { test_preferences("content"); },
function() { test_preferences("notification"); },
function() { test_preferences("alarm"); },
function() { test_preferences("telephony"); },
function() { test_preferences("ringer"); },
function() { test_preferences("publicnotification"); },
test_wrong_preferences,
test_testAudioChannelInAudioNodeStream,
];
function runTest() {
if (!tests.length) {
SimpleTest.finish();
return;
}
var test = tests.shift();
test();
}
SpecialPowers.pushPrefEnv({"set": [["media.useAudioChannelAPI", true ]]}, runTest);
SimpleTest.waitForExplicitFinish();
SimpleTest.requestLongerTimeout(5);
</script>
</pre>
</body>
</html>


@@ -33,23 +33,4 @@ interface AudioContext : BaseAudioContext {
[NewObject, Throws]
MediaStreamAudioDestinationNode createMediaStreamDestination();
};
// Mozilla extensions
partial interface AudioContext {
// Read AudioChannel.webidl for more information about this attribute.
[Pref="media.useAudioChannelAPI"]
readonly attribute AudioChannel mozAudioChannelType;
// These 2 events are dispatched when the AudioContext object is muted by
// the AudioChannelService. It's called 'interrupt' because when this event is
// dispatched on a HTMLMediaElement, the audio stream is paused.
[Pref="media.useAudioChannelAPI"]
attribute EventHandler onmozinterruptbegin;
[Pref="media.useAudioChannelAPI"]
attribute EventHandler onmozinterruptend;
// This method is for test only.
[ChromeOnly] AudioChannel testAudioChannelInAudioNodeStream();
};
};
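Similarly, a brief illustrative sketch (not part of the diff) of the removed AudioContext extension, assuming the same pref; the channel-name constructor argument is the form exercised by the deleted web audio test above:

// Illustrative only: this is the API the present commit removes.
// The deleted test constructs contexts as new AudioContext("content"), etc.;
// without the matching "audio-channel-content" permission the attribute
// reads back "normal".
var ctx = new AudioContext("content");
console.log(ctx.mozAudioChannelType);
// Dispatched when the AudioChannelService mutes or unmutes the context.
ctx.onmozinterruptbegin = function() { console.log("context muted"); };
ctx.onmozinterruptend = function() { console.log("context unmuted"); };
// testAudioChannelInAudioNodeStream() is [ChromeOnly]; the deleted test checks
// that it is not callable from content.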


@@ -138,21 +138,6 @@ partial interface HTMLMediaElement {
// the media element has a fragment URI for the currentSrc, otherwise
// it is equal to the media duration.
readonly attribute double mozFragmentEnd;
// Mozilla extension: an audio channel type for media elements.
// Read AudioChannel.webidl for more information about this attribute.
[SetterThrows, Pref="media.useAudioChannelAPI"]
attribute AudioChannel mozAudioChannelType;
// In addition the media element has these new events:
// * onmozinterruptbegin - called when the media element is interrupted
// because of the audiochannel manager.
// * onmozinterruptend - called when the interruption is concluded
[Pref="media.useAudioChannelAPI"]
attribute EventHandler onmozinterruptbegin;
[Pref="media.useAudioChannelAPI"]
attribute EventHandler onmozinterruptend;
};
// Encrypted Media Extensions