Mirror of https://github.com/mozilla/gecko-dev.git (synced 2025-02-25 20:01:50 +00:00)

commit cf366b2840 (parent dee814265a)

Backed out 3 changesets (bug 1693250) for causing high frequency failures on test_recreate_audio_sink_clock_time.html. CLOSED TREE

Backed out changeset 7a2f57793a3b (bug 1693250)
Backed out changeset f869096a85f2 (bug 1693250)
Backed out changeset 54b9fbe63086 (bug 1693250)

@@ -2075,10 +2075,6 @@ bool HTMLMediaElement::IsVideoDecodingSuspended() const {
  return mDecoder && mDecoder->IsVideoDecodingSuspended();
}

void HTMLMediaElement::SetSuspend(bool aSuspend) {
  SuspendOrResumeElement(aSuspend);
}

double HTMLMediaElement::TotalVideoPlayTime() const {
  return mDecoder ? mDecoder->GetTotalVideoPlayTimeInSeconds() : -1.0;
}

@@ -7209,7 +7205,6 @@ void HTMLMediaElement::SetAudibleState(bool aAudible) {
    mIsAudioTrackAudible = aAudible;
    NotifyAudioPlaybackChanged(
        AudioChannelService::AudibleChangedReasons::eDataAudibleChanged);
    DispatchAsyncTestingEvent(u"mozaudiblestatechanged"_ns);
  }
}

@@ -655,9 +655,6 @@ class HTMLMediaElement : public nsGenericHTMLElement,
  // For use by mochitests.
  bool IsVideoDecodingSuspended() const;

  // For use by mochitests. Suspend or resume the media decoder and resources.
  void SetSuspend(bool aSuspend);

  // These functions return accumulated time, which are used for the telemetry
  // usage. Return -1 for error.
  double TotalVideoPlayTime() const;

@@ -372,15 +372,6 @@ void MediaDecoder::OnPlaybackEvent(MediaPlaybackEvent&& aEvent) {
    case MediaPlaybackEvent::VideoOnlySeekCompleted:
      GetOwner()->DispatchAsyncEvent(u"mozvideoonlyseekcompleted"_ns);
      break;
    case MediaPlaybackEvent::AudioSinkAudioGapDetected:
      GetOwner()->DispatchAsyncTestingEvent(u"mozaudiosinkaudiogapdetected"_ns);
      break;
    case MediaPlaybackEvent::SuspendedMediaSink:
      GetOwner()->DispatchAsyncTestingEvent(u"mozsuspendedmediasink"_ns);
      break;
    case MediaPlaybackEvent::ResumedMediaSink:
      GetOwner()->DispatchAsyncTestingEvent(u"mozresumedmediasink"_ns);
      break;
    default:
      break;
  }

@@ -33,9 +33,6 @@ class MediaDecoderOwner {
  // Dispatch an asynchronous event to the decoder owner
  virtual void DispatchAsyncEvent(const nsAString& aName) = 0;

  // Dispatch an asynchronous test-only event to the decoder owner
  virtual void DispatchAsyncTestingEvent(const nsAString& aName){};

  // Triggers a recomputation of readyState.
  virtual void UpdateReadyState() = 0;

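The DispatchAsyncTestingEvent hook above is what the backed-out changesets used to surface internal sink transitions (mozsuspendedmediasink, mozresumedmediasink, mozaudiosinkaudiogapdetected) to mochitests without firing extra DOM events during normal playback; the removed tests enable it through the media.testing-only-events pref. The following standalone sketch shows that gating pattern in the abstract. All names here (TestingEventGate, prefs) are invented for illustration, std::function stands in for Gecko's event machinery, and the pref-based gate is an assumption inferred from the tests rather than something shown in this diff.

#include <functional>
#include <iostream>
#include <map>
#include <string>

// Hypothetical stand-in for a preference service; in Gecko the gate would be
// a real pref such as "media.testing-only-events" (set by the removed tests).
static std::map<std::string, bool> prefs = {
    {"media.testing-only-events", false}};

class TestingEventGate {
 public:
  explicit TestingEventGate(std::function<void(const std::string&)> aDispatch)
      : mDispatch(std::move(aDispatch)) {}

  // Regular events always reach the page.
  void DispatchAsyncEvent(const std::string& aName) { mDispatch(aName); }

  // Test-only events are dropped unless the testing pref is flipped on, so
  // normal pages never observe them.
  void DispatchAsyncTestingEvent(const std::string& aName) {
    if (prefs["media.testing-only-events"]) {
      mDispatch(aName);
    }
  }

 private:
  std::function<void(const std::string&)> mDispatch;
};

int main() {
  TestingEventGate gate([](const std::string& aName) {
    std::cout << "dispatched " << aName << "\n";
  });
  gate.DispatchAsyncTestingEvent("mozsuspendedmediasink");  // dropped
  prefs["media.testing-only-events"] = true;
  gate.DispatchAsyncTestingEvent("mozsuspendedmediasink");  // dispatched
  return 0;
}

The empty default body of DispatchAsyncTestingEvent in MediaDecoderOwner serves a similar purpose: owners that do not override it simply ignore test-only notifications.
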
@@ -3243,12 +3243,6 @@ void MediaDecoderStateMachine::SeekingState::SeekCompleted() {
    mMaster->mOnPlaybackEvent.Notify(MediaPlaybackEvent::Invalidate);
  }

  // `mLastClockTimeBeforeStopSink` stores the position from which we want to
  // restart the media sink, but once the seeking completes, that position
  // should be reset because the playback should start from a new position,
  // instead of previous clock time.
  mMaster->mLastClockTimeBeforeStopSink.reset();

  GoToNextState();
}

@@ -3348,7 +3342,6 @@ RefPtr<ShutdownPromise> MediaDecoderStateMachine::ShutdownState::Enter() {
  master->mMetadataManager.Disconnect();
  master->mOnMediaNotSeekable.Disconnect();
  master->mAudibleListener.DisconnectIfExists();
  master->mAudioSinkAudioGapListener.DisconnectIfExists();

  // Disconnect canonicals and mirrors before shutting down our task queue.
  master->mStreamName.DisconnectIfConnected();

@@ -3442,17 +3435,10 @@ void MediaDecoderStateMachine::InitializationTask(MediaDecoder* aDecoder) {
}

void MediaDecoderStateMachine::AudioAudibleChanged(bool aAudible) {
  LOG("AudioAudibleChanged=%d", aAudible);
  mIsAudioDataAudible = aAudible;
}

void MediaDecoderStateMachine::OnAudioSinkAudioGapDetected(int64_t aGapFrames) {
  LOG("OnAudioSinkAudioGapDetected, gap-frames=%" PRId64, aGapFrames);
  mOnPlaybackEvent.Notify(MediaPlaybackEvent::AudioSinkAudioGapDetected);
}

MediaSink* MediaDecoderStateMachine::CreateAudioSink() {
  MOZ_ASSERT(OnTaskQueue());
  if (mOutputCaptureState != MediaDecoder::OutputCaptureState::None) {
    DecodedStream* stream = new DecodedStream(
        this,

@@ -3462,8 +3448,6 @@ MediaSink* MediaDecoderStateMachine::CreateAudioSink() {
        mOutputTracks, mVolume, mPlaybackRate, mPreservesPitch, mAudioQueue,
        mVideoQueue, mSinkDevice.Ref());
    mAudibleListener.DisconnectIfExists();
    // Audible state would be updated later after the decoded stream starts.
    AudioAudibleChanged(false);
    mAudibleListener = stream->AudibleEvent().Connect(
        OwnerThread(), this, &MediaDecoderStateMachine::AudioAudibleChanged);
    return stream;

@@ -3474,14 +3458,8 @@ MediaSink* MediaDecoderStateMachine::CreateAudioSink() {
    UniquePtr<AudioSink> audioSink{new AudioSink(
        mTaskQueue, mAudioQueue, Info().mAudio, mShouldResistFingerprinting)};
    mAudibleListener.DisconnectIfExists();
    mAudioSinkAudioGapListener.DisconnectIfExists();
    // Audible state would be updated later after the sink starts.
    AudioAudibleChanged(false);
    mAudibleListener = audioSink->AudibleEvent().Connect(
        mTaskQueue, this, &MediaDecoderStateMachine::AudioAudibleChanged);
    mAudioSinkAudioGapListener = audioSink->AudioGapEvent().Connect(
        mTaskQueue, this,
        &MediaDecoderStateMachine::OnAudioSinkAudioGapDetected);
    return audioSink;
  };
  return new AudioSinkWrapper(

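In both branches above, CreateAudioSink disconnects any previous AudibleEvent listener, forces the audible state back to false ("Audible state would be updated later after the sink starts"), and then wires the new sink's AudibleEvent to AudioAudibleChanged. That reset-then-reconnect is what the removed test_recreate_audio_sink_audible_change.html observes as an inaudible-then-audible pair of mozaudiblestatechanged events around a sink change. A minimal standalone sketch of that behavior follows; the names are invented, and the change-only notification guard is an assumption (the hunks here do not show how duplicate states are filtered upstream).

#include <functional>
#include <iostream>

// Invented illustration: the audible flag is forced to false when a new sink
// is wired up, and observers are only told about actual transitions, so
// recreating the sink yields an inaudible -> audible pair once audio flows.
class AudibleState {
 public:
  explicit AudibleState(std::function<void(bool)> aOnChange)
      : mOnChange(std::move(aOnChange)) {}

  void Set(bool aAudible) {
    if (mAudible == aAudible) {
      return;  // assumed: only transitions are reported
    }
    mAudible = aAudible;
    mOnChange(aAudible);
  }

 private:
  std::function<void(bool)> mOnChange;
  bool mAudible = false;
};

int main() {
  AudibleState state([](bool aAudible) {
    std::cout << (aAudible ? "mozaudiblestatechanged: audible\n"
                           : "mozaudiblestatechanged: inaudible\n");
  });
  state.Set(true);   // playback starts and becomes audible
  state.Set(false);  // old sink torn down -> AudioAudibleChanged(false)
  state.Set(true);   // new sink starts playing audible data again
  return 0;
}
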
@@ -3880,9 +3858,7 @@ RefPtr<MediaDecoder::SeekPromise> MediaDecoderStateMachine::Seek(
void MediaDecoderStateMachine::StopMediaSink() {
  MOZ_ASSERT(OnTaskQueue());
  if (mMediaSink->IsStarted()) {
    mLastClockTimeBeforeStopSink = Some(mMediaSink->GetPosition());
    LOG("Stop MediaSink, last clock time (%" PRId64 ")",
        (*mLastClockTimeBeforeStopSink).ToMicroseconds());
    LOG("Stop MediaSink");
    mMediaSink->Stop();
    mMediaSinkAudioEndedPromise.DisconnectIfExists();
    mMediaSinkVideoEndedPromise.DisconnectIfExists();

@@ -4064,10 +4040,8 @@ nsresult MediaDecoderStateMachine::StartMediaSink() {
  }

  mAudioCompleted = false;
  const TimeUnit startTime = mLastClockTimeBeforeStopSink
                                 ? *mLastClockTimeBeforeStopSink
                                 : GetMediaTime();
  LOG("Start sink, start time=%" PRId64, startTime.ToMicroseconds());
  const auto startTime = GetMediaTime();
  LOG("StartMediaSink, mediaTime=%" PRId64, startTime.ToMicroseconds());
  nsresult rv = mMediaSink->Start(startTime, Info());
  StreamNameChanged();

@@ -4482,7 +4456,6 @@ void MediaDecoderStateMachine::SuspendMediaSink() {
  mIsMediaSinkSuspended = true;
  StopMediaSink();
  mMediaSink->Shutdown();
  mOnPlaybackEvent.Notify(MediaPlaybackEvent::SuspendedMediaSink);
}

void MediaDecoderStateMachine::InvokeResumeMediaSink() {

@@ -4508,7 +4481,6 @@ void MediaDecoderStateMachine::ResumeMediaSink() {
    mMediaSink = CreateMediaSink();
    MaybeStartPlayback();
  }
  mOnPlaybackEvent.Notify(MediaPlaybackEvent::ResumedMediaSink);
}

void MediaDecoderStateMachine::UpdateSecondaryVideoContainer() {

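Taken together, the backed-out hunks implemented a clock-time handoff: StopMediaSink recorded the old sink's position in mLastClockTimeBeforeStopSink, StartMediaSink preferred that saved position over GetMediaTime() when a new sink was created (for example after SuspendMediaSink tears the sink down and ResumeMediaSink recreates it), and SeekingState::SeekCompleted cleared it so a seek starts from the seek target instead of the stale clock time. The standalone sketch below illustrates that handoff with invented names (SinkClock, StopSink, StartSink); it is not the Gecko classes, and whether the saved time is also cleared when the new sink starts is not visible in these hunks.

#include <cassert>
#include <cstdint>
#include <optional>

// Standalone illustration of the clock-time handoff removed by this backout.
class SinkClock {
 public:
  // StopMediaSink(): remember the position the old sink had reached.
  void StopSink(int64_t aPositionUs) {
    mLastClockTimeBeforeStopSink = aPositionUs;
  }

  // StartMediaSink(): a freshly created sink resumes from the saved position
  // if one exists, otherwise from the current media time, so the reported
  // clock keeps growing across sink recreations.
  int64_t StartSink(int64_t aCurrentMediaTimeUs) const {
    return mLastClockTimeBeforeStopSink.value_or(aCurrentMediaTimeUs);
  }

  // SeekingState::SeekCompleted(): after a seek the saved position is stale,
  // so drop it and let the next sink start from the seek target instead.
  void SeekCompleted() { mLastClockTimeBeforeStopSink.reset(); }

 private:
  std::optional<int64_t> mLastClockTimeBeforeStopSink;
};

int main() {
  SinkClock clock;
  clock.StopSink(5'000'000);  // suspend the sink at the 5 s mark
  // Resuming reuses 5 s even if the element reports a slightly older time.
  assert(clock.StartSink(4'900'000) == 5'000'000);

  clock.StopSink(7'000'000);
  clock.SeekCompleted();  // a seek invalidates the saved clock time
  assert(clock.StartSink(1'000'000) == 1'000'000);  // start at the seek target
  return 0;
}
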
@@ -254,7 +254,6 @@ class MediaDecoderStateMachine
  void OnVideoPopped(const RefPtr<VideoData>& aSample);

  void AudioAudibleChanged(bool aAudible);
  void OnAudioSinkAudioGapDetected(int64_t aGapFrames);

  void SetPlaybackRate(double aPlaybackRate) override;
  void SetIsLiveStream(bool aIsLiveStream) override {

@@ -495,8 +494,6 @@ class MediaDecoderStateMachine
  MediaEventListener mVideoQueueListener;
  MediaEventListener mAudibleListener;
  MediaEventListener mOnMediaNotSeekable;
  // Debug usage for detecting audio gap in the audio sink.
  MediaEventListener mAudioSinkAudioGapListener;

  const bool mIsMSE;

@@ -562,12 +559,6 @@ class MediaDecoderStateMachine
  // after Initialization. TaskQueue thread only.
  bool mIsMediaSinkSuspended = false;

  // This stores the last clock time before the media sink got shutdown, and
  // we might use that as the next start time when we re-create the media sink
  // later, which ensures the clock time is monotonically increased even if
  // the media sinks have been changed multiple times.
  Maybe<media::TimeUnit> mLastClockTimeBeforeStopSink;

 public:
  AbstractCanonical<PrincipalHandle>* CanonicalOutputPrincipal() {
    return &mCanonicalOutputPrincipal;

@@ -42,9 +42,6 @@ struct MediaPlaybackEvent {
    CancelVideoSuspendTimer,
    VideoOnlySeekBegin,
    VideoOnlySeekCompleted,
    AudioSinkAudioGapDetected,
    SuspendedMediaSink,
    ResumedMediaSink,
  } mType;

  using DataType = Variant<Nothing, int64_t>;

@@ -154,7 +154,6 @@ RefPtr<MediaSink::EndedPromise> AudioSink::Start(
  mAudioPopped.Connect(mOwnerThread, this, &AudioSink::OnAudioPopped);

  mStartTime = aStartTime;
  SINK_LOG("Start=%" PRId64, mStartTime.ToMicroseconds());

  // To ensure at least one audio packet will be popped from AudioQueue and
  // ready to be played.

@@ -536,7 +535,7 @@ void AudioSink::NotifyAudioNeeded() {

      SINK_LOG("Gap in the audio input, push %" PRId64 " frames of silence",
               missingFrames.value());
      mAudioGapEvent.Notify(missingFrames.value());

      RefPtr<AudioData> silenceData;
      AlignedAudioBuffer silenceBuffer(missingFrames.value() * data->mChannels);
      if (!silenceBuffer) {

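The removed mAudioGapEvent fired from NotifyAudioNeeded whenever incoming audio did not line up with the next expected frame, right before the sink pads the hole with silence; the removed clock-time mochitest listened for the resulting mozaudiosinkaudiogapdetected event to catch discontinuities after a sink switch. Below is a standalone sketch of detecting such a gap and padding it with silence. The names are invented, and in particular computing missingFrames from packet frame indices is an assumption for illustration only; the hunk above only shows the value being logged and notified.

#include <cstdint>
#include <functional>
#include <iostream>
#include <vector>

// Invented stand-in for a decoded audio packet: a start frame index plus data.
struct AudioPacket {
  int64_t mStartFrame;
  std::vector<float> mSamples;  // mono for simplicity
};

// Appends packets to an output buffer; if a packet starts later than the next
// expected frame, the hole is filled with silence and a gap callback is
// notified with the number of missing frames (mirroring mAudioGapEvent).
class GapPaddingSink {
 public:
  explicit GapPaddingSink(std::function<void(int64_t)> aOnGap)
      : mOnGap(std::move(aOnGap)) {}

  void Push(const AudioPacket& aPacket) {
    int64_t missingFrames = aPacket.mStartFrame - mNextFrame;
    if (missingFrames > 0) {
      mOnGap(missingFrames);  // gap in the audio input
      mOutput.insert(mOutput.end(), missingFrames, 0.0f);  // push silence
    }
    mOutput.insert(mOutput.end(), aPacket.mSamples.begin(),
                   aPacket.mSamples.end());
    mNextFrame = aPacket.mStartFrame + int64_t(aPacket.mSamples.size());
  }

 private:
  std::function<void(int64_t)> mOnGap;
  std::vector<float> mOutput;
  int64_t mNextFrame = 0;
};

int main() {
  GapPaddingSink sink([](int64_t aFrames) {
    std::cout << "gap detected, " << aFrames << " frames of silence\n";
  });
  sink.Push({0, std::vector<float>(128, 0.5f)});    // frames 0..127
  sink.Push({160, std::vector<float>(128, 0.5f)});  // 32-frame gap reported
  return 0;
}
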
@@ -6,7 +6,6 @@
#ifndef AudioSink_h__
#define AudioSink_h__

#include <stdint.h>
#include "AudioStream.h"
#include "AudibilityMonitor.h"
#include "MediaEventSource.h"

@@ -80,7 +79,6 @@ class AudioSink : private AudioStream::DataSource {
  void SetPlaying(bool aPlaying);

  MediaEventSource<bool>& AudibleEvent() { return mAudibleEvent; }
  MediaEventSource<int64_t>& AudioGapEvent() { return mAudioGapEvent; }

  void GetDebugInfo(dom::MediaSinkDebugInfo& aInfo);

@@ -173,8 +171,6 @@ class AudioSink : private AudioStream::DataSource {
  MediaEventProducer<bool> mAudibleEvent;
  // Only signed on the real-time audio thread.
  MediaEventProducer<void> mAudioPopped;
  // Debug purpose for detecting audio gap.
  MediaEventProducer<int64_t> mAudioGapEvent;

  Atomic<bool> mProcessedQueueFinished;
  MediaQueue<AudioData>& mAudioQueue;

@@ -824,9 +824,6 @@ skip-if =
  (os == "mac") # Bug 1110922
  (os == "win") # Bug 1110922
  (os == "android" && processor == "aarch64") # Bug 1110922
[test_recreate_audio_sink_audible_change.html]
scheme=https
[test_recreate_audio_sink_clock_time.html]
[test_replay_metadata.html]
[test_reset_events_async.html]
[test_video_dimensions.html]

@@ -1,77 +0,0 @@
<!DOCTYPE HTML>
<html>
<head>
  <title>Test media's audible state should change when changing audio sink</title>
  <script src="/tests/SimpleTest/SimpleTest.js"></script>
  <script src="manifest.js"></script>
  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<script class="testbody" type="text/javascript">
/**
 * This test is used to ensure that when the audio sink is shut down and
 * recreated, the media element's audible state reflects the audio sink's
 * status. E.g. when the sink is shut down, the audible state becomes
 * inaudible, because we have no sink at that moment. After recreating the
 * sink, the audible state should become audible again once the sink plays.
 */
add_task(async function setTestPref() {
  await SpecialPowers.pushPrefEnv({
    set: [["media.testing-only-events", true],
          ["media.setsinkid.enabled", true],
          // Disable the prompt in order to expose audio output
          ["media.navigator.permission.disabled", true],
    ]});
});

add_task(async function testRecreateAudioSinkWhenChangeAudioSink() {
  const audio = await createPlayingAndAudibleLoopingAudio();

  // Expose an audio output device.
  SpecialPowers.wrap(document).notifyUserGestureActivation();
  await navigator.mediaDevices.selectAudioOutput();

  info(`set sink id in order to recreate the audio sink`);
  let {deviceId} = await navigator.mediaDevices.selectAudioOutput()
      .catch(e => ok(false, `failed to get device Id`));

  info(`audible state should be reset first when the old sink is shut down`);
  await Promise.all([
    // Become inaudible first, then audible
    expectToReceiveEvent(audio, "mozaudiblestatechanged", { times : 2 }),
    audio.setSinkId(deviceId),
  ]);
  ok(true, `new sink should become audible again`);
});

/**
 * Following are helper functions.
 */
async function createPlayingAndAudibleLoopingAudio() {
  const audio = document.createElement('audio');
  audio.src = "small-shot.ogg";
  audio.loop = true;
  document.body.appendChild(audio);

  info(`wait for audio starting playing and becoming audible.`);
  await Promise.all([
    expectToReceiveEvent(audio, "mozaudiblestatechanged", { times : 1 }),
    audio.play()]);
  return audio;
}

function expectToReceiveEvent(element, event, { times }) {
  return new Promise(r => {
    let receivedTimes = 0;
    element.addEventListener(event, _ => {
      if (++receivedTimes == times) {
        ok(true, `received ${event} ${times} times.`);
        r();
      }
    });
  });
}

</script>
</body>
</html>

@@ -1,82 +0,0 @@
<!DOCTYPE HTML>
<html>
<head>
  <title>consistent growing clock time after switching to new audio sink</title>
  <script src="/tests/SimpleTest/SimpleTest.js"></script>
  <script src="manifest.js"></script>
  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<script class="testbody" type="text/javascript">
/**
 * This test is used to ensure that when recreating another audio sink, the new
 * sink should inherit the clock time from the previous sink in order to keep
 * the clock time growing consistently.
 */
add_task(async function setTestPref() {
  await SpecialPowers.pushPrefEnv({
    set: [["media.testing-only-events", true]]});
});

add_task(async function testConsistentGrowingClockTimeBetweenSinks() {
  const kExpectedLoopingTimes = 3;
  const audio = await createPlayingAndAudibleLoopingAudio();
  await waitAtLeastFinishLoopingOnce(audio, { times : kExpectedLoopingTimes });

  info(`suspend then resume the media element, which would result in creating a new audio sink`);
  let waitingSinkPromise = Promise.all([
    once(audio, "mozsuspendedmediasink"),
    once(audio, "mozresumedmediasink"),
  ]);
  let gapCounter = 0;
  audio.addEventListener("mozaudiosinkaudiogapdetected", _ => {
    // When we switch to the new sink, it's possible to have one small gap
    // because the previous clock time would not always match the first sample.
    // But we shouldn't have other gaps if we keep playing in the same sink.
    if (++gapCounter > 1) {
      ok(false, `gap ${gapCounter} : too many gaps in this audio!`);
    }
  });
  SpecialPowers.wrap(audio).setSuspend(true);
  SpecialPowers.wrap(audio).setSuspend(false);
  await waitingSinkPromise;
  // Wait a while to see if we receive any gap events.
  await expectToReceiveEvent(audio, "timeupdate", { times : 5});
});

/**
 * Following are helper functions.
 */
async function createPlayingAndAudibleLoopingAudio() {
  const audio = document.createElement('audio');
  audio.src = "small-shot.ogg";
  audio.loop = true;
  document.body.appendChild(audio);

  info(`wait for audio starting playing and becoming audible.`);
  await Promise.all([
    expectToReceiveEvent(audio, "mozaudiblestatechanged", { times : 1 }),
    audio.play()]);
  return audio;
}

function waitAtLeastFinishLoopingOnce(element, { times }) {
  info(`wait until audio finishes looping at least once`);
  return expectToReceiveEvent(element, "seeked", { times });
}

function expectToReceiveEvent(element, event, { times }) {
  return new Promise(r => {
    let receivedTimes = 0;
    element.addEventListener(event, _ => {
      if (++receivedTimes == times) {
        ok(true, `received ${event} ${times} times.`);
        r();
      }
    });
  });
}

</script>
</body>
</html>

@@ -168,11 +168,6 @@ partial interface HTMLMediaElement {
  // from playing.
  [ChromeOnly]
  readonly attribute boolean isSuspendedByInactiveDocOrDocShell;

  // Used for testing to suspend/resume underlying media decoder and resources
  // for the media element.
  [ChromeOnly]
  undefined setSuspend(boolean aSuspend);
};

/*