Backed out 3 changesets (bug 1324552, bug 1324545, bug 1350973) for causing perma mda leakcheck failures CLOSED TREE

Backed out changeset db14e59c7e6e (bug 1324545)
Backed out changeset 088e13476d39 (bug 1324552)
Backed out changeset e79a64fc49e7 (bug 1350973)
shindli 2019-07-11 18:55:03 +03:00
parent 55b47893cf
commit 883c6619a6
18 changed files with 54 additions and 121 deletions

View File

@ -1132,17 +1132,6 @@ void AudioCallbackDriver::CompleteAudioContextOperations(
  }
}

TimeDuration AudioCallbackDriver::AudioOutputLatency() {
  uint32_t latency_frames;
  int rv = cubeb_stream_get_latency(mAudioStream, &latency_frames);
  if (rv || mSampleRate == 0) {
    return TimeDuration::FromSeconds(0.0);
  }
  return TimeDuration::FromSeconds(static_cast<double>(latency_frames) /
                                   mSampleRate);
}

void AudioCallbackDriver::FallbackToSystemClockDriver() {
  MOZ_ASSERT(!ThreadRunning());
  GraphImpl()->GetMonitor().AssertCurrentThreadOwns();
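
For context, the backed-out AudioCallbackDriver::AudioOutputLatency above simply converts the latency cubeb reports in frames into seconds by dividing by the stream's sample rate. A minimal standalone sketch of that conversion (hypothetical helper name, not part of the patch):

    #include <cstdint>

    // Illustrative only: convert a latency reported in audio frames to seconds,
    // mirroring the arithmetic of the removed AudioOutputLatency(), including
    // its failure mode of reporting zero when the sample rate is unknown.
    static double FramesToSeconds(uint32_t aLatencyFrames, uint32_t aSampleRate) {
      if (aSampleRate == 0) {
        return 0.0;
      }
      return static_cast<double>(aLatencyFrames) / aSampleRate;
    }

    // Example: 480 frames at 48000 Hz is 0.01 s (10 ms) of output latency.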

View File

@ -433,9 +433,6 @@ class AudioCallbackDriver : public GraphDriver,
  void CompleteAudioContextOperations(AsyncCubebOperation aOperation);

  // Returns the output latency for the current audio output stream.
  TimeDuration AudioOutputLatency();

 private:
  /* Remove Mixer callbacks when switching */
  void RemoveMixerCallback();

View File

@ -957,10 +957,6 @@ void MediaStreamGraphImpl::DeviceChanged() {
    MediaStreamGraphImpl* mGraphImpl;
  };

  // Reset the latency, it will get fetched again next time it's queried.
  MOZ_ASSERT(NS_IsMainThread());
  mAudioOutputLatency = 0.0;

  AppendMessage(MakeUnique<Message>(this));
}
@ -3196,8 +3192,7 @@ MediaStreamGraphImpl::MediaStreamGraphImpl(GraphDriverType aDriverRequested,
      mCanRunMessagesSynchronously(false)
#endif
      ,
-      mMainThreadGraphTime(0, "MediaStreamGraphImpl::mMainThreadGraphTime"),
-      mAudioOutputLatency(0.0) {
+      mMainThreadGraphTime(0, "MediaStreamGraphImpl::mMainThreadGraphTime") {
  if (mRealtime) {
    if (aDriverRequested == AUDIO_THREAD_DRIVER) {
      // Always start with zero input channels, and no particular preferences
@ -3801,29 +3796,6 @@ void MediaStreamGraph::ApplyAudioContextOperation(
      aDestinationStream, aStreams, aOperation, aPromise, aFlags));
}

double MediaStreamGraph::AudioOutputLatency() {
  return static_cast<MediaStreamGraphImpl*>(this)->AudioOutputLatency();
}

double MediaStreamGraphImpl::AudioOutputLatency() {
  MOZ_ASSERT(NS_IsMainThread());
  if (mAudioOutputLatency != 0.0) {
    return mAudioOutputLatency;
  }
  MonitorAutoLock lock(mMonitor);
  if (CurrentDriver()->AsAudioCallbackDriver()) {
    mAudioOutputLatency = CurrentDriver()
                              ->AsAudioCallbackDriver()
                              ->AudioOutputLatency()
                              .ToSeconds();
  } else {
    // Failure mode: return 0.0 if running on a normal thread.
    mAudioOutputLatency = 0.0;
  }
  return mAudioOutputLatency;
}

bool MediaStreamGraph::IsNonRealtime() const {
  return !static_cast<const MediaStreamGraphImpl*>(this)->mRealtime;
}
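
The removed MediaStreamGraphImpl::AudioOutputLatency above pairs with the DeviceChanged hunk earlier in this file: the main thread caches the latency, and a device change resets the cache to 0.0 so the next query refetches it from the audio callback driver. A simplified sketch of that cache-and-invalidate pattern, with hypothetical names and none of Gecko's monitor or threading machinery:

    #include <functional>
    #include <utility>

    // Illustrative only: lazily cache a latency value and allow invalidation,
    // loosely modeled on the removed mAudioOutputLatency handling.
    class LatencyCache {
     public:
      explicit LatencyCache(std::function<double()> aFetch)
          : mFetch(std::move(aFetch)) {}

      // Return the cached latency, fetching it on first use (0.0 means "unknown").
      double Get() {
        if (mCached == 0.0) {
          mCached = mFetch();
        }
        return mCached;
      }

      // Called on device change; the next Get() refetches from the driver.
      void Invalidate() { mCached = 0.0; }

     private:
      std::function<double()> mFetch;
      double mCached = 0.0;
    };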

View File

@ -1272,8 +1272,6 @@ class MediaStreamGraph {
   */
  TrackRate GraphRate() const { return mSampleRate; }

  double AudioOutputLatency();

  void RegisterCaptureStreamForWindow(uint64_t aWindowId,
                                      ProcessedMediaStream* aCaptureStream);
  void UnregisterCaptureStreamForWindow(uint64_t aWindowId);

View File

@ -453,8 +453,6 @@ class MediaStreamGraphImpl : public MediaStreamGraph,
  uint32_t AudioOutputChannelCount() const { return mOutputChannels; }

  double AudioOutputLatency();

  /**
   * The audio input channel count for a MediaStreamGraph is the max of all the
   * channel counts requested by the listeners. The max channel count is
@ -952,12 +950,6 @@ class MediaStreamGraphImpl : public MediaStreamGraph,
   * Read by stable state runnable on main thread. Protected by mMonitor.
   */
  GraphTime mNextMainThreadGraphTime = 0;

  /**
   * Cached audio output latency, in seconds. Main thread only. This is reset
   * whenever the audio device running this MediaStreamGraph changes.
   */
  double mAudioOutputLatency;
};

}  // namespace mozilla

View File

@ -36,7 +36,6 @@
#include "mozilla/dom/OscillatorNodeBinding.h"
#include "mozilla/dom/PannerNodeBinding.h"
#include "mozilla/dom/PeriodicWaveBinding.h"
#include "mozilla/dom/Performance.h"
#include "mozilla/dom/Promise.h"
#include "mozilla/dom/StereoPannerNodeBinding.h"
#include "mozilla/dom/WaveShaperNodeBinding.h"
@ -524,30 +523,6 @@ AudioListener* AudioContext::Listener() {
  return mListener;
}

double AudioContext::OutputLatency() { return Graph()->AudioOutputLatency(); }

void AudioContext::GetOutputTimestamp(AudioTimestamp& aTimeStamp) {
  if (!Destination()) {
    aTimeStamp.mContextTime.Construct(0.0);
    aTimeStamp.mPerformanceTime.Construct(0.0);
    return;
  }

  // The currentTime currently being output is the currentTime minus the audio
  // output latency.
  aTimeStamp.mContextTime.Construct(
      std::max(0.0, CurrentTime() - OutputLatency()));
  nsPIDOMWindowInner* parent = GetParentObject();
  Performance* perf = parent ? parent->GetPerformance() : nullptr;
  if (perf) {
    // Convert to milliseconds.
    aTimeStamp.mPerformanceTime.Construct(
        std::max(0., perf->Now() - (OutputLatency() * 1000.)));
  } else {
    aTimeStamp.mPerformanceTime.Construct(0.0);
  }
}

Worklet* AudioContext::GetAudioWorklet(ErrorResult& aRv) {
  if (!mWorklet) {
    mWorklet = AudioWorkletImpl::CreateWorklet(this, aRv);
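
As a worked example of the removed GetOutputTimestamp arithmetic above: the context time currently reaching the speakers is currentTime minus the output latency (in seconds), and the matching Performance timestamp is performance.now() minus the same latency converted to milliseconds, both clamped at zero. The numbers below are hypothetical, for illustration only:

    #include <algorithm>
    #include <cstdio>

    int main() {
      double currentTime = 1.250;    // AudioContext.currentTime, in seconds
      double outputLatency = 0.020;  // output latency, in seconds
      double perfNow = 5321.0;       // performance.now(), in milliseconds

      double contextTime = std::max(0.0, currentTime - outputLatency);
      double performanceTime = std::max(0.0, perfNow - outputLatency * 1000.0);

      // Prints: contextTime=1.230 s, performanceTime=5301.0 ms
      std::printf("contextTime=%.3f s, performanceTime=%.1f ms\n", contextTime,
                  performanceTime);
      return 0;
    }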

View File

@ -9,7 +9,6 @@
#include "AudioParamDescriptorMap.h"
#include "mozilla/dom/OfflineAudioContextBinding.h"
#include "mozilla/dom/AudioContextBinding.h"
#include "MediaBufferDecoder.h"
#include "mozilla/Attributes.h"
#include "mozilla/DOMEventTargetHelper.h"
@ -193,16 +192,6 @@ class AudioContext final : public DOMEventTargetHelper,
  AudioContextState State() const { return mAudioContextState; }

  double BaseLatency() const {
    // Gecko does not do any buffering between rendering the audio and sending
    // it to the audio subsystem.
    return 0.0;
  }

  double OutputLatency();

  void GetOutputTimestamp(AudioTimestamp& aTimeStamp);

  Worklet* GetAudioWorklet(ErrorResult& aRv);

  bool IsRunning() const;

View File

@ -13,12 +13,9 @@
namespace mozilla {
namespace dom {

-MediaElementAudioSourceNode::MediaElementAudioSourceNode(
-    AudioContext* aContext, HTMLMediaElement* aElement)
-    : MediaStreamAudioSourceNode(aContext, TrackChangeBehavior::FollowChanges),
-      mElement(aElement) {
-  MOZ_ASSERT(aElement);
-}
+MediaElementAudioSourceNode::MediaElementAudioSourceNode(AudioContext* aContext)
+    : MediaStreamAudioSourceNode(aContext, TrackChangeBehavior::FollowChanges) {
+}

/* static */
already_AddRefed<MediaElementAudioSourceNode>
@ -31,7 +28,7 @@ MediaElementAudioSourceNode::Create(
  }

  RefPtr<MediaElementAudioSourceNode> node =
-      new MediaElementAudioSourceNode(&aAudioContext, aOptions.mMediaElement);
+      new MediaElementAudioSourceNode(&aAudioContext);

  RefPtr<DOMMediaStream> stream = aOptions.mMediaElement->CaptureAudio(
      aRv, aAudioContext.Destination()->Stream()->Graph());
@ -74,9 +71,5 @@ void MediaElementAudioSourceNode::Destroy() {
  MediaStreamAudioSourceNode::Destroy();
}

HTMLMediaElement* MediaElementAudioSourceNode::MediaElement() {
  return mElement;
}

}  // namespace dom
}  // namespace mozilla
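
The MediaElementAudioSourceNode hunks above drop the constructor argument and the mElement member that backed the mediaElement WebIDL attribute. A minimal sketch of that hold-and-return pattern, using hypothetical stand-in types rather than Gecko's RefPtr and HTMLMediaElement:

    #include <memory>
    #include <utility>

    struct MediaElementStub {};  // Hypothetical stand-in for HTMLMediaElement.

    // Illustrative only: keep a strong reference to the element at construction
    // so a later getter (like the removed MediaElement()) can return it.
    class ElementSourceNodeSketch {
     public:
      explicit ElementSourceNodeSketch(std::shared_ptr<MediaElementStub> aElement)
          : mElement(std::move(aElement)) {}

      MediaElementStub* Element() const { return mElement.get(); }

     private:
      std::shared_ptr<MediaElementStub> mElement;  // The member this backout removes.
    };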

View File

@ -42,11 +42,8 @@ class MediaElementAudioSourceNode final : public MediaStreamAudioSourceNode {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

-  HTMLMediaElement* MediaElement();

 private:
-  explicit MediaElementAudioSourceNode(AudioContext* aContext,
-                                       HTMLMediaElement* aElement);
+  explicit MediaElementAudioSourceNode(AudioContext* aContext);

  void Destroy() override;
@ -55,8 +52,6 @@ class MediaElementAudioSourceNode final : public MediaStreamAudioSourceNode {
  void ListenForAllowedToPlay(const MediaElementAudioSourceOptions& aOptions);

  MozPromiseRequestHolder<GenericNonExclusivePromise> mAllowedToPlayRequest;

  RefPtr<HTMLMediaElement> mElement;
};

}  // namespace dom

View File

@ -65,8 +65,6 @@ class MediaStreamAudioSourceNode
  uint16_t NumberOfInputs() const override { return 0; }

  DOMMediaStream* GetMediaStream() { return mInputStream; }

  const char* NodeType() const override { return "MediaStreamAudioSourceNode"; }

  virtual const char* CrossOriginErrorString() const {

View File

@ -14,18 +14,12 @@ dictionary AudioContextOptions {
  float sampleRate = 0;
};

-dictionary AudioTimestamp {
-  double contextTime;
-  DOMHighResTimeStamp performanceTime;
-};

[Pref="dom.webaudio.enabled",
 Constructor(optional AudioContextOptions contextOptions = {})]
interface AudioContext : BaseAudioContext {
-  readonly attribute double baseLatency;
-  readonly attribute double outputLatency;
-  AudioTimestamp getOutputTimestamp();
+  // Bug 1324545: readonly attribute double outputLatency;
+  // Bug 1324545: AudioTimestamp getOutputTimestamp ();

  [Throws]
  Promise<void> suspend();

View File

@ -27,6 +27,7 @@ interface BaseAudioContext : EventTarget {
  readonly attribute AudioContextState state;
  [Throws, SameObject, SecureContext, Pref="dom.audioworklet.enabled"]
  readonly attribute AudioWorklet audioWorklet;
  // Bug 1324552: readonly attribute double baseLatency;
  [Throws]
  Promise<void> resume();

View File

@ -17,7 +17,7 @@ dictionary MediaElementAudioSourceOptions {
[Pref="dom.webaudio.enabled",
 Constructor(AudioContext context, MediaElementAudioSourceOptions options)]
interface MediaElementAudioSourceNode : AudioNode {
  readonly attribute HTMLMediaElement mediaElement;
};
// Mozilla extensions

View File

@ -17,8 +17,7 @@ dictionary MediaStreamAudioSourceOptions {
[Pref="dom.webaudio.enabled",
 Constructor(AudioContext context, MediaStreamAudioSourceOptions options)]
interface MediaStreamAudioSourceNode : AudioNode {
  [BinaryName="GetMediaStream"]
  readonly attribute MediaStream mediaStream;
};
// Mozilla extensions

View File

@ -14,9 +14,15 @@
  [AudioListener interface: attribute forwardX]
    expected: FAIL

  [MediaStreamAudioSourceNode interface: attribute mediaStream]
    expected: FAIL

  [AudioParam interface: calling cancelAndHoldAtTime(double) on new AudioBufferSourceNode(context).playbackRate with too few arguments must throw TypeError]
    expected: FAIL

  [AudioContext interface: context must inherit property "getOutputTimestamp()" with the proper type]
    expected: FAIL

  [AudioListener interface: context.listener must inherit property "forwardX" with the proper type]
    expected: FAIL
@ -35,6 +41,12 @@
  [AudioListener interface: context.listener must inherit property "positionY" with the proper type]
    expected: FAIL

  [MediaElementAudioSourceNode interface: new MediaElementAudioSourceNode(context, {mediaElement: new Audio}) must inherit property "mediaElement" with the proper type]
    expected: FAIL

  [AudioContext interface: operation getOutputTimestamp()]
    expected: FAIL

  [Stringification of new AudioProcessingEvent('', {\n playbackTime: 0, inputBuffer: buffer, outputBuffer: buffer\n })]
    expected: FAIL
@ -62,6 +74,12 @@
  [AudioListener interface: context.listener must inherit property "forwardY" with the proper type]
    expected: FAIL

  [AudioContext interface: context must inherit property "baseLatency" with the proper type]
    expected: FAIL

  [MediaElementAudioSourceNode interface: attribute mediaElement]
    expected: FAIL

  [AudioListener interface: context.listener must inherit property "positionX" with the proper type]
    expected: FAIL
@ -80,12 +98,21 @@
  [AudioParamMap must be primary interface of worklet_node.parameters]
    expected: FAIL

  [AudioContext interface: context must inherit property "outputLatency" with the proper type]
    expected: FAIL

  [OfflineAudioContext interface: operation suspend(double)]
    expected: FAIL

  [AudioContext interface: attribute baseLatency]
    expected: FAIL

  [AudioParam interface: new AudioBufferSourceNode(context).playbackRate must inherit property "cancelAndHoldAtTime(double)" with the proper type]
    expected: FAIL

  [AudioContext interface: attribute outputLatency]
    expected: FAIL

  [AudioListener interface: attribute upX]
    expected: FAIL

View File

@ -0,0 +1,2 @@
[audiocontext-getoutputtimestamp.html]
  expected: ERROR

View File

@ -1,4 +1,16 @@
[audiocontextoptions.html]
  [X default baseLatency is not greater than 0. Got undefined.]
    expected: FAIL

  [X balanced baseLatency is not greater than or equal to undefined. Got undefined.]
    expected: FAIL

  [X playback baseLatency is not greater than or equal to undefined. Got undefined.]
    expected: FAIL

  [< [test-audiocontextoptions-latencyHint-basic\] 3 out of 9 assertions were failed.]
    expected: FAIL

  [X double-constructor baseLatency small is not less than or equal to undefined. Got undefined.]
    expected: FAIL

View File

@ -35,7 +35,7 @@
          `context.sampleRate (${context.sampleRate} Hz)`).beGreaterThan(0);

      defaultLatency = context.baseLatency;
-      should(defaultLatency, 'default baseLatency').beGreaterThanOrEqualTo(0);
+      should(defaultLatency, 'default baseLatency').beGreaterThan(0);

      // Verify that an AudioContext can be created with the expected
      // latency types.
@ -130,7 +130,7 @@
      should(context1.baseLatency, 'high latency context baseLatency')
          .beEqualTo(context2.baseLatency);
      should(context1.baseLatency, 'high latency context baseLatency')
-          .beGreaterThanOrEqualTo(interactiveLatency);
+          .beGreaterThan(interactiveLatency);
      closingPromises.push(context1.close());
      closingPromises.push(context2.close());