Bug 952893. Part 1: Block the AudioDestinationNode when it's the only node in the AudioContext. r=padenot

Robert O'Callahan 2014-01-16 00:08:20 +13:00
parent 14fd5bbbb9
commit d583f4c9b5
10 changed files with 132 additions and 11 deletions
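
At a high level, the patch keeps a per-AudioContext count of live AudioNodes; when the count drops back to one (just the AudioDestinationNode), the destination's MediaStream gets an explicit blocker so the graph stops doing work, and the wall-clock time spent blocked is accumulated as "extra current time" that is added back into AudioContext.currentTime. Below is a minimal standalone sketch of that bookkeeping, using made-up names (SketchContext, SetBlocked, streamSeconds) rather than the Gecko classes in the diffs that follow; the real patch additionally defers sampling the clock to the next stable state, as the AudioDestinationNode.cpp hunk below shows.

// Standalone sketch (hypothetical names, not Gecko code) of the accounting the
// patch introduces: the node count drives blocking, and time spent blocked is
// reported back through CurrentTime() as "extra" time.
#include <chrono>

struct SketchContext {
  using Clock = std::chrono::steady_clock;

  int nodeCount = 0;         // the destination node itself counts as a node
  double streamSeconds = 0;  // media time actually processed while unblocked
  double extraSeconds = 0;   // wall-clock time accumulated while blocked
  Clock::time_point blockedSince{};
  bool blocked = false;

  void UpdateNodeCount(int delta) {
    bool firstNode = nodeCount == 0;
    nodeCount += delta;
    if (!firstNode) {
      // Block whenever only the destination remains, unblock otherwise.
      SetBlocked(nodeCount == 1);
    }
  }

  void SetBlocked(bool aBlocked) {
    if (aBlocked == blocked) {
      return;
    }
    if (aBlocked) {
      blockedSince = Clock::now();
    } else {
      extraSeconds +=
          std::chrono::duration<double>(Clock::now() - blockedSince).count();
    }
    blocked = aBlocked;
  }

  double CurrentTime() const {
    double extra = extraSeconds;
    if (blocked) {
      extra += std::chrono::duration<double>(Clock::now() - blockedSince).count();
    }
    // Mirrors AudioContext::CurrentTime(): stream time plus ExtraCurrentTime().
    return streamSeconds + extra;
  }
};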

AudioNodeStream.cpp

@@ -10,6 +10,7 @@
 #include "ThreeDPoint.h"
 #include "AudioChannelFormat.h"
 #include "AudioParamTimeline.h"
+#include "AudioContext.h"

 using namespace mozilla::dom;

@@ -30,7 +31,7 @@ AudioNodeStream::~AudioNodeStream()
 }

 void
-AudioNodeStream::SetStreamTimeParameter(uint32_t aIndex, MediaStream* aRelativeToStream,
+AudioNodeStream::SetStreamTimeParameter(uint32_t aIndex, AudioContext* aContext,
                                         double aStreamTime)
 {
   class Message : public ControlMessage {
@@ -50,7 +51,9 @@ AudioNodeStream::SetStreamTimeParameter(uint32_t aIndex, MediaStream* aRelativeT
   };

   MOZ_ASSERT(this);
-  GraphImpl()->AppendMessage(new Message(this, aIndex, aRelativeToStream, aStreamTime));
+  GraphImpl()->AppendMessage(new Message(this, aIndex,
+                                         aContext->DestinationStream(),
+                                         aStreamTime - aContext->ExtraCurrentTime()));
 }

 void
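
The aStreamTime - aContext->ExtraCurrentTime() conversion above is needed because callers now pass times in AudioContext.currentTime coordinates, which include time accumulated while the destination stream was blocked and never actually played by the stream. A rough illustration with made-up numbers (not taken from the patch):

// Hypothetical values, only to show the coordinate change.
double streamTime       = 2.0;  // seconds the destination stream has actually played
double extraCurrentTime = 0.5;  // seconds spent blocked as the only node
double contextTime      = streamTime + extraCurrentTime;  // 2.5 s, what currentTime reports

// A time given by script, e.g. source.start(3.0), is in context coordinates,
// so it is shifted into stream coordinates before it reaches the media graph:
double relativeToStream = 3.0 - extraCurrentTime;  // 2.5 s in stream time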

AudioNodeStream.h

@@ -16,6 +16,7 @@ namespace dom {
 struct ThreeDPoint;
 class AudioParamTimeline;
 class DelayNodeEngine;
+class AudioContext;
 }

 class ThreadSharedFloatArrayBufferList;
@@ -33,6 +34,8 @@ class AudioNodeEngine;
  */
 class AudioNodeStream : public ProcessedMediaStream {
 public:
+  typedef mozilla::dom::AudioContext AudioContext;
+
   enum { AUDIO_TRACK = 1 };

   typedef nsAutoTArray<AudioChunk, 1> OutputChunks;
@@ -66,7 +69,7 @@ public:
   * Sets a parameter that's a time relative to some stream's played time.
   * This time is converted to a time relative to this stream when it's set.
   */
-  void SetStreamTimeParameter(uint32_t aIndex, MediaStream* aRelativeToStream,
+  void SetStreamTimeParameter(uint32_t aIndex, AudioContext* aContext,
                               double aStreamTime);
  void SetDoubleParameter(uint32_t aIndex, double aValue);
  void SetInt32Parameter(uint32_t aIndex, int32_t aValue);

AudioBufferSourceNode.cpp

@@ -532,7 +532,7 @@ AudioBufferSourceNode::Start(double aWhen, double aOffset,

   // Don't set parameter unnecessarily
   if (aWhen > 0.0) {
-    ns->SetStreamTimeParameter(START, Context()->DestinationStream(), aWhen);
+    ns->SetStreamTimeParameter(START, Context(), aWhen);
   }

   MarkActive();
@@ -616,8 +616,7 @@ AudioBufferSourceNode::Stop(double aWhen, ErrorResult& aRv)
     return;
   }

-  ns->SetStreamTimeParameter(STOP, Context()->DestinationStream(),
-                             std::max(0.0, aWhen));
+  ns->SetStreamTimeParameter(STOP, Context(), std::max(0.0, aWhen));
 }

 void

AudioContext.cpp

@@ -84,6 +84,7 @@ AudioContext::AudioContext(nsPIDOMWindow* aWindow,
   : nsDOMEventTargetHelper(aWindow)
   , mSampleRate(GetSampleRateForAudioContext(aIsOffline, aSampleRate))
   , mNumberOfChannels(aNumberOfChannels)
+  , mNodeCount(0)
   , mIsOffline(aIsOffline)
   , mIsStarted(!aIsOffline)
   , mIsShutDown(false)
@@ -95,6 +96,10 @@ AudioContext::AudioContext(nsPIDOMWindow* aWindow,
   mDestination = new AudioDestinationNode(this, aIsOffline, aNumberOfChannels,
                                           aLength, aSampleRate);
   mDestination->Stream()->AddAudioOutput(&gWebAudioOutputKey);
+  // We skip calling SetIsOnlyNodeForContext during mDestination's constructor,
+  // because we can only call SetIsOnlyNodeForContext after mDestination has
+  // been set up.
+  mDestination->SetIsOnlyNodeForContext(true);
 }

 AudioContext::~AudioContext()
@@ -543,7 +548,8 @@ AudioContext::DestinationStream() const
 double
 AudioContext::CurrentTime() const
 {
-  return MediaTimeToSeconds(Destination()->Stream()->GetCurrentTime());
+  return MediaTimeToSeconds(Destination()->Stream()->GetCurrentTime()) +
+      Destination()->ExtraCurrentTime();
 }

 void
@@ -589,6 +595,18 @@ AudioContext::Resume()
   }
 }

+void
+AudioContext::UpdateNodeCount(int32_t aDelta)
+{
+  bool firstNode = mNodeCount == 0;
+  mNodeCount += aDelta;
+  MOZ_ASSERT(mNodeCount >= 0);
+  // mDestinationNode may be null when we're destroying nodes unlinked by CC
+  if (!firstNode && mDestination) {
+    mDestination->SetIsOnlyNodeForContext(mNodeCount == 1);
+  }
+}
+
 JSContext*
 AudioContext::GetJSContext() const
 {
@@ -679,5 +697,11 @@ AudioContext::CollectReports(nsIHandleReportCallback* aHandleReport,
          amount, "Memory used by AudioContext objects (Web Audio).");
 }

+double
+AudioContext::ExtraCurrentTime() const
+{
+  return mDestination->ExtraCurrentTime();
+}
+
 }
 }
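
As a rough usage illustration of UpdateNodeCount() above, here is an assumed sequence reusing the hypothetical SketchContext from the sketch near the top of this page (not real Gecko calls):

#include <cstdio>

int main() {
  SketchContext ctx;       // hypothetical type from the earlier sketch
  ctx.UpdateNodeCount(1);  // the destination node itself; firstNode, so blocking
                           // is skipped inside UpdateNodeCount...
  ctx.SetBlocked(true);    // ...and the ctor blocks explicitly, mirroring
                           // mDestination->SetIsOnlyNodeForContext(true) above

  ctx.UpdateNodeCount(1);  // script creates e.g. an oscillator -> unblocked
  // ... the graph runs and streamSeconds advances ...
  ctx.UpdateNodeCount(-1); // that node goes away -> only the destination is left,
                           // so it blocks again, but CurrentTime() keeps advancing
  std::printf("currentTime = %f\n", ctx.CurrentTime());
  return 0;
}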

AudioContext.h

@@ -246,6 +246,12 @@ public:
   AudioChannel MozAudioChannelType() const;
   void SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv);

+  void UpdateNodeCount(int32_t aDelta);
+
+  // Returns the difference between CurrentTime() and the current time of the
+  // AudioDestinationNode's MediaStream.
+  double ExtraCurrentTime() const;
+
 private:
   void RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob);
   void ShutdownDecoder();
@@ -272,6 +278,8 @@ private:
   nsTHashtable<nsPtrHashKey<PannerNode> > mPannerNodes;
   // Number of channels passed in the OfflineAudioContext ctor.
   uint32_t mNumberOfChannels;
+  // Number of nodes that currently exist for this AudioContext
+  int32_t mNodeCount;
   bool mIsOffline;
   bool mIsStarted;
   bool mIsShutDown;

AudioDestinationNode.cpp

@@ -17,6 +17,8 @@
 #include "nsIPermissionManager.h"
 #include "nsIScriptObjectPrincipal.h"
 #include "nsServiceManagerUtils.h"
+#include "nsIAppShell.h"
+#include "nsWidgetsCID.h"

 namespace mozilla {
 namespace dom {
@@ -217,6 +219,8 @@ AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
   , mAudioChannel(AudioChannel::Normal)
   , mIsOffline(aIsOffline)
   , mHasFinished(false)
+  , mExtraCurrentTime(0)
+  , mExtraCurrentTimeUpdatedSinceLastStableState(false)
 {
   MediaStreamGraph* graph = aIsOffline ?
                             MediaStreamGraph::CreateNonRealtimeInstance() :
@@ -486,6 +490,62 @@ AudioDestinationNode::CreateAudioChannelAgent()
   mAudioChannelAgent->StartPlaying(&state);
   SetCanPlay(state == AudioChannelState::AUDIO_CHANNEL_STATE_NORMAL);
 }

+void
+AudioDestinationNode::NotifyStableState()
+{
+  mExtraCurrentTimeUpdatedSinceLastStableState = false;
+}
+
+static NS_DEFINE_CID(kAppShellCID, NS_APPSHELL_CID);
+
+void
+AudioDestinationNode::ScheduleStableStateNotification()
+{
+  nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID);
+  if (appShell) {
+    nsCOMPtr<nsIRunnable> event =
+      NS_NewRunnableMethod(this, &AudioDestinationNode::NotifyStableState);
+    appShell->RunInStableState(event);
+  }
+}
+
+double
+AudioDestinationNode::ExtraCurrentTime()
+{
+  if (!mStartedBlockingDueToBeingOnlyNode.IsNull() &&
+      !mExtraCurrentTimeUpdatedSinceLastStableState) {
+    mExtraCurrentTimeUpdatedSinceLastStableState = true;
+    mExtraCurrentTimeSinceLastStartedBlocking =
+      (TimeStamp::Now() - mStartedBlockingDueToBeingOnlyNode).ToSeconds();
+    ScheduleStableStateNotification();
+  }
+  return mExtraCurrentTime + mExtraCurrentTimeSinceLastStartedBlocking;
+}
+
+void
+AudioDestinationNode::SetIsOnlyNodeForContext(bool aIsOnlyNode)
+{
+  if (!mStartedBlockingDueToBeingOnlyNode.IsNull() == aIsOnlyNode) {
+    return;
+  }
+
+  if (aIsOnlyNode) {
+    mStream->ChangeExplicitBlockerCount(1);
+    mStartedBlockingDueToBeingOnlyNode = TimeStamp::Now();
+    mExtraCurrentTimeSinceLastStartedBlocking = 0;
+    // Don't do an update of mExtraCurrentTimeSinceLastStartedBlocking until
+    // the next stable state.
+    mExtraCurrentTimeUpdatedSinceLastStableState = true;
+    ScheduleStableStateNotification();
+  } else {
+    // Force update of mExtraCurrentTimeSinceLastStartedBlocking if necessary
+    ExtraCurrentTime();
+    mExtraCurrentTime += mExtraCurrentTimeSinceLastStartedBlocking;
+    mStream->ChangeExplicitBlockerCount(-1);
+    mStartedBlockingDueToBeingOnlyNode = TimeStamp();
+  }
+}
+
 }
 }
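
The ExtraCurrentTime()/NotifyStableState() pair above samples the clock at most once between stable states, which appears to be so that repeated reads of currentTime within a single script run return one consistent value instead of ticking with the wall clock. A minimal standalone sketch of that caching pattern, with a hypothetical RunInStableState() hook standing in for nsIAppShell:

#include <chrono>
#include <functional>
#include <vector>

// Hypothetical stand-in for the appshell's stable-state queue.
static std::vector<std::function<void()>> gStableStateQueue;
static void RunInStableState(std::function<void()> aEvent) {
  gStableStateQueue.push_back(std::move(aEvent));
}

struct BlockedTimeTracker {
  using Clock = std::chrono::steady_clock;
  Clock::time_point startedBlocking = Clock::now();
  double cachedSecondsSinceBlocking = 0;
  bool updatedSinceLastStableState = false;

  double ExtraTime() {
    if (!updatedSinceLastStableState) {
      // Sample the clock once, then keep returning the same value until the
      // event loop reaches a stable state and clears the flag.
      updatedSinceLastStableState = true;
      cachedSecondsSinceBlocking =
          std::chrono::duration<double>(Clock::now() - startedBlocking).count();
      RunInStableState([this] { updatedSinceLastStableState = false; });
    }
    return cachedSecondsSinceBlocking;
  }
};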

AudioDestinationNode.h

@@ -70,12 +70,22 @@ public:
   virtual void NotifyMainThreadStateChanged() MOZ_OVERRIDE;

   void FireOfflineCompletionEvent();

+  // An amount that should be added to the MediaStream's current time to
+  // get the AudioContext.currentTime.
+  double ExtraCurrentTime();
+
+  // When aIsOnlyNode is true, this is the only node for the AudioContext.
+  void SetIsOnlyNodeForContext(bool aIsOnlyNode);
+
 private:
   bool CheckAudioChannelPermissions(AudioChannel aValue);
   void CreateAudioChannelAgent();

   void SetCanPlay(bool aCanPlay);

+  void NotifyStableState();
+  void ScheduleStableStateNotification();
+
   SelfReference<AudioDestinationNode> mOfflineRenderingRef;
   uint32_t mFramesToProduce;

@@ -83,8 +93,13 @@ private:
   // Audio Channel Type.
   AudioChannel mAudioChannel;

   bool mIsOffline;
   bool mHasFinished;
+
+  TimeStamp mStartedBlockingDueToBeingOnlyNode;
+  double mExtraCurrentTime;
+  double mExtraCurrentTimeSinceLastStartedBlocking;
+  bool mExtraCurrentTimeUpdatedSinceLastStableState;
 };

 }

AudioNode.cpp

@@ -19,6 +19,7 @@ NS_IMPL_CYCLE_COLLECTION_CLASS(AudioNode)

 NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(AudioNode, nsDOMEventTargetHelper)
   tmp->DisconnectFromGraph();
+  tmp->mContext->UpdateNodeCount(-1);
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mContext)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mOutputNodes)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mOutputParams)
@@ -58,6 +59,9 @@ AudioNode::AudioNode(AudioContext* aContext,
   , mChannelInterpretation(aChannelInterpretation)
 {
   MOZ_ASSERT(aContext);
+  nsDOMEventTargetHelper::BindToOwner(aContext->GetParentObject());
+  SetIsDOMBinding();
+  aContext->UpdateNodeCount(1);
 }

 AudioNode::~AudioNode()
@@ -65,6 +69,9 @@ AudioNode::~AudioNode()
   MOZ_ASSERT(mInputNodes.IsEmpty());
   MOZ_ASSERT(mOutputNodes.IsEmpty());
   MOZ_ASSERT(mOutputParams.IsEmpty());
+  if (mContext) {
+    mContext->UpdateNodeCount(-1);
+  }
 }

 template <class InputNode>

OscillatorNode.cpp

@@ -578,8 +578,7 @@ OscillatorNode::Start(double aWhen, ErrorResult& aRv)

   // TODO: Perhaps we need to do more here.
   ns->SetStreamTimeParameter(OscillatorNodeEngine::START,
-                             Context()->DestinationStream(),
-                             aWhen);
+                             Context(), aWhen);

   MarkActive();
 }
@@ -605,8 +604,7 @@ OscillatorNode::Stop(double aWhen, ErrorResult& aRv)

   // TODO: Perhaps we need to do more here.
   ns->SetStreamTimeParameter(OscillatorNodeEngine::STOP,
-                             Context()->DestinationStream(),
-                             std::max(0.0, aWhen));
+                             Context(), std::max(0.0, aWhen));
 }

 void

moz.build (content/media/webaudio)

@@ -9,6 +9,7 @@ PARALLEL_DIRS += ['blink', 'test']
 TEST_TOOL_DIRS += ['compiledtest']

 EXPORTS += [
+    'AudioContext.h',
     'AudioParamTimeline.h',
     'MediaBufferDecoder.h',
     'ThreeDPoint.h',