mirror of
https://github.com/mozilla/gecko-dev.git
synced 2024-12-24 16:54:08 +00:00
Bug 851966 - Only store the produced AudioChunks for AudioNodeStreams that will result in playback; r=roc
This commit is contained in:
parent
31297a7d6b
commit
ddf63292e4
@ -189,7 +189,7 @@ AudioNodeStream::ObtainInputBlock(AudioChunk* aTmpChunk)
|
||||
if (a->IsFinishedOnGraphThread()) {
|
||||
continue;
|
||||
}
|
||||
AudioChunk* chunk = a->mLastChunk;
|
||||
AudioChunk* chunk = &a->mLastChunk;
|
||||
// XXX when we implement DelayNode, this will no longer be true and we'll
|
||||
// need to treat a null chunk (when the DelayNode hasn't had a chance
|
||||
// to produce data yet) as silence here.
|
||||
@ -268,11 +268,16 @@ AudioNodeStream::ProduceOutput(GraphTime aFrom, GraphTime aTo)
|
||||
}
|
||||
}
|
||||
|
||||
mLastChunk = segment->AppendAndConsumeChunk(&outputChunk);
|
||||
mLastChunk = outputChunk;
|
||||
if (mKind == MediaStreamGraph::EXTERNAL_STREAM) {
|
||||
segment->AppendAndConsumeChunk(&outputChunk);
|
||||
} else {
|
||||
segment->AppendNullData(outputChunk.GetDuration());
|
||||
}
|
||||
|
||||
for (uint32_t j = 0; j < mListeners.Length(); ++j) {
|
||||
MediaStreamListener* l = mListeners[j];
|
||||
AudioChunk copyChunk = *mLastChunk;
|
||||
AudioChunk copyChunk = outputChunk;
|
||||
AudioSegment tmpSegment;
|
||||
tmpSegment.AppendAndConsumeChunk(&copyChunk);
|
||||
l->NotifyQueuedTrackChanges(Graph(), AUDIO_NODE_STREAM_TRACK_ID,
|
||||
|
@ -42,8 +42,11 @@ public:
|
||||
/**
|
||||
* Transfers ownership of aEngine to the new AudioNodeStream.
|
||||
*/
|
||||
explicit AudioNodeStream(AudioNodeEngine* aEngine)
|
||||
: ProcessedMediaStream(nullptr), mEngine(aEngine), mLastChunk(nullptr)
|
||||
AudioNodeStream(AudioNodeEngine* aEngine,
|
||||
MediaStreamGraph::AudioNodeStreamKind aKind)
|
||||
: ProcessedMediaStream(nullptr),
|
||||
mEngine(aEngine),
|
||||
mKind(aKind)
|
||||
{
|
||||
}
|
||||
~AudioNodeStream();
|
||||
@ -81,7 +84,9 @@ protected:
|
||||
// The engine that will generate output for this node.
|
||||
nsAutoPtr<AudioNodeEngine> mEngine;
|
||||
// The last block produced by this node.
|
||||
AudioChunk* mLastChunk;
|
||||
AudioChunk mLastChunk;
|
||||
// Whether this is an internal or external stream
|
||||
MediaStreamGraph::AudioNodeStreamKind mKind;
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -2002,9 +2002,10 @@ MediaStreamGraph::CreateTrackUnionStream(DOMMediaStream* aWrapper)
|
||||
}
|
||||
|
||||
AudioNodeStream*
|
||||
MediaStreamGraph::CreateAudioNodeStream(AudioNodeEngine* aEngine)
|
||||
MediaStreamGraph::CreateAudioNodeStream(AudioNodeEngine* aEngine,
|
||||
AudioNodeStreamKind aKind)
|
||||
{
|
||||
AudioNodeStream* stream = new AudioNodeStream(aEngine);
|
||||
AudioNodeStream* stream = new AudioNodeStream(aEngine, aKind);
|
||||
NS_ADDREF(stream);
|
||||
MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
|
||||
stream->SetGraphImpl(graph);
|
||||
|
@ -883,11 +883,16 @@ public:
|
||||
* particular tracks of each input stream.
|
||||
*/
|
||||
ProcessedMediaStream* CreateTrackUnionStream(DOMMediaStream* aWrapper);
|
||||
// Internal AudioNodeStreams can only pass their output to another
|
||||
// AudioNode, whereas external AudioNodeStreams can pass their output
|
||||
// to an nsAudioStream for playback.
|
||||
enum AudioNodeStreamKind { INTERNAL_STREAM, EXTERNAL_STREAM };
|
||||
/**
|
||||
* Create a stream that will process audio for an AudioNode.
|
||||
* Takes ownership of aEngine.
|
||||
*/
|
||||
AudioNodeStream* CreateAudioNodeStream(AudioNodeEngine* aEngine);
|
||||
AudioNodeStream* CreateAudioNodeStream(AudioNodeEngine* aEngine,
|
||||
AudioNodeStreamKind aKind);
|
||||
/**
|
||||
* Returns the number of graph updates sent. This can be used to track
|
||||
* whether a given update has been processed by the graph thread and reflected
|
||||
|
@ -227,7 +227,8 @@ AudioBufferSourceNode::AudioBufferSourceNode(AudioContext* aContext)
|
||||
, mStartCalled(false)
|
||||
{
|
||||
SetProduceOwnOutput(true);
|
||||
mStream = aContext->Graph()->CreateAudioNodeStream(new AudioBufferSourceNodeEngine());
|
||||
mStream = aContext->Graph()->CreateAudioNodeStream(new AudioBufferSourceNodeEngine(),
|
||||
MediaStreamGraph::INTERNAL_STREAM);
|
||||
mStream->AddMainThreadListener(this);
|
||||
}
|
||||
|
||||
|
@ -31,7 +31,8 @@ NS_IMPL_CYCLE_COLLECTING_RELEASE(AudioDestinationNode)
|
||||
AudioDestinationNode::AudioDestinationNode(AudioContext* aContext, MediaStreamGraph* aGraph)
|
||||
: AudioNode(aContext)
|
||||
{
|
||||
mStream = aGraph->CreateAudioNodeStream(new AudioNodeEngine());
|
||||
mStream = aGraph->CreateAudioNodeStream(new AudioNodeEngine(),
|
||||
MediaStreamGraph::EXTERNAL_STREAM);
|
||||
SetIsDOMBinding();
|
||||
}
|
||||
|
||||
|
@ -118,7 +118,7 @@ GainNode::GainNode(AudioContext* aContext)
|
||||
, mGain(new AudioParam(this, SendGainToStream, 1.0f, 0.0f, 1.0f))
|
||||
{
|
||||
GainNodeEngine* engine = new GainNodeEngine(aContext->Destination());
|
||||
mStream = aContext->Graph()->CreateAudioNodeStream(engine);
|
||||
mStream = aContext->Graph()->CreateAudioNodeStream(engine, MediaStreamGraph::INTERNAL_STREAM);
|
||||
engine->SetSourceStream(static_cast<AudioNodeStream*> (mStream.get()));
|
||||
}
|
||||
|
||||
|
@ -121,7 +121,8 @@ PannerNode::PannerNode(AudioContext* aContext)
|
||||
, mConeOuterAngle(360.)
|
||||
, mConeOuterGain(0.)
|
||||
{
|
||||
mStream = aContext->Graph()->CreateAudioNodeStream(new PannerNodeEngine());
|
||||
mStream = aContext->Graph()->CreateAudioNodeStream(new PannerNodeEngine(),
|
||||
MediaStreamGraph::INTERNAL_STREAM);
|
||||
// We should register once we have set up our stream and engine.
|
||||
Context()->Listener()->RegisterPannerNode(this);
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user