Bug 864164 - Part 2: Send the AudioBufferSourceNode buffer parameter changes to the stream; r=padenot

Ehsan Akhgari 2013-04-22 17:01:22 -04:00
parent c1c42130cf
commit f4ede88cdf
6 changed files with 127 additions and 29 deletions
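
A minimal script sketch of the behaviour change (the variable names here are illustrative, not taken from the patch): previously the buffer was only handed to the AudioNodeStream inside start(), so assigning source.buffer after start(), or starting with a null buffer, never reached the stream. With this patch the buffer setter forwards every change:

var ctx = new AudioContext();
var source = ctx.createBufferSource();
source.connect(ctx.destination);
source.start(0);                   // no buffer assigned yet
source.buffer = ctx.createBuffer(1, 2048, ctx.sampleRate); // forwarded to the stream
source.buffer = null;              // also forwarded; the node produces silence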


@@ -12,6 +12,7 @@
#include "AudioDestinationNode.h"
#include "PannerNode.h"
#include "speex/speex_resampler.h"
#include <limits>
namespace mozilla {
namespace dom {
@@ -400,10 +401,13 @@ AudioBufferSourceNode::AudioBufferSourceNode(AudioContext* aContext)
: AudioNode(aContext)
, mLoopStart(0.0)
, mLoopEnd(0.0)
, mOffset(0.0)
, mDuration(std::numeric_limits<double>::min())
, mPlaybackRate(new AudioParam(this, SendPlaybackRateToStream, 1.0f))
, mPannerNode(nullptr)
, mLoop(false)
, mStartCalled(false)
, mOffsetAndDurationRemembered(false)
{
mStream = aContext->Graph()->CreateAudioNodeStream(
new AudioBufferSourceNodeEngine(this, aContext->Destination()),
@@ -425,7 +429,7 @@ AudioBufferSourceNode::WrapObject(JSContext* aCx, JSObject* aScope)
}
void
AudioBufferSourceNode::Start(JSContext* aCx, double aWhen, double aOffset,
AudioBufferSourceNode::Start(double aWhen, double aOffset,
const Optional<double>& aDuration, ErrorResult& aRv)
{
if (mStartCalled) {
@@ -435,40 +439,77 @@ AudioBufferSourceNode::Start(JSContext* aCx, double aWhen, double aOffset,
mStartCalled = true;
AudioNodeStream* ns = static_cast<AudioNodeStream*>(mStream.get());
if (!mBuffer || !ns) {
if (!ns) {
// Nothing to play, or we're already dead for some reason
return;
}
float rate = mBuffer->SampleRate();
int32_t lengthSamples = mBuffer->Length();
nsRefPtr<ThreadSharedFloatArrayBufferList> data =
mBuffer->GetThreadSharedChannelsForRate(aCx);
if (mBuffer) {
double duration = aDuration.WasPassed() ?
aDuration.Value() :
std::numeric_limits<double>::min();
SendOffsetAndDurationParametersToStream(ns, aOffset, duration);
} else {
// Remember our arguments so that we can use them once we have a buffer
mOffset = aOffset;
mDuration = aDuration.WasPassed() ?
aDuration.Value() :
std::numeric_limits<double>::min();
mOffsetAndDurationRemembered = true;
}
// Don't set parameter unnecessarily
if (aWhen > 0.0) {
ns->SetStreamTimeParameter(START, Context()->DestinationStream(), aWhen);
}
MOZ_ASSERT(!mPlayingRef, "We can only accept a successful start() call once");
mPlayingRef.Take(this);
}
void
AudioBufferSourceNode::SendBufferParameterToStream(JSContext* aCx)
{
AudioNodeStream* ns = static_cast<AudioNodeStream*>(mStream.get());
MOZ_ASSERT(ns, "Why don't we have a stream here?");
if (mBuffer) {
float rate = mBuffer->SampleRate();
nsRefPtr<ThreadSharedFloatArrayBufferList> data =
mBuffer->GetThreadSharedChannelsForRate(aCx);
ns->SetBuffer(data.forget());
ns->SetInt32Parameter(SAMPLE_RATE, rate);
} else {
ns->SetBuffer(nullptr);
}
if (mOffsetAndDurationRemembered) {
SendOffsetAndDurationParametersToStream(ns, mOffset, mDuration);
}
}
void
AudioBufferSourceNode::SendOffsetAndDurationParametersToStream(AudioNodeStream* aStream,
double aOffset,
double aDuration)
{
float rate = mBuffer ? mBuffer->SampleRate() : Context()->SampleRate();
int32_t lengthSamples = mBuffer ? mBuffer->Length() : 0;
double length = double(lengthSamples) / rate;
double offset = std::max(0.0, aOffset);
double endOffset = aDuration.WasPassed() ?
std::min(aOffset + aDuration.Value(), length) : length;
double endOffset = aDuration == std::numeric_limits<double>::min() ?
length : std::min(aOffset + aDuration, length);
if (offset >= endOffset) {
return;
}
ns->SetBuffer(data.forget());
// Don't set parameter unnecessarily
if (aWhen > 0.0) {
ns->SetStreamTimeParameter(START, Context()->DestinationStream(), aWhen);
}
int32_t offsetTicks = NS_lround(offset*rate);
// Don't set parameter unnecessarily
if (offsetTicks > 0) {
ns->SetInt32Parameter(OFFSET, offsetTicks);
aStream->SetInt32Parameter(OFFSET, offsetTicks);
}
ns->SetInt32Parameter(DURATION,
NS_lround(endOffset*rate) - offsetTicks);
ns->SetInt32Parameter(SAMPLE_RATE, rate);
MOZ_ASSERT(!mPlayingRef, "We can only accept a successful start() call once");
mPlayingRef.Take(this);
aStream->SetInt32Parameter(DURATION, NS_lround(endOffset*rate) - offsetTicks);
}
void
@@ -479,6 +520,12 @@ AudioBufferSourceNode::Stop(double aWhen, ErrorResult& aRv)
return;
}
if (!mBuffer) {
// We don't have a buffer, so the stream is never marked as finished.
// Therefore we need to drop our playing ref right now.
mPlayingRef.Drop(this);
}
AudioNodeStream* ns = static_cast<AudioNodeStream*>(mStream.get());
if (!ns) {
// We've already stopped and had our stream shut down
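
Two pieces of the new AudioBufferSourceNode.cpp logic are worth sketching: start() called before any buffer exists stashes the offset and duration (mOffset, mDuration, mOffsetAndDurationRemembered), and SendOffsetAndDurationParametersToStream() later clamps them and converts seconds into ticks. A sketch with assumed values, restating the C++ arithmetic in script:

var ctx = new AudioContext();
var source = ctx.createBufferSource();
source.connect(ctx.destination);
// start() runs while source.buffer is still null, so the node only remembers
// offset = 0.5s and duration = 1.0s for later.
source.start(0, 0.5, 1.0);
// Assigning a buffer triggers SendBufferParameterToStream(), which sends the
// buffer, its sample rate, and then the remembered offset/duration.
source.buffer = ctx.createBuffer(1, ctx.sampleRate * 2, ctx.sampleRate); // 2s buffer

// The seconds-to-ticks conversion, assuming a 44100 Hz buffer:
var rate = 44100;
var offset = Math.max(0.0, 0.5);                      // negative offsets clamp to 0
var endOffset = Math.min(0.5 + 1.0, 2.0);             // clamp to the buffer length: 1.5s
var offsetTicks = Math.round(offset * rate);          // 22050 -> OFFSET
var durationTicks = Math.round(endOffset * rate) - offsetTicks; // 44100 -> DURATION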


@@ -55,18 +55,18 @@ public:
virtual JSObject* WrapObject(JSContext* aCx, JSObject* aScope);
void Start(JSContext* aCx, double aWhen, double aOffset,
void Start(double aWhen, double aOffset,
const Optional<double>& aDuration, ErrorResult& aRv);
void NoteOn(JSContext* aCx, double aWhen, ErrorResult& aRv)
void NoteOn(double aWhen, ErrorResult& aRv)
{
Start(aCx, aWhen, 0.0, Optional<double>(), aRv);
Start(aWhen, 0.0, Optional<double>(), aRv);
}
void NoteGrainOn(JSContext* aCx, double aWhen, double aOffset,
void NoteGrainOn(double aWhen, double aOffset,
double aDuration, ErrorResult& aRv)
{
Optional<double> duration;
duration.Construct(aDuration);
Start(aCx, aWhen, aOffset, duration, aRv);
Start(aWhen, aOffset, duration, aRv);
}
void Stop(double aWhen, ErrorResult& aRv);
void NoteOff(double aWhen, ErrorResult& aRv)
@@ -74,13 +74,14 @@ public:
Stop(aWhen, aRv);
}
AudioBuffer* GetBuffer() const
AudioBuffer* GetBuffer(JSContext* aCx) const
{
return mBuffer;
}
void SetBuffer(AudioBuffer* aBuffer)
void SetBuffer(JSContext* aCx, AudioBuffer* aBuffer)
{
mBuffer = aBuffer;
SendBufferParameterToStream(aCx);
}
AudioParam* PlaybackRate() const
{
@@ -136,17 +137,24 @@ private:
};
void SendLoopParametersToStream();
void SendBufferParameterToStream(JSContext* aCx);
void SendOffsetAndDurationParametersToStream(AudioNodeStream* aStream,
double aOffset,
double aDuration);
static void SendPlaybackRateToStream(AudioNode* aNode);
private:
double mLoopStart;
double mLoopEnd;
double mOffset;
double mDuration;
nsRefPtr<AudioBuffer> mBuffer;
nsRefPtr<AudioParam> mPlaybackRate;
PannerNode* mPannerNode;
SelfReference<AudioBufferSourceNode> mPlayingRef; // a reference to self while playing
bool mLoop;
bool mStartCalled;
bool mOffsetAndDurationRemembered;
};
}
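
As the header shows, the legacy noteOn()/noteGrainOn() entry points are now thin wrappers over start() with the same JSContext-free signature; for instance, these two calls (arbitrary values) take the same path through Start():

source.noteGrainOn(0, 0.5, 1.0); // legacy name
source.start(0, 0.5, 1.0);       // play from the 0.5s offset for 1s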


@@ -25,6 +25,7 @@ MOCHITEST_FILES := \
test_audioBufferSourceNode.html \
test_audioBufferSourceNodeLoop.html \
test_audioBufferSourceNodeLoopStartEnd.html \
test_audioBufferSourceNodeNullBuffer.html \
test_badConnect.html \
test_biquadFilterNode.html \
test_currentTime.html \


@@ -21,10 +21,10 @@ addLoadEvent(function() {
}
var source = context.createBufferSource();
source.buffer = buffer;
var sp = context.createScriptProcessor(2048);
source.start(0);
source.buffer = buffer;
source.connect(sp);
sp.connect(context.destination);
sp.onaudioprocess = function(e) {


@@ -0,0 +1,42 @@
<!DOCTYPE HTML>
<html>
<head>
<title>Test AudioBufferSourceNode</title>
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="text/javascript" src="webaudio.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
</head>
<body>
<pre id="test">
<script class="testbody" type="text/javascript">
SimpleTest.waitForExplicitFinish();
addLoadEvent(function() {
SpecialPowers.setBoolPref("media.webaudio.enabled", true);
var context = new AudioContext();
var expectedBuffer = context.createBuffer(1, 2048, context.sampleRate); // silence
var source = context.createBufferSource();
var sp = context.createScriptProcessor(2048);
source.start(0);
source.buffer = null;
is(source.buffer, null, "Try playing back a null buffer");
source.connect(sp);
sp.connect(context.destination);
sp.onaudioprocess = function(e) {
compareBuffers(e.inputBuffer.getChannelData(0), expectedBuffer.getChannelData(0));
compareBuffers(e.inputBuffer.getChannelData(1), expectedBuffer.getChannelData(0));
sp.onaudioprocess = null;
SpecialPowers.clearUserPref("media.webaudio.enabled");
SimpleTest.finish();
};
});
</script>
</pre>
</body>
</html>


@@ -106,7 +106,7 @@ DOMInterfaces = {
},
'AudioBufferSourceNode': {
'implicitJSContext': [ 'start', 'noteOn', 'noteGrainOn' ],
'implicitJSContext': [ 'buffer' ],
'resultNotAddRefed': [ 'playbackRate' ],
},