/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "GainNode.h"
#include "mozilla/dom/GainNodeBinding.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "AudioDestinationNode.h"

namespace mozilla {
namespace dom {

NS_IMPL_CYCLE_COLLECTION_INHERITED_1(GainNode, AudioNode,
                                     mGain)

NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(GainNode)
NS_INTERFACE_MAP_END_INHERITING(AudioNode)

NS_IMPL_ADDREF_INHERITED(GainNode, AudioNode)
NS_IMPL_RELEASE_INHERITED(GainNode, AudioNode)
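
// Helper passed to AudioParamTimeline::ConvertEventTimesToTicks below.
// Convert() maps the source stream's current position through graph time to
// the destination stream's timeline and then turns that time plus aTime
// seconds into a tick count.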
struct ConvertTimeToTickHelper
{
  AudioNodeStream* mSourceStream;
  AudioNodeStream* mDestinationStream;

  static int64_t Convert(double aTime, void* aClosure)
  {
    TrackRate sampleRate = IdealAudioRate();

    ConvertTimeToTickHelper* This = static_cast<ConvertTimeToTickHelper*> (aClosure);
    TrackTicks tick = This->mSourceStream->GetCurrentPosition();
    StreamTime streamTime = TicksToTimeRoundDown(sampleRate, tick);
    GraphTime graphTime = This->mSourceStream->StreamTimeToGraphTime(streamTime);
    StreamTime destinationStreamTime = This->mDestinationStream->GraphTimeToStreamTime(graphTime);
    return TimeToTicksRoundDown(sampleRate, destinationStreamTime + SecondsToMediaTime(aTime));
  }
};
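
// Engine that performs the actual gain processing for a GainNode.  It runs
// as part of the node's AudioNodeStream and receives the gain timeline via
// SetTimelineParameter() (see GainNode::SendGainToStream).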
class GainNodeEngine : public AudioNodeEngine
{
public:
  explicit GainNodeEngine(AudioDestinationNode* aDestination)
    : mSource(nullptr)
    , mDestination(static_cast<AudioNodeStream*> (aDestination->Stream()))
    // Keep the default value in sync with the default value in GainNode::GainNode.
    , mGain(1.f)
  {
  }

  void SetSourceStream(AudioNodeStream* aSource)
  {
    mSource = aSource;
  }

  enum Parameters {
    GAIN
  };
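  // Receives a copy of the gain AudioParam's timeline (see
  // GainNode::SendGainToStream).  Event times arrive in seconds and are
  // converted to ticks here so that ProduceAudioBlock can sample the
  // timeline with TrackTicks values.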
  void SetTimelineParameter(uint32_t aIndex, const AudioParamTimeline& aValue) MOZ_OVERRIDE
  {
    switch (aIndex) {
    case GAIN:
      MOZ_ASSERT(mSource && mDestination);
      mGain = aValue;
      ConvertTimeToTickHelper ctth;
      ctth.mSourceStream = mSource;
      ctth.mDestinationStream = mDestination;
      mGain.ConvertEventTimesToTicks(ConvertTimeToTickHelper::Convert, &ctth);
      break;
    default:
      NS_ERROR("Bad GainNodeEngine TimelineParameter");
    }
  }
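
  // Copies the input block to the output and applies the gain: when the gain
  // is a simple value we just scale the block's volume, otherwise we compute
  // a per-tick gain vector from the timeline and scale each channel with it.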
  virtual void ProduceAudioBlock(AudioNodeStream* aStream,
                                 const AudioChunk& aInput,
                                 AudioChunk* aOutput,
                                 bool* aFinished)
  {
    MOZ_ASSERT(mSource == aStream, "Invalid source stream");

    *aOutput = aInput;
    if (mGain.HasSimpleValue()) {
      // Optimize the case where we only have a single value set as the volume
      aOutput->mVolume *= mGain.GetValue();
    } else {
      // First, compute a vector of gains for each track tick based on the
      // timeline at hand, and then for each channel, multiply the values
      // in the buffer with the gain vector.

      // Compute the gain values for the duration of the input AudioChunk
      // XXX we need to add a method to AudioEventTimeline to compute this buffer directly.
      float computedGain[WEBAUDIO_BLOCK_SIZE];
      for (size_t counter = 0; counter < WEBAUDIO_BLOCK_SIZE; ++counter) {
        TrackTicks tick = aStream->GetCurrentPosition() + counter;
        computedGain[counter] = mGain.GetValueAtTime<TrackTicks>(tick);
      }

      // Apply the gain to the output buffer
      for (size_t channel = 0; channel < aOutput->mChannelData.Length(); ++channel) {
        float* buffer = static_cast<float*> (const_cast<void*>
          (aOutput->mChannelData[channel]));
        AudioBlockCopyChannelWithScale(buffer, computedGain, buffer);
      }
    }
  }

  AudioNodeStream* mSource;
  AudioNodeStream* mDestination;
  AudioParamTimeline mGain;
};
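
// A GainNode owns the gain AudioParam and an AudioNodeStream running a
// GainNodeEngine.  The engine is told which stream it belongs to so that it
// can convert AudioParam event times into ticks (see SetTimelineParameter).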
GainNode::GainNode(AudioContext* aContext)
  : AudioNode(aContext)
  , mGain(new AudioParam(this, SendGainToStream, 1.0f, 0.0f, 1.0f))
{
  GainNodeEngine* engine = new GainNodeEngine(aContext->Destination());
  mStream = aContext->Graph()->CreateAudioNodeStream(engine);
  engine->SetSourceStream(static_cast<AudioNodeStream*> (mStream.get()));
}

GainNode::~GainNode()
{
  DestroyMediaStream();
}

JSObject*
GainNode::WrapObject(JSContext* aCx, JSObject* aScope)
{
  return GainNodeBinding::Wrap(aCx, aScope, this);
}
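
// Callback registered with the mGain AudioParam; it forwards the param's
// current timeline to the engine through the node's AudioNodeStream using
// the GainNodeEngine::GAIN parameter index.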
void
GainNode::SendGainToStream(AudioNode* aNode)
{
  GainNode* This = static_cast<GainNode*>(aNode);
  AudioNodeStream* ns = static_cast<AudioNodeStream*>(This->mStream.get());
  ns->SetTimelineParameter(GainNodeEngine::GAIN, *This->mGain);
}

}
}