gecko-dev/content/media/webaudio/GainNode.cpp
Ehsan Akhgari 0165be1d18 Bug 850970 - Set the initial value of gain nodes on the audio stream to make sure that they don't mute playback; r=roc
--HG--
extra : rebase_source : caf58ca903665f8b3014451926cec9772969663f
2013-03-13 23:44:49 -04:00

147 lines
4.7 KiB
C++

/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "GainNode.h"
#include "mozilla/dom/GainNodeBinding.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "AudioDestinationNode.h"
namespace mozilla {
namespace dom {
// Cycle collection: traverse/unlink mGain (the AudioParam holds a pointer
// back to this node, so it must participate in the cycle collector).
NS_IMPL_CYCLE_COLLECTION_INHERITED_1(GainNode, AudioNode,
mGain)
// QueryInterface/AddRef/Release are forwarded to the AudioNode base class.
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(GainNode)
NS_INTERFACE_MAP_END_INHERITING(AudioNode)
NS_IMPL_ADDREF_INHERITED(GainNode, AudioNode)
NS_IMPL_RELEASE_INHERITED(GainNode, AudioNode)
// Closure passed to AudioParamTimeline::ConvertEventTimesToTicks: maps an
// event time in seconds onto the destination stream's tick timeline.
struct ConvertTimeToTickHelper
{
AudioNodeStream* mSourceStream;
AudioNodeStream* mDestinationStream;

static int64_t Convert(double aTime, void* aClosure)
{
TrackRate sampleRate = IdealAudioRate();
ConvertTimeToTickHelper* helper =
static_cast<ConvertTimeToTickHelper*>(aClosure);
// Where is the source stream right now, expressed as graph time?
TrackTicks sourceTick = helper->mSourceStream->GetCurrentPosition();
StreamTime sourceTime = TicksToTimeRoundDown(sampleRate, sourceTick);
GraphTime graphTime = helper->mSourceStream->StreamTimeToGraphTime(sourceTime);
// Re-express that instant on the destination stream's timeline, add the
// requested offset, and convert the sum back into ticks.
StreamTime destTime = helper->mDestinationStream->GraphTimeToStreamTime(graphTime);
return TimeToTicksRoundDown(sampleRate, destTime + SecondsToMediaTime(aTime));
}
};
// Graph-thread engine backing a GainNode: scales each input block by the
// (possibly time-varying) gain timeline.
class GainNodeEngine : public AudioNodeEngine
{
public:
explicit GainNodeEngine(AudioDestinationNode* aDestination)
: mSource(nullptr)
, mDestination(static_cast<AudioNodeStream*> (aDestination->Stream()))
// Keep the default value in sync with the default value in GainNode::GainNode.
, mGain(1.f)
{
}

// Called once after the node's stream is created; needed to convert event
// times between the source and destination stream timelines.
void SetSourceStream(AudioNodeStream* aSource)
{
mSource = aSource;
}

enum Parameters {
GAIN
};
void SetTimelineParameter(uint32_t aIndex, const AudioParamTimeline& aValue) MOZ_OVERRIDE
{
switch (aIndex) {
case GAIN: {
// Braced scope so ctth's lifetime does not extend across the other
// switch labels (crossing a declaration in a switch is fragile).
MOZ_ASSERT(mSource && mDestination);
mGain = aValue;
ConvertTimeToTickHelper ctth;
ctth.mSourceStream = mSource;
ctth.mDestinationStream = mDestination;
mGain.ConvertEventTimesToTicks(ConvertTimeToTickHelper::Convert, &ctth);
break;
}
default:
NS_ERROR("Bad GainNodeEngine TimelineParameter");
}
}

// MOZ_OVERRIDE added for consistency with SetTimelineParameter above, so the
// compiler verifies this actually overrides the base-class virtual.
virtual void ProduceAudioBlock(AudioNodeStream* aStream,
const AudioChunk& aInput,
AudioChunk* aOutput,
bool* aFinished) MOZ_OVERRIDE
{
MOZ_ASSERT(mSource == aStream, "Invalid source stream");

*aOutput = aInput;
if (mGain.HasSimpleValue()) {
// Optimize the case where we only have a single value set as the volume
aOutput->mVolume *= mGain.GetValue();
} else {
// First, compute a vector of gains for each track tick based on the
// timeline at hand, and then for each channel, multiply the values
// in the buffer with the gain vector.

// Compute the gain values for the duration of the input AudioChunk
// XXX we need to add a method to AudioEventTimeline to compute this buffer directly.
float computedGain[WEBAUDIO_BLOCK_SIZE];
for (size_t counter = 0; counter < WEBAUDIO_BLOCK_SIZE; ++counter) {
TrackTicks tick = aStream->GetCurrentPosition() + counter;
computedGain[counter] = mGain.GetValueAtTime<TrackTicks>(tick);
}

// Apply the gain to the output buffer
// NOTE(review): *aOutput aliases aInput here, so this const_cast writes
// into the input chunk's channel buffers in place. If those buffers can
// be shared with other consumers of the input, this mutates their data
// too — confirm, and allocate a fresh output block if so.
for (size_t channel = 0; channel < aOutput->mChannelData.Length(); ++channel) {
float* buffer = static_cast<float*> (const_cast<void*>
(aOutput->mChannelData[channel]));
AudioBlockCopyChannelWithScale(buffer, computedGain, buffer);
}
}
}

AudioNodeStream* mSource;      // the GainNode's own stream (non-owning)
AudioNodeStream* mDestination; // the context's destination stream (non-owning)
AudioParamTimeline mGain;      // gain automation timeline, in ticks after conversion
};
// The initial gain value (1.0f) must stay in sync with GainNodeEngine's
// constructor so a freshly created node does not mute playback.
GainNode::GainNode(AudioContext* aContext)
: AudioNode(aContext)
, mGain(new AudioParam(this, SendGainToStream, 1.0f, 0.0f, 1.0f))
{
// Build the engine against the context's destination, create our stream
// around it, then hand the stream back to the engine as its source.
GainNodeEngine* gainEngine = new GainNodeEngine(aContext->Destination());
mStream = aContext->Graph()->CreateAudioNodeStream(gainEngine);
gainEngine->SetSourceStream(static_cast<AudioNodeStream*>(mStream.get()));
}
GainNode::~GainNode()
{
// Tear down the underlying MediaStream so the graph stops processing us.
DestroyMediaStream();
}
// Create the JS reflector for this node via the generated WebIDL binding.
JSObject*
GainNode::WrapObject(JSContext* aCx, JSObject* aScope)
{
return GainNodeBinding::Wrap(aCx, aScope, this);
}
// AudioParam change callback: pushes the current mGain timeline down to the
// GAIN parameter of this node's AudioNodeStream (see GainNodeEngine::GAIN).
void
GainNode::SendGainToStream(AudioNode* aNode)
{
GainNode* gainNode = static_cast<GainNode*>(aNode);
AudioNodeStream* stream = static_cast<AudioNodeStream*>(gainNode->mStream.get());
stream->SetTimelineParameter(GainNodeEngine::GAIN, *gainNode->mGain);
}
}
}