Bug 971528 - Allocate given number of channels for WebRTC mic source. r=jesup

MozReview-Commit-ID: EVii9ACkjBu

--HG--
extra : rebase_source : 87b947d1b6321bbf649876fb3ce5f8d3ff67f371
Alex Chronopoulos 2017-05-29 13:48:06 +03:00
parent 65f61e22ec
commit c62f6bd444
2 changed files with 21 additions and 4 deletions


@@ -153,6 +153,7 @@ public:
virtual int GetRecordingDeviceName(int aIndex, char (&aStrNameUTF8)[128],
char aStrGuidUTF8[128]) = 0;
virtual int GetRecordingDeviceStatus(bool& aIsAvailable) = 0;
virtual int GetChannelCount(int aDeviceIndex, uint32_t& aChannels) = 0;
virtual void StartRecording(SourceMediaStream *aStream, AudioDataListener *aListener) = 0;
virtual void StopRecording(SourceMediaStream *aStream) = 0;
virtual int SetRecordingDevice(int aIndex) = 0;
@@ -264,6 +265,11 @@ public:
return 0;
}
int GetChannelCount(int aDeviceIndex, uint32_t& aChannels)
{
return GetDeviceMaxChannels(aDeviceIndex, aChannels);
}
static int GetDeviceMaxChannels(int aDeviceIndex, uint32_t& aChannels)
{
#ifdef MOZ_WIDGET_ANDROID
@@ -379,6 +385,12 @@ public:
return 0;
}
int GetChannelCount(int aDeviceIndex, uint32_t& aChannels)
{
aChannels = 1; // default to mono
return 0;
}
void StartRecording(SourceMediaStream *aStream, AudioDataListener *aListener) {}
void StopRecording(SourceMediaStream *aStream) {}
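
Note on the three hunks above: the new GetChannelCount hook follows the existing AudioInput convention of returning 0 on success and reporting the result through an out-parameter, with the real backend delegating to a device query and the stub backend reporting mono. A minimal standalone sketch of that contract; the class and method names below are illustrative stand-ins, not the actual Mozilla types:

#include <cstdint>
#include <cstdio>

// Illustrative stand-in for the AudioInput backends touched in this patch.
class FakeAudioInput {
public:
  virtual ~FakeAudioInput() = default;
  // Same convention as the patch: return 0 on success, channel count via out-param.
  virtual int GetChannelCount(int aDeviceIndex, uint32_t& aChannels)
  {
    aChannels = 1; // stub backend defaults to mono
    return 0;
  }
};

class StereoAudioInput : public FakeAudioInput {
public:
  int GetChannelCount(int aDeviceIndex, uint32_t& aChannels) override
  {
    aChannels = 2; // pretend the capture device exposes two channels
    return 0;
  }
};

int main()
{
  StereoAudioInput input;
  uint32_t channels = 0;
  if (input.GetChannelCount(/* aDeviceIndex */ 0, channels) == 0) {
    std::printf("device advertises %u channel(s)\n", channels);
  }
  return 0;
}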


@@ -616,11 +616,11 @@ MediaEngineWebRTCMicrophoneSource::InsertInGraph(const T* aBuffer,
RefPtr<SharedBuffer> buffer =
SharedBuffer::Create(aFrames * aChannels * sizeof(T));
AutoTArray<const T*, 8> channels;
channels.SetLength(aChannels);
if (aChannels == 1) {
PodCopy(static_cast<T*>(buffer->Data()), aBuffer, aFrames);
channels.AppendElement(static_cast<T*>(buffer->Data()));
} else {
channels.SetLength(aChannels);
AutoTArray<T*, 8> write_channels;
write_channels.SetLength(aChannels);
T * samples = static_cast<T*>(buffer->Data());
@@ -637,6 +637,7 @@ MediaEngineWebRTCMicrophoneSource::InsertInGraph(const T* aBuffer,
write_channels.Elements());
}
MOZ_ASSERT(aChannels == channels.Length());
segment->AppendFrames(buffer.forget(), channels, aFrames,
mPrincipalHandles[i]);
segment->GetStartTime(insertTime);
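
Note on the InsertInGraph hunks: the multi-channel branch packs all channels into one SharedBuffer laid out planar (one channel after another) and deinterleaves the incoming interleaved capture data into those per-channel regions, which is what the write_channels/offset setup feeds into DeinterleaveAndConvertBuffer. A self-contained sketch of that layout and of what such a helper does for plain int16_t with no format conversion; the function and variable names are illustrative, not the actual helper:

#include <cstdint>
#include <vector>

// Split interleaved samples (L R L R ...) into per-channel planar buffers,
// mirroring the write_channels/offset setup in InsertInGraph above.
void Deinterleave(const int16_t* aInterleaved, size_t aFrames, uint32_t aChannels,
                  int16_t** aOutChannels)
{
  for (size_t frame = 0; frame < aFrames; ++frame) {
    for (uint32_t c = 0; c < aChannels; ++c) {
      aOutChannels[c][frame] = aInterleaved[frame * aChannels + c];
    }
  }
}

int main()
{
  const size_t frames = 4;
  const uint32_t channels = 2;
  const int16_t interleaved[frames * channels] = {1, -1, 2, -2, 3, -3, 4, -4};

  // One contiguous allocation with channels laid out back to back (planar),
  // like the single SharedBuffer in the patch.
  std::vector<int16_t> storage(frames * channels);
  int16_t* writePtrs[channels];
  for (uint32_t c = 0; c < channels; ++c) {
    writePtrs[c] = storage.data() + c * frames;
  }

  Deinterleave(interleaved, frames, channels, writePtrs);
  // storage now holds {1, 2, 3, 4, -1, -2, -3, -4}.
  return 0;
}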
@@ -789,12 +790,16 @@ MediaEngineWebRTCMicrophoneSource::AllocChannel()
}
#endif // MOZ_B2G
// Set "codec" to PCM, 32kHz on 1 channel
// Set "codec" to PCM, 32kHz on device's channels
ScopedCustomReleasePtr<webrtc::VoECodec> ptrVoECodec(webrtc::VoECodec::GetInterface(mVoiceEngine));
if (ptrVoECodec) {
webrtc::CodecInst codec;
strcpy(codec.plname, ENCODING);
codec.channels = CHANNELS;
uint32_t channels = 0;
if (mAudioInput->GetChannelCount(mCapIndex, channels) == 0) {
codec.channels = channels;
}
MOZ_ASSERT(mSampleFrequency == 16000 || mSampleFrequency == 32000);
codec.rate = SAMPLE_RATE(mSampleFrequency);
codec.plfreq = mSampleFrequency;
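
Note on the AllocChannel hunk: the codec's channel count is now taken from the input backend, and only when the query succeeds, so a failed query leaves the previous default in place. A trimmed-down sketch of that query-with-fallback pattern; the struct and function names here are illustrative stand-ins, not the real webrtc::CodecInst or AudioInput API:

#include <cstdint>

struct FakeCodecInst {
  uint32_t channels = 1; // mono fallback, analogous to the CHANNELS default
  int plfreq = 32000;
};

// Stand-in for mAudioInput->GetChannelCount(): returns 0 on success.
int QueryDeviceChannels(int aDeviceIndex, uint32_t& aChannels)
{
  aChannels = 2; // pretend the capture device is stereo
  return 0;
}

void ConfigureCodec(FakeCodecInst& aCodec, int aDeviceIndex)
{
  uint32_t channels = 0;
  // Only take the queried value on success; otherwise keep the default.
  if (QueryDeviceChannels(aDeviceIndex, channels) == 0) {
    aCodec.channels = channels;
  }
}

int main()
{
  FakeCodecInst codec;
  ConfigureCodec(codec, 0);
  return codec.channels == 2 ? 0 : 1;
}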
@@ -896,8 +901,8 @@ MediaEngineWebRTCMicrophoneSource::Process(int channel,
if (mState != kStarted)
return;
MOZ_ASSERT(!isStereo);
InsertInGraph<int16_t>(audio10ms, length, 1);
uint32_t channels = isStereo ? 2 : 1;
InsertInGraph<int16_t>(audio10ms, length, channels);
return;
}
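
Note on the Process hunk: the mono-only assertion and hard-coded channel count are replaced by mapping the voice engine's stereo flag to a channel count before handing the 10 ms block to InsertInGraph. A minimal sketch of that mapping and of how the payload size scales with the channel count; the helper names and the 32 kHz figure are illustrative assumptions, not the actual call chain:

#include <cstddef>
#include <cstdint>

// Stand-in for InsertInGraph<int16_t>(): just reports the payload size in bytes.
size_t InsertInGraphStub(const int16_t* /*aBuffer*/, size_t aFrames, uint32_t aChannels)
{
  // For interleaved int16 capture data, the payload is frames * channels samples.
  return aFrames * aChannels * sizeof(int16_t);
}

size_t ProcessTenMs(const int16_t* aAudio10ms, size_t aFrames, bool aIsStereo)
{
  uint32_t channels = aIsStereo ? 2 : 1; // same mapping as the patch
  return InsertInGraphStub(aAudio10ms, aFrames, channels);
}

int main()
{
  // 10 ms of stereo at 32 kHz: 320 frames * 2 channels of int16 samples.
  int16_t silence[640] = {};
  return ProcessTenMs(silence, 320, /*aIsStereo*/ true) == 1280 ? 0 : 1;
}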