From 65f61e22ece26034a86aed7d79e5635ffb4eddad Mon Sep 17 00:00:00 2001
From: Alex Chronopoulos <achronop@gmail.com>
Date: Mon, 29 May 2017 13:26:57 +0300
Subject: [PATCH] Bug 971528 - Expect stereo input in
 MediaEngineWebRTCMicrophoneSource. r=padenot

MozReview-Commit-ID: 5nJAJw7WTEe

--HG--
extra : rebase_source : 11e364e927328a772505578ecd0849074d599dab
---
 dom/media/webrtc/MediaEngineWebRTCAudio.cpp | 37 ++++++++++++++++++++++++++++---------
 1 file changed, 28 insertions(+), 9 deletions(-)

diff --git a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
index 2e52a26ffaef..324beb8458e6 100644
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -600,10 +600,6 @@ MediaEngineWebRTCMicrophoneSource::InsertInGraph(const T* aBuffer,
     if (!mSources[i]) {
       continue;
     }
-    RefPtr<SharedBuffer> buffer =
-      SharedBuffer::Create(aFrames * aChannels * sizeof(T));
-    PodCopy(static_cast<T*>(buffer->Data()),
-            aBuffer, aFrames * aChannels);
 
     TimeStamp insertTime;
     // Make sure we include the stream and the track.
@@ -612,12 +608,35 @@ MediaEngineWebRTCMicrophoneSource::InsertInGraph(const T* aBuffer,
     LATENCY_STREAM_ID(mSources[i].get(), mTrackID),
                       (i+1 < len) ? 0 : 1, insertTime);
 
+    // Bug 971528 - Support stereo capture in gUM
+    MOZ_ASSERT(aChannels == 1 || aChannels == 2,
+               "GraphDriver only supports mono and stereo audio for now");
+
     nsAutoPtr<AudioSegment> segment(new AudioSegment());
-    AutoTArray<const T*, 1> channels;
-    // XXX Bug 971528 - Support stereo capture in gUM
-    MOZ_ASSERT(aChannels == 1,
-               "GraphDriver only supports us stereo audio for now");
-    channels.AppendElement(static_cast<const T*>(buffer->Data()));
+    RefPtr<SharedBuffer> buffer =
+      SharedBuffer::Create(aFrames * aChannels * sizeof(T));
+    AutoTArray<const T*, 8> channels;
+    channels.SetLength(aChannels);
+    if (aChannels == 1) {
+      PodCopy(static_cast<T*>(buffer->Data()), aBuffer, aFrames);
+      channels.AppendElement(static_cast<T*>(buffer->Data()));
+    } else {
+      AutoTArray<T*, 8> write_channels;
+      write_channels.SetLength(aChannels);
+      T * samples = static_cast<T*>(buffer->Data());
+
+      size_t offset = 0;
+      for(uint32_t i = 0; i < aChannels; ++i) {
+        channels[i] = write_channels[i] = samples + offset;
+        offset += aFrames;
+      }
+
+      DeinterleaveAndConvertBuffer(aBuffer,
+                                   aFrames,
+                                   aChannels,
+                                   write_channels.Elements());
+    }
+
     segment->AppendFrames(buffer.forget(), channels, aFrames,
                           mPrincipalHandles[i]);
     segment->GetStartTime(insertTime);