Bug 1156472 - Part 5 - Add MediaEngineWebRTCAudioCaptureSource as a new audio source, and "audioCapture" as a new MediaSource. r=jesup,bz

Paul Adenot 2015-07-24 14:28:16 +02:00
parent d95134e72e
commit 6f08789e18
6 changed files with 194 additions and 56 deletions

@@ -300,7 +300,8 @@ protected:
NS_IMPL_ISUPPORTS(MediaDevice, nsIMediaDevice)
MediaDevice::MediaDevice(MediaEngineSource* aSource, bool aIsVideo)
: mSource(aSource)
: mMediaSource(aSource->GetMediaSource())
, mSource(aSource)
, mIsVideo(aIsVideo)
{
mSource->GetName(mName);
@@ -311,9 +312,7 @@ MediaDevice::MediaDevice(MediaEngineSource* aSource, bool aIsVideo)
VideoDevice::VideoDevice(MediaEngineVideoSource* aSource)
: MediaDevice(aSource, true)
{
mMediaSource = aSource->GetMediaSource();
}
{}
/**
* Helper functions that implement the constraints algorithm from
@@ -439,6 +438,8 @@ MediaDevice::GetMediaSource(nsAString& aMediaSource)
{
if (mMediaSource == dom::MediaSourceEnum::Microphone) {
aMediaSource.Assign(NS_LITERAL_STRING("microphone"));
} else if (mMediaSource == dom::MediaSourceEnum::AudioCapture) {
aMediaSource.Assign(NS_LITERAL_STRING("audioCapture"));
} else if (mMediaSource == dom::MediaSourceEnum::Window) { // this will go away
aMediaSource.Assign(NS_LITERAL_STRING("window"));
} else { // all the rest are shared
@@ -784,11 +785,52 @@ public:
}
}
#endif
// Create a media stream.
nsRefPtr<nsDOMUserMediaStream> trackunion =
nsDOMUserMediaStream::CreateTrackUnionStream(window, mListener,
mAudioSource, mVideoSource);
if (!trackunion || sInShutdown) {
MediaStreamGraph* msg = MediaStreamGraph::GetInstance();
nsRefPtr<SourceMediaStream> stream = msg->CreateSourceStream(nullptr);
nsRefPtr<DOMLocalMediaStream> domStream;
// AudioCapture is a special case here, in the sense that we're not really
// using the audio source and the SourceMediaStream, which act as
// placeholders. We re-route a number of streams internally in the MSG and
// mix them down instead.
if (mAudioSource &&
mAudioSource->GetMediaSource() == dom::MediaSourceEnum::AudioCapture) {
domStream = DOMLocalMediaStream::CreateAudioCaptureStream(window);
msg->RegisterCaptureStreamForWindow(
mWindowID, domStream->GetStream()->AsProcessedStream());
window->SetAudioCapture(true);
} else {
// Normal case: connect the source stream to the track union stream to
// avoid blocking.
nsRefPtr<nsDOMUserMediaStream> trackunion =
nsDOMUserMediaStream::CreateTrackUnionStream(window, mListener,
mAudioSource, mVideoSource);
trackunion->GetStream()->AsProcessedStream()->SetAutofinish(true);
nsRefPtr<MediaInputPort> port = trackunion->GetStream()->AsProcessedStream()->
AllocateInputPort(stream, MediaInputPort::FLAG_BLOCK_OUTPUT);
trackunion->mSourceStream = stream;
trackunion->mPort = port.forget();
// Log the relationship between SourceMediaStream and TrackUnion stream
// Make sure logger starts before capture
AsyncLatencyLogger::Get(true);
LogLatency(AsyncLatencyLogger::MediaStreamCreate,
reinterpret_cast<uint64_t>(stream.get()),
reinterpret_cast<int64_t>(trackunion->GetStream()));
nsCOMPtr<nsIPrincipal> principal;
if (mPeerIdentity) {
principal = nsNullPrincipal::Create();
trackunion->SetPeerIdentity(mPeerIdentity.forget());
} else {
principal = window->GetExtantDoc()->NodePrincipal();
}
trackunion->CombineWithPrincipal(principal);
domStream = trackunion.forget();
}
if (!domStream || sInShutdown) {
nsCOMPtr<nsIDOMGetUserMediaErrorCallback> onFailure = mOnFailure.forget();
LOG(("Returning error for getUserMedia() - no stream"));
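A note on the re-routing comment in the hunk above: the SourceMediaStream really is just a placeholder, and the MSG internally routes every stream playing in the window into a single capture stream. As a conceptual sketch only of what "mix them down" means (this is not the MediaStreamGraph API), assuming mono float buffers:

```cpp
// Conceptual sketch, not Gecko code: sum each registered stream's buffer
// into one mixed capture buffer.
#include <cstddef>
#include <vector>

std::vector<float> MixDown(const std::vector<std::vector<float>>& aInputs,
                           size_t aFrames)
{
  std::vector<float> mixed(aFrames, 0.0f);
  for (const auto& input : aInputs) {
    for (size_t i = 0; i < aFrames && i < input.size(); ++i) {
      mixed[i] += input[i]; // naive sum; a real mixer would also clip/convert
    }
  }
  return mixed;
}
```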
@@ -802,36 +844,6 @@ public:
}
return NS_OK;
}
trackunion->AudioConfig(aec_on, (uint32_t) aec,
agc_on, (uint32_t) agc,
noise_on, (uint32_t) noise,
playout_delay);
MediaStreamGraph* gm = MediaStreamGraph::GetInstance();
nsRefPtr<SourceMediaStream> stream = gm->CreateSourceStream(nullptr);
// connect the source stream to the track union stream to avoid us blocking
trackunion->GetStream()->AsProcessedStream()->SetAutofinish(true);
nsRefPtr<MediaInputPort> port = trackunion->GetStream()->AsProcessedStream()->
AllocateInputPort(stream, MediaInputPort::FLAG_BLOCK_OUTPUT);
trackunion->mSourceStream = stream;
trackunion->mPort = port.forget();
// Log the relationship between SourceMediaStream and TrackUnion stream
// Make sure logger starts before capture
AsyncLatencyLogger::Get(true);
LogLatency(AsyncLatencyLogger::MediaStreamCreate,
reinterpret_cast<uint64_t>(stream.get()),
reinterpret_cast<int64_t>(trackunion->GetStream()));
nsCOMPtr<nsIPrincipal> principal;
if (mPeerIdentity) {
principal = nsNullPrincipal::Create();
trackunion->SetPeerIdentity(mPeerIdentity.forget());
} else {
principal = window->GetExtantDoc()->NodePrincipal();
}
trackunion->CombineWithPrincipal(principal);
// The listener was added at the beginning in an inactive state.
// Activate our listener. We'll call Start() on the source when we get a callback
@@ -841,7 +853,7 @@ public:
// Note: includes JS callbacks; must be released on MainThread
TracksAvailableCallback* tracksAvailableCallback =
new TracksAvailableCallback(mManager, mOnSuccess, mWindowID, trackunion);
new TracksAvailableCallback(mManager, mOnSuccess, mWindowID, domStream);
mListener->AudioConfig(aec_on, (uint32_t) aec,
agc_on, (uint32_t) agc,
@@ -852,11 +864,11 @@ public:
// because that can take a while.
// Pass ownership of domStream to the MediaOperationTask
// to ensure it's kept alive until the MediaOperationTask runs (at least).
MediaManager::PostTask(FROM_HERE,
new MediaOperationTask(MEDIA_START, mListener, trackunion,
tracksAvailableCallback,
mAudioSource, mVideoSource, false, mWindowID,
mOnFailure.forget()));
MediaManager::PostTask(
FROM_HERE, new MediaOperationTask(MEDIA_START, mListener, domStream,
tracksAvailableCallback, mAudioSource,
mVideoSource, false, mWindowID,
mOnFailure.forget()));
// We won't need mOnFailure now.
mOnFailure = nullptr;
@@ -2075,7 +2087,7 @@ StopSharingCallback(MediaManager *aThis,
listener->Invalidate();
}
listener->Remove();
listener->StopScreenWindowSharing();
listener->StopSharing();
}
aListeners->Clear();
aThis->RemoveWindowID(aWindowID);
@@ -2398,7 +2410,7 @@ MediaManager::Observe(nsISupports* aSubject, const char* aTopic,
uint64_t windowID = PromiseFlatString(Substring(data, strlen("screen:"))).ToInteger64(&rv);
MOZ_ASSERT(NS_SUCCEEDED(rv));
if (NS_SUCCEEDED(rv)) {
LOG(("Revoking Screeen/windowCapture access for window %llu", windowID));
LOG(("Revoking Screen/windowCapture access for window %llu", windowID));
StopScreensharing(windowID);
}
} else {
@@ -2579,7 +2591,7 @@ StopScreensharingCallback(MediaManager *aThis,
if (aListeners) {
auto length = aListeners->Length();
for (size_t i = 0; i < length; ++i) {
aListeners->ElementAt(i)->StopScreenWindowSharing();
aListeners->ElementAt(i)->StopSharing();
}
}
}
@@ -2741,7 +2753,7 @@ GetUserMediaCallbackMediaStreamListener::Invalidate()
// Doesn't kill audio
// XXX refactor to combine with Invalidate()?
void
GetUserMediaCallbackMediaStreamListener::StopScreenWindowSharing()
GetUserMediaCallbackMediaStreamListener::StopSharing()
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
if (mVideoSource && !mStopped &&
@@ -2754,6 +2766,13 @@ GetUserMediaCallbackMediaStreamListener::StopScreenWindowSharing()
this, nullptr, nullptr,
nullptr, mVideoSource,
mFinished, mWindowID, nullptr));
} else if (mAudioSource &&
mAudioSource->GetMediaSource() == dom::MediaSourceEnum::AudioCapture) {
nsCOMPtr<nsPIDOMWindow> window = nsGlobalWindow::GetInnerWindowWithId(mWindowID);
MOZ_ASSERT(window);
window->SetAudioCapture(false);
MediaStreamGraph::GetInstance()->UnregisterCaptureStreamForWindow(mWindowID);
mStream->Destroy();
}
}
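The StopSharing() hunk above is the teardown half of the AudioCapture path: it must mirror the setup done at getUserMedia() time (RegisterCaptureStreamForWindow plus SetAudioCapture(true)). A hypothetical sketch, not Gecko code, of the per-window pairing invariant the two code paths maintain by hand:

```cpp
// Hypothetical sketch: registration and unregistration of capture state
// must pair up, keyed by the same window ID.
#include <cstdint>
#include <set>

class CaptureRegistry {
  std::set<uint64_t> mWindows; // window IDs currently capturing audio
public:
  void Register(uint64_t aWindowID) { mWindows.insert(aWindowID); }
  void Unregister(uint64_t aWindowID) { mWindows.erase(aWindowID); }
  bool IsCapturing(uint64_t aWindowID) const {
    return mWindows.count(aWindowID) != 0;
  }
};
```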

@@ -103,7 +103,7 @@ public:
return mStream->AsSourceStream();
}
void StopScreenWindowSharing();
void StopSharing();
void StopTrack(TrackID aID, bool aIsAudio);

@@ -291,6 +291,13 @@ MediaEngineWebRTC::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource,
// We spawn threads to handle gUM runnables, so we must protect the member vars
MutexAutoLock lock(mMutex);
if (aMediaSource == dom::MediaSourceEnum::AudioCapture) {
nsRefPtr<MediaEngineWebRTCAudioCaptureSource> audioCaptureSource =
new MediaEngineWebRTCAudioCaptureSource(nullptr);
aASources->AppendElement(audioCaptureSource);
return;
}
#ifdef MOZ_WIDGET_ANDROID
jobject context = mozilla::AndroidBridge::Bridge()->GetGlobalContextRef();
@@ -358,7 +365,7 @@ MediaEngineWebRTC::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource,
strcpy(uniqueId,deviceName); // safe given assert and initialization/error-check
}
nsRefPtr<MediaEngineWebRTCMicrophoneSource> aSource;
nsRefPtr<MediaEngineAudioSource> aSource;
NS_ConvertUTF8toUTF16 uuid(uniqueId);
if (mAudioSources.Get(uuid, getter_AddRefs(aSource))) {
// We've already seen this device, just append.
@@ -384,9 +391,8 @@ ClearVideoSource (const nsAString&, // unused
}
static PLDHashOperator
ClearAudioSource (const nsAString&, // unused
MediaEngineWebRTCAudioSource* aData,
void *userArg)
ClearAudioSource(const nsAString &, // unused
MediaEngineAudioSource *aData, void *userArg)
{
if (aData) {
aData->Shutdown();

@@ -133,6 +133,67 @@ private:
void GetCapability(size_t aIndex, webrtc::CaptureCapability& aOut) override;
};
class MediaEngineWebRTCAudioCaptureSource : public MediaEngineAudioSource
{
public:
NS_DECL_THREADSAFE_ISUPPORTS
explicit MediaEngineWebRTCAudioCaptureSource(const char* aUuid)
: MediaEngineAudioSource(kReleased)
{
}
void GetName(nsAString& aName) override;
void GetUUID(nsACString& aUUID) override;
nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
const MediaEnginePrefs& aPrefs,
const nsString& aDeviceId) override
{
// Nothing to do here, everything is managed in MediaManager.cpp
return NS_OK;
}
nsresult Deallocate() override
{
// Nothing to do here, everything is managed in MediaManager.cpp
return NS_OK;
}
void Shutdown() override
{
// Nothing to do here, everything is managed in MediaManager.cpp
}
nsresult Start(SourceMediaStream* aMediaStream, TrackID aId) override;
nsresult Stop(SourceMediaStream* aMediaStream, TrackID aId) override;
void SetDirectListeners(bool aDirect) override
{}
nsresult Config(bool aEchoOn, uint32_t aEcho, bool aAgcOn,
uint32_t aAGC, bool aNoiseOn, uint32_t aNoise,
int32_t aPlayoutDelay) override
{
return NS_OK;
}
void NotifyPull(MediaStreamGraph* aGraph, SourceMediaStream* aSource,
TrackID aID, StreamTime aDesiredTime) override
{}
const dom::MediaSourceEnum GetMediaSource() override
{
return dom::MediaSourceEnum::AudioCapture;
}
bool IsFake() override
{
return false;
}
nsresult TakePhoto(PhotoCallback* aCallback) override
{
return NS_ERROR_NOT_IMPLEMENTED;
}
uint32_t GetBestFitnessDistance(
const nsTArray<const dom::MediaTrackConstraintSet*>& aConstraintSets,
const nsString& aDeviceId) override;
protected:
virtual ~MediaEngineWebRTCAudioCaptureSource() { Shutdown(); }
nsCString mUUID;
};
class MediaEngineWebRTCMicrophoneSource : public MediaEngineAudioSource,
public webrtc::VoEMediaProcess,
private MediaConstraintsHelper
@@ -297,8 +358,7 @@ private:
// Store devices we've already seen in a hashtable for quick return.
// Maps UUID to MediaEngineSource (one set for audio, one for video).
nsRefPtrHashtable<nsStringHashKey, MediaEngineVideoSource> mVideoSources;
nsRefPtrHashtable<nsStringHashKey, MediaEngineWebRTCMicrophoneSource>
mAudioSources;
nsRefPtrHashtable<nsStringHashKey, MediaEngineAudioSource> mAudioSources;
};
}
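The hunks above widen mAudioSources (and, in the previous file, ClearAudioSource) from the concrete MediaEngineWebRTCMicrophoneSource to the MediaEngineAudioSource base type because the table must now hold two concrete source kinds. A minimal polymorphic-container sketch, with illustrative stand-in names rather than the Gecko classes:

```cpp
// Sketch only: one table keyed by device UUID holding either source kind.
#include <map>
#include <memory>
#include <string>

struct AudioSource {
  virtual ~AudioSource() = default;
  virtual void Shutdown() = 0;
};
struct MicrophoneSource : AudioSource { void Shutdown() override {} };
struct AudioCaptureSource : AudioSource { void Shutdown() override {} };

int main()
{
  std::map<std::string, std::unique_ptr<AudioSource>> sources;
  sources["mic-uuid"] = std::make_unique<MicrophoneSource>();
  sources["capture-uuid"] = std::make_unique<AudioCaptureSource>();
  for (auto& entry : sources) {
    entry.second->Shutdown(); // dispatches to the concrete type
  }
  return 0;
}
```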

@@ -44,6 +44,7 @@ extern PRLogModuleInfo* GetMediaManagerLog();
* WebRTC microphone source.
*/
NS_IMPL_ISUPPORTS0(MediaEngineWebRTCMicrophoneSource)
NS_IMPL_ISUPPORTS0(MediaEngineWebRTCAudioCaptureSource)
// XXX temp until MSG supports registration
StaticRefPtr<AudioOutputObserver> gFarendObserver;
@@ -620,4 +621,55 @@ MediaEngineWebRTCMicrophoneSource::Process(int channel,
return;
}
void
MediaEngineWebRTCAudioCaptureSource::GetName(nsAString &aName)
{
aName.AssignLiteral("AudioCapture");
}
void
MediaEngineWebRTCAudioCaptureSource::GetUUID(nsACString &aUUID)
{
nsID uuid;
char uuidBuffer[NSID_LENGTH];
nsCString asciiString;
ErrorResult rv;
rv = nsContentUtils::GenerateUUIDInPlace(uuid);
if (rv.Failed()) {
aUUID.AssignLiteral("");
return;
}
uuid.ToProvidedString(uuidBuffer);
asciiString.AssignASCII(uuidBuffer);
// Remove {} and the null terminator
aUUID.Assign(Substring(asciiString, 1, NSID_LENGTH - 3));
}
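For reference, the Substring(1, NSID_LENGTH - 3) arithmetic above: assuming Gecko's NSID_LENGTH of 39 (the 38 characters of "{8-4-4-4-12}" plus the null terminator), it keeps exactly the 36-character UUID between the braces. A standalone sketch of the same arithmetic:

```cpp
// Standalone sketch of the brace-trimming, assuming NSID_LENGTH == 39.
#include <cassert>
#include <cstddef>
#include <string>

int main()
{
  const std::size_t NSID_LENGTH = 39; // 38 chars + null terminator
  const std::string withBraces = "{12345678-1234-1234-1234-123456789012}";
  // Start at index 1 (skip '{'), take NSID_LENGTH - 3 == 36 characters,
  // which drops both braces and the terminator counted in NSID_LENGTH.
  const std::string uuid = withBraces.substr(1, NSID_LENGTH - 3);
  assert(uuid == "12345678-1234-1234-1234-123456789012");
  return 0;
}
```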
nsresult
MediaEngineWebRTCAudioCaptureSource::Start(SourceMediaStream *aMediaStream,
TrackID aId)
{
aMediaStream->AddTrack(aId, 0, new AudioSegment());
return NS_OK;
}
nsresult
MediaEngineWebRTCAudioCaptureSource::Stop(SourceMediaStream *aMediaStream,
TrackID aId)
{
aMediaStream->EndAllTrackAndFinish();
return NS_OK;
}
uint32_t
MediaEngineWebRTCAudioCaptureSource::GetBestFitnessDistance(
const nsTArray<const dom::MediaTrackConstraintSet*>& aConstraintSets,
const nsString& aDeviceId)
{
// There is only one way of capturing audio for now, and it's always adequate.
return 0;
}
}
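GetBestFitnessDistance() returning 0 means the capture source is always a perfect fit for any constraint set, so constraint filtering can never exclude it. An illustrative sketch of how such a distance is typically consumed, under the spec's convention that 0 is a perfect fit and UINT32_MAX means the constraints cannot be satisfied (PickBest is a hypothetical helper, not MediaManager code):

```cpp
// Sketch only: choose the candidate with the smallest fitness distance.
#include <algorithm>
#include <cstdint>
#include <vector>

struct Candidate {
  uint32_t distance; // 0 == perfect fit, UINT32_MAX == cannot satisfy
};

bool PickBest(const std::vector<Candidate>& aCandidates, Candidate& aOut)
{
  auto it = std::min_element(
      aCandidates.begin(), aCandidates.end(),
      [](const Candidate& a, const Candidate& b) {
        return a.distance < b.distance;
      });
  if (it == aCandidates.end() || it->distance == UINT32_MAX) {
    return false; // no candidate can satisfy the constraints
  }
  aOut = *it;
  return true;
}
```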

@@ -25,6 +25,7 @@ enum MediaSourceEnum {
"window",
"browser",
"microphone",
"audioCapture",
"other"
};