Bug 1039529: Add screen and window sharing booleans to MediaCaptureWindowState r=pkerr,gcp
commit 5a8e25d582
parent cf6bdfb1db
@@ -40,6 +40,16 @@ enum {
   kAudioTrack = 2
 };
 
+// includes everything from dom::MediaSourceEnum (really video sources), plus audio sources
+enum MediaSourceType {
+  Camera = (int) dom::MediaSourceEnum::Camera,
+  Screen = (int) dom::MediaSourceEnum::Screen,
+  Application = (int) dom::MediaSourceEnum::Application,
+  Window, // = (int) dom::MediaSourceEnum::Window, // XXX bug 1038926
+  //Browser = (int) dom::MediaSourceEnum::Browser, // proposed in WG, unclear if it's useful
+  Microphone
+};
+
 class MediaEngine
 {
 public:
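
Note: the new enum is deliberately value-compatible with the WebIDL-generated dom::MediaSourceEnum, which is what lets later hunks convert with a plain integer cast. A self-contained sketch of that relationship, using stand-in definitions rather than the real Gecko headers (the helper function is hypothetical and not part of the patch):

#include <cstdint>

namespace dom {
// Stand-in for the WebIDL-generated enum; values are illustrative only.
enum class MediaSourceEnum : uint32_t { Camera, Screen, Application, Window, Browser };
}

enum MediaSourceType {
  Camera      = (int) dom::MediaSourceEnum::Camera,
  Screen      = (int) dom::MediaSourceEnum::Screen,
  Application = (int) dom::MediaSourceEnum::Application,
  Window,      // tracks dom::MediaSourceEnum::Window once bug 1038926 lands
  Microphone   // audio-only, so the mapping is one-way (no dom:: counterpart)
};

// Hypothetical helper (not in the patch): the conversion the patch performs
// inline as "(MediaSourceType)((int)aSource)".
inline MediaSourceType ToMediaSourceType(dom::MediaSourceEnum aSource) {
  return (MediaSourceType)((int) aSource);
}

static_assert((int) dom::MediaSourceEnum::Screen == (int) Screen,
              "MediaSourceType must stay value-compatible with dom::MediaSourceEnum");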
@@ -55,12 +65,12 @@ public:
 
   /* Populate an array of video sources in the nsTArray. Also include devices
    * that are currently unavailable. */
-  virtual void EnumerateVideoDevices(dom::MediaSourceEnum,
+  virtual void EnumerateVideoDevices(MediaSourceType,
                                      nsTArray<nsRefPtr<MediaEngineVideoSource> >*) = 0;
 
   /* Populate an array of audio sources in the nsTArray. Also include devices
    * that are currently unavailable. */
-  virtual void EnumerateAudioDevices(dom::MediaSourceEnum,
+  virtual void EnumerateAudioDevices(MediaSourceType,
                                      nsTArray<nsRefPtr<MediaEngineAudioSource> >*) = 0;
 
 protected:
@@ -119,6 +129,9 @@ public:
    */
   virtual bool IsFake() = 0;
 
+  /* Returns the type of media source (camera, microphone, screen, window, etc) */
+  virtual const MediaSourceType GetMediaSource() = 0;
+
   /* Return false if device is currently allocated or started */
   bool IsAvailable() {
     if (mState == kAllocated || mState == kStarted) {
@@ -185,8 +198,8 @@ class MediaEngineVideoSource : public MediaEngineSource
 public:
   virtual ~MediaEngineVideoSource() {}
 
-  virtual const dom::MediaSourceEnum GetMediaSource() {
-    return dom::MediaSourceEnum::Camera;
+  virtual const MediaSourceType GetMediaSource() {
+    return MediaSourceType::Camera;
   }
   /* This call reserves but does not start the device. */
   virtual nsresult Allocate(const VideoTrackConstraintsN &aConstraints,
@@ -478,12 +478,12 @@ MediaEngineDefaultAudioSource::Notify(nsITimer* aTimer)
 }
 
 void
-MediaEngineDefault::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
+MediaEngineDefault::EnumerateVideoDevices(MediaSourceType aMediaSource,
                                           nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources) {
   MutexAutoLock lock(mMutex);
 
   // only supports camera sources (for now). See Bug 1038241
-  if (aMediaSource != dom::MediaSourceEnum::Camera) {
+  if (aMediaSource != MediaSourceType::Camera) {
     return;
   }
 
@@ -498,7 +498,7 @@ MediaEngineDefault::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
 }
 
 void
-MediaEngineDefault::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource,
+MediaEngineDefault::EnumerateAudioDevices(MediaSourceType aMediaSource,
                                           nsTArray<nsRefPtr<MediaEngineAudioSource> >* aASources) {
   MutexAutoLock lock(mMutex);
   int32_t len = mASources.Length();
@@ -61,6 +61,10 @@ public:
     return true;
   }
 
+  virtual const MediaSourceType GetMediaSource() {
+    return MediaSourceType::Camera;
+  }
+
   NS_DECL_THREADSAFE_ISUPPORTS
   NS_DECL_NSITIMERCALLBACK
 
@@ -117,6 +121,10 @@ public:
     return true;
   }
 
+  virtual const MediaSourceType GetMediaSource() {
+    return MediaSourceType::Microphone;
+  }
+
   NS_DECL_THREADSAFE_ISUPPORTS
   NS_DECL_NSITIMERCALLBACK
 
@@ -138,9 +146,9 @@ public:
     : mMutex("mozilla::MediaEngineDefault")
   {}
 
-  virtual void EnumerateVideoDevices(dom::MediaSourceEnum,
+  virtual void EnumerateVideoDevices(MediaSourceType,
                                      nsTArray<nsRefPtr<MediaEngineVideoSource> >*);
-  virtual void EnumerateAudioDevices(dom::MediaSourceEnum,
+  virtual void EnumerateAudioDevices(MediaSourceType,
                                      nsTArray<nsRefPtr<MediaEngineAudioSource> >*);
 
 private:
@@ -73,14 +73,14 @@ MediaEngineWebRTC::MediaEngineWebRTC(MediaEnginePrefs &aPrefs)
 }
 
 void
-MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
+MediaEngineWebRTC::EnumerateVideoDevices(MediaSourceType aMediaSource,
                                          nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources)
 {
   // We spawn threads to handle gUM runnables, so we must protect the member vars
   MutexAutoLock lock(mMutex);
 
 #ifdef MOZ_B2G_CAMERA
-  if (aMediaSource != dom::MediaSourceEnum::Camera) {
+  if (aMediaSource != MediaSourceType::Camera) {
     // only supports camera sources
     return;
   }
@@ -138,7 +138,7 @@ MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
 #endif
 
   switch (aMediaSource) {
-    case dom::MediaSourceEnum::Window:
+    case MediaSourceType::Window:
       mWinEngineConfig.Set<webrtc::CaptureDeviceInfo>(
         new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Window));
       if (!mWinEngine) {
@@ -149,7 +149,7 @@ MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
       videoEngine = mWinEngine;
       videoEngineInit = &mWinEngineInit;
       break;
-    case dom::MediaSourceEnum::Application:
+    case MediaSourceType::Application:
      mAppEngineConfig.Set<webrtc::CaptureDeviceInfo>(
        new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Application));
      if (!mAppEngine) {
@@ -160,7 +160,7 @@ MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
      videoEngine = mAppEngine;
      videoEngineInit = &mAppEngineInit;
      break;
-    case dom::MediaSourceEnum::Screen:
+    case MediaSourceType::Screen:
      mScreenEngineConfig.Set<webrtc::CaptureDeviceInfo>(
        new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Screen));
      if (!mScreenEngine) {
@@ -171,7 +171,7 @@ MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
      videoEngine = mScreenEngine;
      videoEngineInit = &mScreenEngineInit;
      break;
-    case dom::MediaSourceEnum::Camera:
+    case MediaSourceType::Camera:
      // fall through
    default:
      if (!mVideoEngine) {
@@ -271,7 +271,7 @@ MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
 }
 
 void
-MediaEngineWebRTC::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource,
+MediaEngineWebRTC::EnumerateAudioDevices(MediaSourceType aMediaSource,
                                          nsTArray<nsRefPtr<MediaEngineAudioSource> >* aASources)
 {
   ScopedCustomReleasePtr<webrtc::VoEBase> ptrVoEBase;
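
Note: with the enumerate entry points keyed on MediaSourceType, the screen, window, and application capture engines above are selected by the requested source type rather than by a separate API. A hedged caller-side sketch, assuming only the signatures shown in this diff (the backend pointer and the picker logic are hypothetical):

// Sketch: ask a backend (e.g. MediaEngineWebRTC) for screen-capture candidates.
nsTArray<nsRefPtr<MediaEngineVideoSource> > sources;
backend->EnumerateVideoDevices(MediaSourceType::Screen, &sources);

for (uint32_t i = 0; i < sources.Length(); ++i) {
  // Each source now self-identifies, so callers can filter mixed lists.
  if (sources[i]->GetMediaSource() == MediaSourceType::Screen) {
    // ... offer sources[i] to a screen-sharing picker (hypothetical) ...
  }
}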
@@ -96,7 +96,7 @@ class MediaEngineWebRTCVideoSource : public MediaEngineVideoSource
 public:
 #ifdef MOZ_B2G_CAMERA
   MediaEngineWebRTCVideoSource(int aIndex,
-                               dom::MediaSourceEnum aMediaSource = dom::MediaSourceEnum::Camera)
+                               MediaSourceType aMediaSource = MediaSourceType::Camera)
     : mCameraControl(nullptr)
     , mCallbackMonitor("WebRTCCamera.CallbackMonitor")
     , mRotation(0)
@@ -127,7 +127,7 @@ public:
   virtual bool IsTextureSupported() { return false; }
 
   MediaEngineWebRTCVideoSource(webrtc::VideoEngine* aVideoEnginePtr, int aIndex,
-                               dom::MediaSourceEnum aMediaSource = dom::MediaSourceEnum::Camera)
+                               MediaSourceType aMediaSource = MediaSourceType::Camera)
     : mVideoEngine(aVideoEnginePtr)
     , mCaptureIndex(aIndex)
     , mFps(-1)
@@ -169,7 +169,7 @@ public:
     return false;
   }
 
-  virtual const dom::MediaSourceEnum GetMediaSource() {
+  virtual const MediaSourceType GetMediaSource() {
     return mMediaSource;
   }
 
@@ -247,7 +247,7 @@ private:
   int mCaptureIndex;
   int mFps; // Track rate (30 fps by default)
   int mMinFps; // Min rate we want to accept
-  dom::MediaSourceEnum mMediaSource; // source of media (camera | application | screen)
+  MediaSourceType mMediaSource; // source of media (camera | application | screen)
 
   // mMonitor protects mImage access/changes, and transitions of mState
   // from kStarted to kStopped (which are combined with EndTrack() and
@@ -327,6 +327,10 @@ public:
     return false;
   }
 
+  virtual const MediaSourceType GetMediaSource() {
+    return MediaSourceType::Microphone;
+  }
+
   // VoEMediaProcess.
   void Process(int channel, webrtc::ProcessingTypes type,
                int16_t audio10ms[], int length,
@@ -390,9 +394,9 @@ public:
   // before invoking Shutdown on this class.
   void Shutdown();
 
-  virtual void EnumerateVideoDevices(dom::MediaSourceEnum,
+  virtual void EnumerateVideoDevices(MediaSourceType,
                                      nsTArray<nsRefPtr<MediaEngineVideoSource> >*);
-  virtual void EnumerateAudioDevices(dom::MediaSourceEnum,
+  virtual void EnumerateAudioDevices(MediaSourceType,
                                      nsTArray<nsRefPtr<MediaEngineAudioSource> >*);
 private:
   ~MediaEngineWebRTC() {
@@ -46,6 +46,8 @@ public:
     // treat MediaSource special because it's always required
     mRequired.mMediaSource = mMediaSource;
 
+    // we guarantee (int) equivalence from MediaSourceEnum -> MediaSourceType
+    // (but not the other way)
     if (mMediaSource != dom::MediaSourceEnum::Camera && mAdvanced.WasPassed()) {
       // iterate through advanced, forcing mediaSource to match "root"
       auto& array = mAdvanced.Value();
@@ -311,7 +311,6 @@ VideoDevice::VideoDevice(MediaEngineVideoSource* aSource)
     mFacingMode = dom::VideoFacingModeEnum::User;
   }
 
-  // dom::MediaSourceEnum::Camera;
   mMediaSource = aSource->GetMediaSource();
 }
 
@@ -367,9 +366,14 @@ MediaDevice::GetFacingMode(nsAString& aFacingMode)
 NS_IMETHODIMP
 MediaDevice::GetMediaSource(nsAString& aMediaSource)
 {
 
-  aMediaSource.Assign(NS_ConvertUTF8toUTF16(
-    dom::MediaSourceEnumValues::strings[uint32_t(mMediaSource)].value));
+  if (mMediaSource == MediaSourceType::Microphone) {
+    aMediaSource.Assign(NS_LITERAL_STRING("microphone"));
+  } else if (mMediaSource == MediaSourceType::Window) { // this will go away
+    aMediaSource.Assign(NS_LITERAL_STRING("window"));
+  } else { // all the rest are shared
+    aMediaSource.Assign(NS_ConvertUTF8toUTF16(
+      dom::MediaSourceEnumValues::strings[uint32_t(mMediaSource)].value));
+  }
   return NS_OK;
 }
 
@@ -759,7 +763,7 @@ template<class SourceType, class ConstraintsType>
 static SourceSet *
 GetSources(MediaEngine *engine,
   ConstraintsType &aConstraints,
-  void (MediaEngine::* aEnumerate)(dom::MediaSourceEnum, nsTArray<nsRefPtr<SourceType> >*),
+  void (MediaEngine::* aEnumerate)(MediaSourceType, nsTArray<nsRefPtr<SourceType> >*),
  const char* media_device_name = nullptr)
 {
   ScopedDeletePtr<SourceSet> result(new SourceSet);
@@ -770,7 +774,8 @@ static SourceSet *
   SourceSet candidateSet;
   {
     nsTArray<nsRefPtr<SourceType> > sources;
-    (engine->*aEnumerate)(aConstraints.mMediaSource, &sources);
+    // all MediaSourceEnums are contained in MediaSourceType
+    (engine->*aEnumerate)((MediaSourceType)((int)aConstraints.mMediaSource), &sources);
    /**
     * We're allowing multiple tabs to access the same camera for parity
     * with Chrome. See bug 811757 for some of the issues surrounding
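
Note: the GetSources() change above is the one place where a dom::MediaSourceEnum coming from content constraints crosses into the engine's MediaSourceType, relying on the (int) equivalence the new enum guarantees. A self-contained sketch of that pointer-to-member dispatch pattern, with stand-in types in place of the Gecko classes:

#include <vector>

enum class MediaSourceEnum { Camera, Screen };               // stand-in for dom::MediaSourceEnum
enum MediaSourceType { Camera = 0, Screen = 1, Microphone }; // stand-in for the engine enum

struct Source { MediaSourceType type; };

struct Engine {
  void EnumerateVideoDevices(MediaSourceType aType, std::vector<Source>* aOut) {
    aOut->push_back(Source{aType});                          // pretend one device matched
  }
};

// Same shape as GetSources(): the enumerate method is passed as a member pointer,
// and the constraint enum is narrowed to MediaSourceType with an integer cast.
template <typename SourceT>
std::vector<SourceT>
EnumerateSketch(Engine* engine, MediaSourceEnum constraintSource,
                void (Engine::*aEnumerate)(MediaSourceType, std::vector<SourceT>*)) {
  std::vector<SourceT> sources;
  (engine->*aEnumerate)((MediaSourceType)((int) constraintSource), &sources);
  return sources;
}

// Usage: EnumerateSketch(&engine, MediaSourceEnum::Screen, &Engine::EnumerateVideoDevices);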
@@ -1914,7 +1919,8 @@ WindowsHashToArrayFunc (const uint64_t& aId,
     for (uint32_t i = 0; i < length; ++i) {
       nsRefPtr<GetUserMediaCallbackMediaStreamListener> listener =
         aData->ElementAt(i);
-      if (listener->CapturingVideo() || listener->CapturingAudio()) {
+      if (listener->CapturingVideo() || listener->CapturingAudio() ||
+          listener->CapturingScreen() || listener->CapturingWindow()) {
        capturing = true;
        break;
      }
@@ -1945,24 +1951,29 @@ MediaManager::GetActiveMediaCaptureWindows(nsISupportsArray **aArray)
 
 NS_IMETHODIMP
 MediaManager::MediaCaptureWindowState(nsIDOMWindow* aWindow, bool* aVideo,
-                                      bool* aAudio)
+                                      bool* aAudio, bool *aScreenShare,
+                                      bool* aWindowShare)
 {
   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
   *aVideo = false;
   *aAudio = false;
+  *aScreenShare = false;
+  *aWindowShare = false;
 
-  nsresult rv = MediaCaptureWindowStateInternal(aWindow, aVideo, aAudio);
+  nsresult rv = MediaCaptureWindowStateInternal(aWindow, aVideo, aAudio, aScreenShare, aWindowShare);
 #ifdef DEBUG
   nsCOMPtr<nsPIDOMWindow> piWin = do_QueryInterface(aWindow);
-  LOG(("%s: window %lld capturing %s %s", __FUNCTION__, piWin ? piWin->WindowID() : -1,
-       *aVideo ? "video" : "", *aAudio ? "audio" : ""));
+  LOG(("%s: window %lld capturing %s %s %s %s", __FUNCTION__, piWin ? piWin->WindowID() : -1,
+       *aVideo ? "video" : "", *aAudio ? "audio" : "",
+       *aScreenShare ? "screenshare" : "", *aWindowShare ? "windowshare" : ""));
 #endif
   return rv;
 }
 
 nsresult
 MediaManager::MediaCaptureWindowStateInternal(nsIDOMWindow* aWindow, bool* aVideo,
-                                              bool* aAudio)
+                                              bool* aAudio, bool *aScreenShare,
+                                              bool* aWindowShare)
 {
   // We need to return the union of all streams in all innerwindows that
   // correspond to that outerwindow.
@@ -1991,8 +2002,11 @@ MediaManager::MediaCaptureWindowStateInternal(nsIDOMWindow* aWindow, bool* aVide
       if (listener->CapturingAudio()) {
         *aAudio = true;
       }
-      if (*aAudio && *aVideo) {
-        return NS_OK; // no need to continue iterating
+      if (listener->CapturingScreen()) {
+        *aScreenShare = true;
+      }
+      if (listener->CapturingWindow()) {
+        *aWindowShare = true;
       }
     }
   }
@@ -2008,10 +2022,7 @@ MediaManager::MediaCaptureWindowStateInternal(nsIDOMWindow* aWindow, bool* aVide
       docShell->GetChildAt(i, getter_AddRefs(item));
       nsCOMPtr<nsPIDOMWindow> win = item ? item->GetWindow() : nullptr;
 
-      MediaCaptureWindowStateInternal(win, aVideo, aAudio);
-      if (*aAudio && *aVideo) {
-        return NS_OK; // no need to continue iterating
-      }
+      MediaCaptureWindowStateInternal(win, aVideo, aAudio, aScreenShare, aWindowShare);
     }
   }
 }
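
Note the behavioural consequence of the CapturingVideo()/CapturingScreen()/CapturingWindow() hunks below: a screen or window capture no longer reports as plain video, since CapturingVideo() is now restricted to Camera sources, and the early exit on (audio && video) is removed so all four flags get filled in across descendant windows. A hedged caller-side sketch, assuming MediaManager::Get() as the usual way to reach the singleton and an nsIDOMWindow* aWindow already in scope:

// Sketch only: query the per-window capture state with the widened signature.
bool video = false, audio = false, screenShare = false, windowShare = false;
nsresult rv = MediaManager::Get()->MediaCaptureWindowState(aWindow, &video, &audio,
                                                           &screenShare, &windowShare);
if (NS_SUCCEEDED(rv) && (screenShare || windowShare)) {
  // e.g. show a "sharing your screen/window" indicator for this tab (hypothetical UI hook)
}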
@@ -105,6 +105,7 @@ public:
   {
     NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
     return mVideoSource && !mStopped &&
+           mVideoSource->GetMediaSource() == MediaSourceType::Camera &&
           (!mVideoSource->IsFake() ||
            Preferences::GetBool("media.navigator.permission.fake"));
   }
@@ -115,6 +116,18 @@ public:
          (!mAudioSource->IsFake() ||
           Preferences::GetBool("media.navigator.permission.fake"));
   }
+  bool CapturingScreen()
+  {
+    NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
+    return mVideoSource && !mStopped &&
+           mVideoSource->GetMediaSource() == MediaSourceType::Screen;
+  }
+  bool CapturingWindow()
+  {
+    NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
+    return mVideoSource && !mStopped &&
+           mVideoSource->GetMediaSource() == MediaSourceType::Window;
+  }
 
   void SetStopped()
   {
@@ -486,7 +499,7 @@ protected:
   nsString mID;
   bool mHasFacingMode;
   dom::VideoFacingModeEnum mFacingMode;
-  dom::MediaSourceEnum mMediaSource;
+  MediaSourceType mMediaSource;
   nsRefPtr<MediaEngineSource> mSource;
 };
 
@@ -579,7 +592,8 @@ private:
   ~MediaManager() {}
 
   nsresult MediaCaptureWindowStateInternal(nsIDOMWindow* aWindow, bool* aVideo,
-                                           bool* aAudio);
+                                           bool* aAudio, bool *aScreenShare,
+                                           bool* aWindowShare);
 
   void StopMediaStreams();
 
@@ -12,12 +12,13 @@ interface nsIDOMWindow;
 #define MEDIAMANAGERSERVICE_CONTRACTID "@mozilla.org/mediaManagerService;1"
 %}
 
-[scriptable, builtinclass, uuid(2efff6ab-0e3e-4cc4-8f9b-4aaca59a1140)]
+[scriptable, builtinclass, uuid(f431b523-4536-4ba7-a2c1-7e1bf670d32a)]
 interface nsIMediaManagerService : nsISupports
 {
   /* return a array of inner windows that have active captures */
   readonly attribute nsISupportsArray activeMediaCaptureWindows;
 
   /* Get the capture state for the given window and all descendant windows (iframes, etc) */
-  void mediaCaptureWindowState(in nsIDOMWindow aWindow, out boolean aVideo, out boolean aAudio);
+  void mediaCaptureWindowState(in nsIDOMWindow aWindow, out boolean aVideo, out boolean aAudio,
+                               [optional] out boolean aScreenShare, [optional] out boolean aWindowShare);
 };
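
Note: consumers outside MediaManager reach this through the contract id defined above. The two new out-parameters are [optional], so existing JS callers that only ask for video/audio keep working, while C++ callers see the full five-out-parameter signature (hence the uuid bump). A sketch of a C++ consumer, assuming the generated nsIMediaManagerService header and the usual service-manager helpers:

#include "nsCOMPtr.h"
#include "nsIMediaManagerService.h"
#include "nsServiceManagerUtils.h"  // do_GetService

// Sketch: report whether aWindow (or any of its iframes) is sharing a screen or window.
static nsresult
QueryCaptureState(nsIDOMWindow* aWindow, bool* aSharingDisplay)
{
  nsCOMPtr<nsIMediaManagerService> mediaManager =
    do_GetService(MEDIAMANAGERSERVICE_CONTRACTID);
  if (!mediaManager) {
    return NS_ERROR_FAILURE;
  }

  bool video = false, audio = false, screenShare = false, windowShare = false;
  nsresult rv = mediaManager->MediaCaptureWindowState(aWindow, &video, &audio,
                                                      &screenShare, &windowShare);
  if (NS_FAILED(rv)) {
    return rv;
  }

  *aSharingDisplay = screenShare || windowShare;
  return NS_OK;
}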