Bug 1104616 - Proxy video capture access to the main process. r=jesup,glandium,mrbkap

This commit is contained in:
Gian-Carlo Pascutto 2015-08-31 14:51:29 +02:00
parent db2d2576d7
commit 19500b256d
24 changed files with 2929 additions and 684 deletions

View File

@ -0,0 +1,741 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set sw=2 ts=8 et ft=cpp : */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "CamerasChild.h"
#include "CamerasUtils.h"
#include "webrtc/video_engine/include/vie_capture.h"
#undef FF
#include "mozilla/Assertions.h"
#include "mozilla/ipc/BackgroundChild.h"
#include "mozilla/ipc/PBackgroundChild.h"
#include "mozilla/Logging.h"
#include "mozilla/SyncRunnable.h"
#include "mozilla/WeakPtr.h"
#include "mozilla/unused.h"
#include "MediaUtils.h"
#include "nsThreadUtils.h"
#undef LOG
#undef LOG_ENABLED
PRLogModuleInfo *gCamerasChildLog;
#define LOG(args) MOZ_LOG(gCamerasChildLog, mozilla::LogLevel::Debug, args)
#define LOG_ENABLED() MOZ_LOG_TEST(gCamerasChildLog, mozilla::LogLevel::Debug)
namespace mozilla {
namespace camera {
// We emulate the sync webrtc.org API with the help of singleton
// CamerasSingleton, which manages a pointer to an IPC object, a thread
// where IPC operations should run on, and a mutex.
// The static function Cameras() will use that Singleton to set up,
// if needed, both the thread and the associated IPC objects and return
// a pointer to the IPC object. Users can then do IPC calls on that object
// after dispatching them to aforementioned thread.
// 2 Threads are involved in this code:
// - the MediaManager thread, which will call the (static, sync API) functions
// through MediaEngineRemoteVideoSource
// - the Cameras IPC thread, which will be doing our IPC to the parent process
// via PBackground
// Our main complication is that we emulate a sync API while (having to do)
// async messaging. We dispatch the messages to another thread to send them
// async and hold a Monitor to wait for the result to be asynchronously received
// again. The requirement for async messaging originates on the parent side:
// it's not reasonable to block all PBackground IPC there while waiting for
// something like device enumeration to complete.
// Process-wide singleton holding the (non-owning) CamerasChild pointer,
// the dedicated "Cameras IPC" thread, and the mutex guarding both.
// Constructed lazily on first use via GetInstance(); lives until process exit.
class CamerasSingleton {
public:
CamerasSingleton()
: mCamerasMutex("CamerasSingleton::mCamerasMutex"),
mCameras(nullptr),
mCamerasChildThread(nullptr) {
if (!gCamerasChildLog) {
gCamerasChildLog = PR_NewLogModule("CamerasChild");
}
LOG(("CamerasSingleton: %p", this));
}
~CamerasSingleton() {
LOG(("~CamerasSingleton: %p", this));
}
// Meyers singleton: thread-safe static initialization.
static CamerasSingleton& GetInstance() {
static CamerasSingleton instance;
return instance;
}
// Take this mutex before calling Child() or Thread().
static OffTheBooksMutex& Mutex() {
return GetInstance().mCamerasMutex;
}
// Returns a mutable reference so callers can both read and (re)set the
// pointer; caller must hold Mutex().
static CamerasChild*& Child() {
GetInstance().Mutex().AssertCurrentThreadOwns();
return GetInstance().mCameras;
}
// The Cameras IPC thread; caller must hold Mutex().
static nsCOMPtr<nsIThread>& Thread() {
GetInstance().Mutex().AssertCurrentThreadOwns();
return GetInstance().mCamerasChildThread;
}
private:
// Reinitializing CamerasChild will change the pointers below.
// We don't want this to happen in the middle of preparing IPC.
// We will be alive on destruction, so this needs to be off the books.
mozilla::OffTheBooksMutex mCamerasMutex;
// This is owned by the IPC code, and the same code controls the lifetime.
// It will set and clear this pointer as appropriate in setup/teardown.
// We'd normally make this a WeakPtr but unfortunately the IPC code already
// uses the WeakPtr mixin in a protected base class of CamerasChild, and in
// any case the object becomes unusable as soon as IPC is tearing down, which
// will be before actual destruction.
CamerasChild* mCameras;
nsCOMPtr<nsIThread> mCamerasChildThread;
};
// Runnable executed (synchronously, via SyncRunnable) on the Cameras IPC
// thread. It ensures PBackground exists for that thread and then constructs
// the PCameras child actor, which the dispatching thread retrieves with
// GetCamerasChild().
class InitializeIPCThread : public nsRunnable
{
public:
InitializeIPCThread()
: mCamerasChild(nullptr) {}
NS_IMETHOD Run() override {
// Try to get the PBackground handle
ipc::PBackgroundChild* existingBackgroundChild =
ipc::BackgroundChild::GetForCurrentThread();
// If it's not spun up yet, block until it is, and retry
if (!existingBackgroundChild) {
LOG(("No existingBackgroundChild"));
SynchronouslyCreatePBackground();
existingBackgroundChild =
ipc::BackgroundChild::GetForCurrentThread();
LOG(("BackgroundChild: %p", existingBackgroundChild));
}
// By now PBackground is guaranteed to be up
MOZ_RELEASE_ASSERT(existingBackgroundChild);
// Create CamerasChild
// We will be returning the resulting pointer (synchronously) to our caller.
mCamerasChild =
static_cast<mozilla::camera::CamerasChild*>(existingBackgroundChild->SendPCamerasConstructor());
return NS_OK;
}
// Valid only after Run() has completed on the IPC thread.
CamerasChild* GetCamerasChild() {
MOZ_ASSERT(mCamerasChild);
return mCamerasChild;
}
private:
// Non-owning; the actor's lifetime is managed by the IPC layer.
CamerasChild* mCamerasChild;
};
// Lazily set up the Cameras IPC thread and the CamerasChild actor, then
// return the actor. Called from the MediaManager thread. Returns nullptr
// if the IPC thread could not be created.
static CamerasChild*
Cameras() {
OffTheBooksMutexAutoLock lock(CamerasSingleton::Mutex());
if (!CamerasSingleton::Child()) {
MOZ_ASSERT(!NS_IsMainThread(), "Should not be on the main Thread");
if (!gCamerasChildLog) {
gCamerasChildLog = PR_NewLogModule("CamerasChild");
}
MOZ_ASSERT(!CamerasSingleton::Thread());
LOG(("No sCameras, setting up IPC Thread"));
nsresult rv = NS_NewNamedThread("Cameras IPC",
getter_AddRefs(CamerasSingleton::Thread()));
if (NS_FAILED(rv)) {
LOG(("Error launching IPC Thread"));
return nullptr;
}
// At this point we are in the MediaManager thread, and the thread we are
// dispatching to is the specific Cameras IPC thread that was just made
// above, so now we will fire off a runnable to run
// SynchronouslyCreatePBackground there, while we block in this thread.
// We block until the following happens in the Cameras IPC thread:
// 1) Creation of PBackground finishes
// 2) Creation of PCameras finishes by sending a message to the parent
nsRefPtr<InitializeIPCThread> runnable = new InitializeIPCThread();
nsRefPtr<SyncRunnable> sr = new SyncRunnable(runnable);
sr->DispatchToThread(CamerasSingleton::Thread());
CamerasSingleton::Child() = runnable->GetCamerasChild();
}
MOZ_ASSERT(CamerasSingleton::Child());
return CamerasSingleton::Child();
}
// IPC reply handler (PBackground thread): the parent reports that the last
// request failed. Wakes the thread blocked in DispatchToParent().
bool
CamerasChild::RecvReplyFailure(void)
{
LOG((__PRETTY_FUNCTION__));
MonitorAutoLock monitor(mReplyMonitor);
mReceivedReply = true;
mReplySuccess = false;
monitor.Notify();
return true;
}
// IPC reply handler (PBackground thread): the parent reports success with
// no payload. Wakes the thread blocked in DispatchToParent().
bool
CamerasChild::RecvReplySuccess(void)
{
LOG((__PRETTY_FUNCTION__));
MonitorAutoLock monitor(mReplyMonitor);
mReceivedReply = true;
mReplySuccess = true;
monitor.Notify();
return true;
}
// Static API mirror: count the capture capabilities of a device.
// Cameras() returns nullptr when the IPC thread could not be set up, so
// guard against dereferencing it; report zero capabilities in that case
// (the same value the member function returns on failure).
int NumberOfCapabilities(CaptureEngine aCapEngine, const char* deviceUniqueIdUTF8)
{
  CamerasChild* child = Cameras();
  if (!child) {
    return 0;
  }
  return child->NumberOfCapabilities(aCapEngine, deviceUniqueIdUTF8);
}
// IPC reply handler (PBackground thread): delivers the capability count and
// wakes the thread blocked in DispatchToParent().
bool
CamerasChild::RecvReplyNumberOfCapabilities(const int& numdev)
{
LOG((__PRETTY_FUNCTION__));
MonitorAutoLock monitor(mReplyMonitor);
mReceivedReply = true;
mReplySuccess = true;
mReplyInteger = numdev;
monitor.Notify();
return true;
}
// Dispatch aRunnable (which performs the actual IPC send) to the Cameras IPC
// thread, then block on aMonitor until a Recv* reply handler sets
// mReceivedReply, or until IPC teardown sets mIPCIsAlive to false.
// Precondition: caller holds mRequestMutex and aMonitor locks mReplyMonitor.
// Returns true only if a successful reply was received.
bool
CamerasChild::DispatchToParent(nsIRunnable* aRunnable,
MonitorAutoLock& aMonitor)
{
{
OffTheBooksMutexAutoLock lock(CamerasSingleton::Mutex());
CamerasSingleton::Thread()->Dispatch(aRunnable, NS_DISPATCH_NORMAL);
}
// We can't see if the send worked, so we need to be able to bail
// out on shutdown (when it failed and we won't get a reply).
if (!mIPCIsAlive) {
return false;
}
// Guard against spurious wakeups.
mReceivedReply = false;
// Wait for a reply
do {
// Wait() releases mReplyMonitor, allowing the reply handler to run.
aMonitor.Wait();
} while (!mReceivedReply && mIPCIsAlive);
if (!mReplySuccess) {
return false;
}
return true;
}
// Synchronously ask the parent how many capture capabilities the given
// device exposes. Runs on the MediaManager thread; returns 0 on failure.
int
CamerasChild::NumberOfCapabilities(CaptureEngine aCapEngine,
const char* deviceUniqueIdUTF8)
{
// Prevents multiple outstanding requests from happening.
MutexAutoLock requestLock(mRequestMutex);
LOG((__PRETTY_FUNCTION__));
LOG(("NumberOfCapabilities for %s", deviceUniqueIdUTF8));
// Copy the id into an owned string so the lambda can outlive the caller's
// buffer.
nsCString unique_id(deviceUniqueIdUTF8);
nsCOMPtr<nsIRunnable> runnable =
media::NewRunnableFrom([this, aCapEngine, unique_id]() -> nsresult {
if (this->SendNumberOfCapabilities(aCapEngine, unique_id)) {
return NS_OK;
}
return NS_ERROR_FAILURE;
});
// Prevent concurrent use of the reply variables. Note
// that this is unlocked while waiting for the reply to be
// filled in, necessitating the first Mutex above.
MonitorAutoLock monitor(mReplyMonitor);
if (!DispatchToParent(runnable, monitor)) {
LOG(("Get capture capability count failed"));
return 0;
}
LOG(("Capture capability count: %d", mReplyInteger));
return mReplyInteger;
}
// Static API mirror: count the capture devices for an engine.
// Cameras() returns nullptr when the IPC thread could not be set up, so
// guard against dereferencing it; report zero devices in that case (the
// same value the member function returns on failure).
int NumberOfCaptureDevices(CaptureEngine aCapEngine)
{
  CamerasChild* child = Cameras();
  if (!child) {
    return 0;
  }
  return child->NumberOfCaptureDevices(aCapEngine);
}
// Synchronously ask the parent how many capture devices exist for this
// engine. Runs on the MediaManager thread; returns 0 on failure.
int
CamerasChild::NumberOfCaptureDevices(CaptureEngine aCapEngine)
{
// One outstanding request at a time.
MutexAutoLock requestLock(mRequestMutex);
LOG((__PRETTY_FUNCTION__));
nsCOMPtr<nsIRunnable> runnable =
media::NewRunnableFrom([this, aCapEngine]() -> nsresult {
if (this->SendNumberOfCaptureDevices(aCapEngine)) {
return NS_OK;
}
return NS_ERROR_FAILURE;
});
// Guards the mReply* members while we wait.
MonitorAutoLock monitor(mReplyMonitor);
if (!DispatchToParent(runnable, monitor)) {
LOG(("Get NumberOfCaptureDevices failed"));
return 0;
}
LOG(("Capture Devices: %d", mReplyInteger));
return mReplyInteger;
}
// IPC reply handler (PBackground thread): delivers the device count and
// wakes the thread blocked in DispatchToParent().
bool
CamerasChild::RecvReplyNumberOfCaptureDevices(const int& numdev)
{
LOG((__PRETTY_FUNCTION__));
MonitorAutoLock monitor(mReplyMonitor);
mReceivedReply = true;
mReplySuccess = true;
mReplyInteger = numdev;
monitor.Notify();
return true;
}
// Static API mirror: fetch one capture capability of a device.
// Cameras() returns nullptr when the IPC thread could not be set up, so
// guard against dereferencing it; return -1 (the member function's error
// value) in that case.
int GetCaptureCapability(CaptureEngine aCapEngine, const char* unique_idUTF8,
                         const unsigned int capability_number,
                         webrtc::CaptureCapability& capability)
{
  CamerasChild* child = Cameras();
  if (!child) {
    return -1;
  }
  return child->GetCaptureCapability(aCapEngine,
                                     unique_idUTF8,
                                     capability_number,
                                     capability);
}
// Synchronously fetch capability #capability_number of the given device
// into `capability`. Runs on the MediaManager thread.
// Returns 0 on success, -1 on failure.
int
CamerasChild::GetCaptureCapability(CaptureEngine aCapEngine,
const char* unique_idUTF8,
const unsigned int capability_number,
webrtc::CaptureCapability& capability)
{
// One outstanding request at a time.
MutexAutoLock requestLock(mRequestMutex);
LOG(("GetCaptureCapability: %s %d", unique_idUTF8, capability_number));
// Owned copy for the async lambda.
nsCString unique_id(unique_idUTF8);
nsCOMPtr<nsIRunnable> runnable =
media::NewRunnableFrom([this, aCapEngine, unique_id, capability_number]() -> nsresult {
if (this->SendGetCaptureCapability(aCapEngine, unique_id, capability_number)) {
return NS_OK;
}
return NS_ERROR_FAILURE;
});
// Guards the mReply* members while we wait.
MonitorAutoLock monitor(mReplyMonitor);
if (!DispatchToParent(runnable, monitor)) {
return -1;
}
// mReplyCapability was filled in by RecvReplyGetCaptureCapability.
capability = mReplyCapability;
return 0;
}
// IPC reply handler (PBackground thread): converts the IPDL capability
// struct back into a webrtc::CaptureCapability and wakes the waiter.
bool
CamerasChild::RecvReplyGetCaptureCapability(const CaptureCapability& ipcCapability)
{
LOG((__PRETTY_FUNCTION__));
MonitorAutoLock monitor(mReplyMonitor);
mReceivedReply = true;
mReplySuccess = true;
mReplyCapability.width = ipcCapability.width();
mReplyCapability.height = ipcCapability.height();
mReplyCapability.maxFPS = ipcCapability.maxFPS();
mReplyCapability.expectedCaptureDelay = ipcCapability.expectedCaptureDelay();
// Enums travel over IPC as plain ints; cast them back.
mReplyCapability.rawType = static_cast<webrtc::RawVideoType>(ipcCapability.rawType());
mReplyCapability.codecType = static_cast<webrtc::VideoCodecType>(ipcCapability.codecType());
mReplyCapability.interlaced = ipcCapability.interlaced();
monitor.Notify();
return true;
}
// Static API mirror: fetch name and unique id of capture device
// #list_number. Cameras() returns nullptr when the IPC thread could not be
// set up, so guard against dereferencing it; return -1 (the member
// function's error value) in that case.
int GetCaptureDevice(CaptureEngine aCapEngine,
                     unsigned int list_number, char* device_nameUTF8,
                     const unsigned int device_nameUTF8Length,
                     char* unique_idUTF8,
                     const unsigned int unique_idUTF8Length)
{
  CamerasChild* child = Cameras();
  if (!child) {
    return -1;
  }
  return child->GetCaptureDevice(aCapEngine,
                                 list_number,
                                 device_nameUTF8,
                                 device_nameUTF8Length,
                                 unique_idUTF8,
                                 unique_idUTF8Length);
}
// Synchronously fetch the name and unique id of capture device #list_number
// into the caller-provided buffers (truncated to the given lengths).
// Runs on the MediaManager thread. Returns 0 on success, -1 on failure.
int
CamerasChild::GetCaptureDevice(CaptureEngine aCapEngine,
unsigned int list_number, char* device_nameUTF8,
const unsigned int device_nameUTF8Length,
char* unique_idUTF8,
const unsigned int unique_idUTF8Length)
{
// One outstanding request at a time.
MutexAutoLock requestLock(mRequestMutex);
LOG((__PRETTY_FUNCTION__));
nsCOMPtr<nsIRunnable> runnable =
media::NewRunnableFrom([this, aCapEngine, list_number]() -> nsresult {
if (this->SendGetCaptureDevice(aCapEngine, list_number)) {
return NS_OK;
}
return NS_ERROR_FAILURE;
});
// Guards the mReply* members while we wait.
MonitorAutoLock monitor(mReplyMonitor);
if (!DispatchToParent(runnable, monitor)) {
LOG(("GetCaptureDevice failed"));
return -1;
}
// strlcpy guarantees NUL termination within the given buffer size.
base::strlcpy(device_nameUTF8, mReplyDeviceName.get(), device_nameUTF8Length);
base::strlcpy(unique_idUTF8, mReplyDeviceID.get(), unique_idUTF8Length);
LOG(("Got %s name %s id", device_nameUTF8, unique_idUTF8));
return 0;
}
// IPC reply handler (PBackground thread): stores the device's name and id
// and wakes the thread blocked in DispatchToParent().
bool
CamerasChild::RecvReplyGetCaptureDevice(const nsCString& device_name,
const nsCString& device_id)
{
LOG((__PRETTY_FUNCTION__));
MonitorAutoLock monitor(mReplyMonitor);
mReceivedReply = true;
mReplySuccess = true;
mReplyDeviceName = device_name;
mReplyDeviceID = device_id;
monitor.Notify();
return true;
}
// Static API mirror: allocate a capture device, returning its id through
// capture_id. Cameras() returns nullptr when the IPC thread could not be
// set up, so guard against dereferencing it; return -1 (the member
// function's error value) in that case.
int AllocateCaptureDevice(CaptureEngine aCapEngine,
                          const char* unique_idUTF8,
                          const unsigned int unique_idUTF8Length,
                          int& capture_id)
{
  CamerasChild* child = Cameras();
  if (!child) {
    return -1;
  }
  return child->AllocateCaptureDevice(aCapEngine,
                                      unique_idUTF8,
                                      unique_idUTF8Length,
                                      capture_id);
}
// Synchronously allocate the capture device with the given unique id; the
// allocated capture id is written to capture_id. Runs on the MediaManager
// thread. Returns 0 on success, -1 on failure.
int
CamerasChild::AllocateCaptureDevice(CaptureEngine aCapEngine,
const char* unique_idUTF8,
const unsigned int unique_idUTF8Length,
int& capture_id)
{
// One outstanding request at a time.
MutexAutoLock requestLock(mRequestMutex);
LOG((__PRETTY_FUNCTION__));
// Owned copy for the async lambda.
nsCString unique_id(unique_idUTF8);
nsCOMPtr<nsIRunnable> runnable =
media::NewRunnableFrom([this, aCapEngine, unique_id]() -> nsresult {
if (this->SendAllocateCaptureDevice(aCapEngine, unique_id)) {
return NS_OK;
}
return NS_ERROR_FAILURE;
});
// Guards the mReply* members while we wait.
MonitorAutoLock monitor(mReplyMonitor);
if (!DispatchToParent(runnable, monitor)) {
LOG(("AllocateCaptureDevice failed"));
return -1;
}
LOG(("Capture Device allocated: %d", mReplyInteger));
capture_id = mReplyInteger;
return 0;
}
// IPC reply handler (PBackground thread): delivers the allocated capture id
// and wakes the thread blocked in DispatchToParent().
bool
CamerasChild::RecvReplyAllocateCaptureDevice(const int& numdev)
{
LOG((__PRETTY_FUNCTION__));
MonitorAutoLock monitor(mReplyMonitor);
mReceivedReply = true;
mReplySuccess = true;
mReplyInteger = numdev;
monitor.Notify();
return true;
}
// Static API mirror: release a previously allocated capture device.
// Cameras() returns nullptr when the IPC thread could not be set up, so
// guard against dereferencing it; return -1 (the member function's error
// value) in that case.
int ReleaseCaptureDevice(CaptureEngine aCapEngine, const int capture_id)
{
  CamerasChild* child = Cameras();
  if (!child) {
    return -1;
  }
  return child->ReleaseCaptureDevice(aCapEngine, capture_id);
}
// Synchronously release the given capture device. Runs on the MediaManager
// thread. Returns 0 on success, -1 on failure.
int
CamerasChild::ReleaseCaptureDevice(CaptureEngine aCapEngine,
const int capture_id)
{
// One outstanding request at a time.
MutexAutoLock requestLock(mRequestMutex);
LOG((__PRETTY_FUNCTION__));
nsCOMPtr<nsIRunnable> runnable =
media::NewRunnableFrom([this, aCapEngine, capture_id]() -> nsresult {
if (this->SendReleaseCaptureDevice(aCapEngine, capture_id)) {
return NS_OK;
}
return NS_ERROR_FAILURE;
});
// Guards the mReply* members while we wait.
MonitorAutoLock monitor(mReplyMonitor);
if (!DispatchToParent(runnable, monitor)) {
return -1;
}
return 0;
}
void
CamerasChild::AddCallback(const CaptureEngine aCapEngine, const int capture_id,
webrtc::ExternalRenderer* render)
{
MutexAutoLock lock(mCallbackMutex);
CapturerElement ce;
ce.engine = aCapEngine;
ce.id = capture_id;
ce.callback = render;
mCallbacks.AppendElement(ce);
}
void
CamerasChild::RemoveCallback(const CaptureEngine aCapEngine, const int capture_id)
{
MutexAutoLock lock(mCallbackMutex);
for (unsigned int i = 0; i < mCallbacks.Length(); i++) {
CapturerElement ce = mCallbacks[i];
if (ce.engine == aCapEngine && ce.id == capture_id) {
mCallbacks.RemoveElementAt(i);
break;
}
}
}
// Static API mirror: start capturing on capture_id, delivering frames to
// cb. Cameras() returns nullptr when the IPC thread could not be set up,
// so guard against dereferencing it; return -1 (the member function's
// error value) in that case.
int StartCapture(CaptureEngine aCapEngine,
                 const int capture_id,
                 webrtc::CaptureCapability& webrtcCaps,
                 webrtc::ExternalRenderer* cb)
{
  CamerasChild* child = Cameras();
  if (!child) {
    return -1;
  }
  return child->StartCapture(aCapEngine,
                             capture_id,
                             webrtcCaps,
                             cb);
}
// Synchronously start capture on capture_id with the requested capability;
// frames will be delivered to cb via RecvDeliverFrame. Runs on the
// MediaManager thread. Returns 0 on success, -1 on failure.
int
CamerasChild::StartCapture(CaptureEngine aCapEngine,
const int capture_id,
webrtc::CaptureCapability& webrtcCaps,
webrtc::ExternalRenderer* cb)
{
// One outstanding request at a time.
MutexAutoLock requestLock(mRequestMutex);
LOG((__PRETTY_FUNCTION__));
// Register the renderer before the capture can start delivering frames.
// NOTE(review): if the IPC request below fails, the callback is left
// registered — presumably harmless since no frames will arrive; verify.
AddCallback(aCapEngine, capture_id, cb);
// Convert the webrtc capability into the IPDL-serializable form.
CaptureCapability capCap(webrtcCaps.width,
webrtcCaps.height,
webrtcCaps.maxFPS,
webrtcCaps.expectedCaptureDelay,
webrtcCaps.rawType,
webrtcCaps.codecType,
webrtcCaps.interlaced);
nsCOMPtr<nsIRunnable> runnable =
media::NewRunnableFrom([this, aCapEngine, capture_id, capCap]() -> nsresult {
if (this->SendStartCapture(aCapEngine, capture_id, capCap)) {
return NS_OK;
}
return NS_ERROR_FAILURE;
});
// Guards the mReply* members while we wait.
MonitorAutoLock monitor(mReplyMonitor);
if (!DispatchToParent(runnable, monitor)) {
return -1;
}
return 0;
}
// Static API mirror: stop capturing on capture_id.
// Cameras() returns nullptr when the IPC thread could not be set up, so
// guard against dereferencing it; return -1 (the member function's error
// value) in that case.
int StopCapture(CaptureEngine aCapEngine, const int capture_id)
{
  CamerasChild* child = Cameras();
  if (!child) {
    return -1;
  }
  return child->StopCapture(aCapEngine, capture_id);
}
// Synchronously stop capture on capture_id and unregister its renderer
// callback. Runs on the MediaManager thread.
// Returns 0 on success, -1 on failure.
int
CamerasChild::StopCapture(CaptureEngine aCapEngine, const int capture_id)
{
// One outstanding request at a time.
MutexAutoLock requestLock(mRequestMutex);
LOG((__PRETTY_FUNCTION__));
nsCOMPtr<nsIRunnable> runnable =
media::NewRunnableFrom([this, aCapEngine, capture_id]() -> nsresult {
if (this->SendStopCapture(aCapEngine, capture_id)) {
return NS_OK;
}
return NS_ERROR_FAILURE;
});
// Guards the mReply* members while we wait.
MonitorAutoLock monitor(mReplyMonitor);
if (!DispatchToParent(runnable, monitor)) {
return -1;
}
// Only remove the callback once the parent has confirmed the stop, so no
// frame can arrive for a missing callback afterwards.
RemoveCallback(aCapEngine, capture_id);
return 0;
}
// Static API mirror: shut down the Cameras IPC machinery if it was ever
// started. Safe to call when already shut down.
void
Shutdown(void)
{
{
OffTheBooksMutexAutoLock lock(CamerasSingleton::Mutex());
if (!CamerasSingleton::Child()) {
// We don't want to cause everything to get fired up if we're
// really already shut down.
LOG(("Shutdown when already shut down"));
return;
}
}
// NOTE(review): the mutex is released between the check above and the
// Cameras() call below; presumably no other thread can re-create the child
// during shutdown, but this check-then-act window is worth confirming.
Cameras()->Shutdown();
}
// Runs on the Cameras IPC thread during teardown: closes the PBackground
// channel for that thread, then bounces mReplyEvent (the thread destructor)
// back to mReplyThread, the thread that initiated shutdown.
class ShutdownRunnable : public nsRunnable {
public:
ShutdownRunnable(nsRefPtr<nsRunnable> aReplyEvent,
nsIThread* aReplyThread)
: mReplyEvent(aReplyEvent), mReplyThread(aReplyThread) {};
NS_IMETHOD Run() override {
LOG(("Closing BackgroundChild"));
ipc::BackgroundChild::CloseForCurrentThread();
LOG(("PBackground thread exists, shutting down thread"));
mReplyThread->Dispatch(mReplyEvent, NS_DISPATCH_NORMAL);
return NS_OK;
}
private:
nsRefPtr<nsRunnable> mReplyEvent;
// Raw pointer: assumes the originating thread outlives this runnable —
// TODO confirm.
nsIThread* mReplyThread;
};
// Tear down IPC: unblock any thread waiting for a reply, ask the parent to
// delete the actor, close PBackground on the IPC thread, and schedule the
// IPC thread's destruction. Finally clears the singleton's pointers.
void
CamerasChild::Shutdown()
{
{
// Mark IPC dead and wake any waiter in DispatchToParent so it can bail.
MonitorAutoLock monitor(mReplyMonitor);
mIPCIsAlive = false;
monitor.NotifyAll();
}
OffTheBooksMutexAutoLock lock(CamerasSingleton::Mutex());
if (CamerasSingleton::Thread()) {
LOG(("Dispatching actor deletion"));
// Delete the parent actor.
nsRefPtr<nsRunnable> deleteRunnable =
// CamerasChild (this) will remain alive and is only deleted by the
// IPC layer when SendAllDone returns.
media::NewRunnableFrom([this]() -> nsresult {
unused << this->SendAllDone();
return NS_OK;
});
CamerasSingleton::Thread()->Dispatch(deleteRunnable, NS_DISPATCH_NORMAL);
LOG(("PBackground thread exists, dispatching close"));
// Dispatch closing the IPC thread back to us when the
// BackgroundChild is closed.
nsRefPtr<nsRunnable> event =
new ThreadDestructor(CamerasSingleton::Thread());
nsRefPtr<ShutdownRunnable> runnable =
new ShutdownRunnable(event, NS_GetCurrentThread());
CamerasSingleton::Thread()->Dispatch(runnable, NS_DISPATCH_NORMAL);
} else {
LOG(("Shutdown called without PBackground thread"));
}
LOG(("Erasing sCameras & thread refs (original thread)"));
CamerasSingleton::Child() = nullptr;
CamerasSingleton::Thread() = nullptr;
}
bool
CamerasChild::RecvDeliverFrame(const int& capEngine,
const int& capId,
mozilla::ipc::Shmem&& shmem,
const int& size,
const uint32_t& time_stamp,
const int64_t& ntp_time,
const int64_t& render_time)
{
MutexAutoLock lock(mCallbackMutex);
CaptureEngine capEng = static_cast<CaptureEngine>(capEngine);
if (Callback(capEng, capId)) {
unsigned char* image = shmem.get<unsigned char>();
Callback(capEng, capId)->DeliverFrame(image, size,
time_stamp,
ntp_time, render_time,
nullptr);
} else {
LOG(("DeliverFrame called with dead callback"));
}
SendReleaseFrame(shmem);
return true;
}
bool
CamerasChild::RecvFrameSizeChange(const int& capEngine,
const int& capId,
const int& w, const int& h)
{
LOG((__PRETTY_FUNCTION__));
MutexAutoLock lock(mCallbackMutex);
CaptureEngine capEng = static_cast<CaptureEngine>(capEngine);
if (Callback(capEng, capId)) {
Callback(capEng, capId)->FrameSizeChange(w, h, 0);
} else {
LOG(("Frame size change with dead callback"));
}
return true;
}
// Called by the IPC layer when the channel goes away (crash, shutdown).
// Mark IPC dead and wake all waiters so no caller blocks forever on a
// reply that will never arrive.
void
CamerasChild::ActorDestroy(ActorDestroyReason aWhy)
{
MonitorAutoLock monitor(mReplyMonitor);
mIPCIsAlive = false;
// Hopefully prevent us from getting stuck
// on replies that'll never come.
monitor.NotifyAll();
}
// Constructed by the IPC layer (via SendPCamerasConstructor); IPC is
// considered alive from birth until ActorDestroy/Shutdown.
CamerasChild::CamerasChild()
: mCallbackMutex("mozilla::cameras::CamerasChild::mCallbackMutex"),
mIPCIsAlive(true),
mRequestMutex("mozilla::cameras::CamerasChild::mRequestMutex"),
mReplyMonitor("mozilla::cameras::CamerasChild::mReplyMonitor")
{
if (!gCamerasChildLog) {
gCamerasChildLog = PR_NewLogModule("CamerasChild");
}
LOG(("CamerasChild: %p", this));
MOZ_COUNT_CTOR(CamerasChild);
}
// Destroyed by the IPC layer; runs Shutdown() to make teardown idempotent
// in case it wasn't triggered explicitly.
CamerasChild::~CamerasChild()
{
LOG(("~CamerasChild: %p", this));
Shutdown();
MOZ_COUNT_DTOR(CamerasChild);
}
// Find the renderer registered for (aCapEngine, capture_id), or nullptr.
// Precondition: caller must hold mCallbackMutex (both current callers,
// RecvDeliverFrame and RecvFrameSizeChange, do).
webrtc::ExternalRenderer* CamerasChild::Callback(CaptureEngine aCapEngine,
                                                 int capture_id)
{
  for (unsigned int i = 0; i < mCallbacks.Length(); i++) {
    // Take a const reference instead of copying each element while scanning.
    const CapturerElement& ce = mCallbacks[i];
    if (ce.engine == aCapEngine && ce.id == capture_id) {
      return ce.callback;
    }
  }
  return nullptr;
}
}
}

View File

@ -0,0 +1,175 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set sw=2 ts=8 et ft=cpp : */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_CamerasChild_h
#define mozilla_CamerasChild_h
#include "mozilla/Pair.h"
#include "mozilla/dom/ContentChild.h"
#include "mozilla/camera/PCamerasChild.h"
#include "mozilla/camera/PCamerasParent.h"
#include "mozilla/Mutex.h"
#include "nsCOMPtr.h"
// conflicts with #include of scoped_ptr.h
#undef FF
#include "webrtc/common.h"
// Video Engine
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_render.h"
namespace mozilla {
namespace ipc {
class BackgroundChildImpl;
}
namespace camera {
// Identifies which kind of capture backend a request targets. Serialized
// over IPC as a plain int (see the casts in CamerasChild/CamerasParent).
enum CaptureEngine : int {
InvalidEngine = 0,
ScreenEngine,
BrowserEngine,
WinEngine,
AppEngine,
CameraEngine,
MaxEngine
};
// One registered frame receiver: maps an (engine, capture id) pair to the
// non-owning renderer that should get its frames.
struct CapturerElement {
CaptureEngine engine;
int id;
webrtc::ExternalRenderer* callback;
};
// Statically mirror the webrtc.org ViECapture API. These free functions are
// called via MediaManager->MediaEngineRemoteVideoSource on the MediaManager
// thread; each forwards to the CamerasChild singleton's member of the same
// name, setting up the IPC machinery on first use.
int NumberOfCapabilities(CaptureEngine aCapEngine,
const char* deviceUniqueIdUTF8);
int GetCaptureCapability(CaptureEngine aCapEngine,
const char* unique_idUTF8,
const unsigned int capability_number,
webrtc::CaptureCapability& capability);
int NumberOfCaptureDevices(CaptureEngine aCapEngine);
int GetCaptureDevice(CaptureEngine aCapEngine,
unsigned int list_number, char* device_nameUTF8,
const unsigned int device_nameUTF8Length,
char* unique_idUTF8,
const unsigned int unique_idUTF8Length);
int AllocateCaptureDevice(CaptureEngine aCapEngine,
const char* unique_idUTF8,
const unsigned int unique_idUTF8Length,
int& capture_id);
int ReleaseCaptureDevice(CaptureEngine aCapEngine,
const int capture_id);
int StartCapture(CaptureEngine aCapEngine,
const int capture_id, webrtc::CaptureCapability& capability,
webrtc::ExternalRenderer* func);
int StopCapture(CaptureEngine aCapEngine, const int capture_id);
// Tears down the IPC machinery; safe to call when already shut down.
void Shutdown(void);
// Child-side PCameras actor. Lives on the PBackground (Cameras IPC) thread;
// emulates the synchronous webrtc.org ViECapture API for the MediaManager
// thread by sending async requests and blocking on mReplyMonitor until the
// matching RecvReply* handler fires.
class CamerasChild final : public PCamerasChild
{
friend class mozilla::ipc::BackgroundChildImpl;
public:
// We are owned by the PBackground thread only. CamerasSingleton
// takes a non-owning reference.
NS_INLINE_DECL_REFCOUNTING(CamerasChild)
// IPC messages received on the PBackground thread;
// these are the actual callbacks with data
virtual bool RecvDeliverFrame(const int&, const int&, mozilla::ipc::Shmem&&,
const int&, const uint32_t&, const int64_t&,
const int64_t&) override;
virtual bool RecvFrameSizeChange(const int&, const int&,
const int& w, const int& h) override;
// these are response messages to our outgoing requests
virtual bool RecvReplyNumberOfCaptureDevices(const int&) override;
virtual bool RecvReplyNumberOfCapabilities(const int&) override;
virtual bool RecvReplyAllocateCaptureDevice(const int&) override;
virtual bool RecvReplyGetCaptureCapability(const CaptureCapability& capability) override;
virtual bool RecvReplyGetCaptureDevice(const nsCString& device_name,
const nsCString& device_id) override;
virtual bool RecvReplyFailure(void) override;
virtual bool RecvReplySuccess(void) override;
virtual void ActorDestroy(ActorDestroyReason aWhy) override;
// the webrtc.org ViECapture calls are mirrored here, but with access
// to a specific PCameras instance to communicate over. These also
// run on the MediaManager thread
int NumberOfCaptureDevices(CaptureEngine aCapEngine);
int NumberOfCapabilities(CaptureEngine aCapEngine,
const char* deviceUniqueIdUTF8);
int ReleaseCaptureDevice(CaptureEngine aCapEngine,
const int capture_id);
int StartCapture(CaptureEngine aCapEngine,
const int capture_id, webrtc::CaptureCapability& capability,
webrtc::ExternalRenderer* func);
int StopCapture(CaptureEngine aCapEngine, const int capture_id);
int AllocateCaptureDevice(CaptureEngine aCapEngine,
const char* unique_idUTF8,
const unsigned int unique_idUTF8Length,
int& capture_id);
int GetCaptureCapability(CaptureEngine aCapEngine,
const char* unique_idUTF8,
const unsigned int capability_number,
webrtc::CaptureCapability& capability);
int GetCaptureDevice(CaptureEngine aCapEngine,
unsigned int list_number, char* device_nameUTF8,
const unsigned int device_nameUTF8Length,
char* unique_idUTF8,
const unsigned int unique_idUTF8Length);
void Shutdown();
// Callback bookkeeping; Callback() requires mCallbackMutex to be held.
webrtc::ExternalRenderer* Callback(CaptureEngine aCapEngine, int capture_id);
void AddCallback(const CaptureEngine aCapEngine, const int capture_id,
webrtc::ExternalRenderer* render);
void RemoveCallback(const CaptureEngine aCapEngine, const int capture_id);
private:
CamerasChild();
~CamerasChild();
// Dispatch a Runnable to the PCamerasParent, by executing it on the
// dedicated Cameras IPC/PBackground thread.
bool DispatchToParent(nsIRunnable* aRunnable,
MonitorAutoLock& aMonitor);
nsTArray<CapturerElement> mCallbacks;
// Protects the callback arrays
Mutex mCallbackMutex;
// Cleared (under mReplyMonitor) on ActorDestroy/Shutdown to unblock waiters.
bool mIPCIsAlive;
// Hold to prevent multiple outstanding requests. We don't use
// request IDs so we only support one at a time. We don't want to try
// to use the webrtc.org API from multiple threads simultaneously.
// The monitor below isn't sufficient for this, as it will drop
// the lock when Wait-ing for a response, allowing us to send a new
// request. The Notify on receiving the response will then unblock
// both waiters and one will be guaranteed to get the wrong result.
// Take this one before taking mReplyMonitor.
Mutex mRequestMutex;
// Hold to wait for an async response to our calls
Monitor mReplyMonitor;
// Async response valid?
bool mReceivedReply;
// Async response data contents (guarded by mReplyMonitor):
bool mReplySuccess;
int mReplyInteger;
webrtc::CaptureCapability mReplyCapability;
nsCString mReplyDeviceName;
nsCString mReplyDeviceID;
};
} // namespace camera
} // namespace mozilla
#endif // mozilla_CamerasChild_h

View File

@ -0,0 +1,821 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set sw=2 ts=8 et ft=cpp : */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "CamerasParent.h"
#include "CamerasUtils.h"
#include "MediaEngine.h"
#include "MediaUtils.h"
#include "mozilla/Assertions.h"
#include "mozilla/unused.h"
#include "mozilla/Logging.h"
#include "mozilla/ipc/BackgroundParent.h"
#include "nsThreadUtils.h"
#undef LOG
#undef LOG_ENABLED
PRLogModuleInfo *gCamerasParentLog;
#define LOG(args) MOZ_LOG(gCamerasParentLog, mozilla::LogLevel::Debug, args)
#define LOG_ENABLED() MOZ_LOG_TEST(gCamerasParentLog, mozilla::LogLevel::Debug)
namespace mozilla {
namespace camera {
// 3 threads are involved in this code:
// - the main thread for some setups, and occassionally for video capture setup
// calls that don't work correctly elsewhere.
// - the IPC thread on which PBackground is running and which receives and
// sends messages
// - a thread which will execute the actual (possibly slow) camera access
// called "VideoCapture". On Windows this is a thread with an event loop
// suitable for UI access.
// Dispatched to the PBackground thread to notify the child process of a
// changed frame size. Fixes: mResult was uninitialized until Run() executed
// (GetResult() before/without Run() read indeterminate memory), and Run()
// lacked the override specifier.
class FrameSizeChangeRunnable : public nsRunnable {
public:
  FrameSizeChangeRunnable(CamerasParent *aParent, CaptureEngine capEngine,
                          int cap_id, unsigned int aWidth, unsigned int aHeight)
    : mParent(aParent), mCapEngine(capEngine), mCapId(cap_id),
      mWidth(aWidth), mHeight(aHeight), mResult(0) {}
  NS_IMETHOD Run() override {
    if (mParent->IsShuttingDown()) {
      // Communication channel is being torn down
      LOG(("FrameSizeChangeRunnable is active without active Child"));
      mResult = 0;
      return NS_OK;
    }
    if (!mParent->SendFrameSizeChange(mCapEngine, mCapId, mWidth, mHeight)) {
      mResult = -1;
    } else {
      mResult = 0;
    }
    return NS_OK;
  }
  // 0 on success or benign shutdown, -1 if the IPC send failed.
  int GetResult() {
    return mResult;
  }
private:
  nsRefPtr<CamerasParent> mParent;
  CaptureEngine mCapEngine;
  int mCapId;
  unsigned int mWidth;
  unsigned int mHeight;
  int mResult;
};
// webrtc callback (VideoCapture thread): frame dimensions changed. Hop to
// the PBackground thread to forward the notification over IPC.
// Always returns 0; the runnable's result is not waited on.
int
CallbackHelper::FrameSizeChange(unsigned int w, unsigned int h,
unsigned int streams)
{
LOG(("CallbackHelper Video FrameSizeChange: %ux%u", w, h));
nsRefPtr<FrameSizeChangeRunnable> runnable =
new FrameSizeChangeRunnable(mParent, mCapEngine, mCapturerId, w, h);
MOZ_ASSERT(mParent);
nsIThread * thread = mParent->GetBackgroundThread();
MOZ_ASSERT(thread != nullptr);
thread->Dispatch(runnable, NS_DISPATCH_NORMAL);
return 0;
}
// Dispatched to the PBackground thread to ship one captured frame to the
// child, either via the pre-filled ShmemBuffer or via mAlternateBuffer when
// no shmem of the right size was available. Fixes: mResult was
// uninitialized until Run() executed, Run() lacked the override specifier,
// and a stray ';' followed the constructor body.
class DeliverFrameRunnable : public nsRunnable {
public:
  DeliverFrameRunnable(CamerasParent *aParent,
                       CaptureEngine engine,
                       int cap_id,
                       ShmemBuffer buffer,
                       unsigned char* altbuffer,
                       int size,
                       uint32_t time_stamp,
                       int64_t ntp_time,
                       int64_t render_time)
    : mParent(aParent), mCapEngine(engine), mCapId(cap_id), mBuffer(Move(buffer)),
      mSize(size), mTimeStamp(time_stamp), mNtpTime(ntp_time),
      mRenderTime(render_time), mResult(0) {
    // No ShmemBuffer (of the right size) was available, so make an
    // extra buffer here. We have no idea when we are going to run and
    // it will be potentially long after the webrtc frame callback has
    // returned, so the copy needs to be no later than here.
    // We will need to copy this back into a Shmem later on so we prefer
    // using ShmemBuffers to avoid the extra copy.
    if (altbuffer != nullptr) {
      mAlternateBuffer.reset(new unsigned char[size]);
      memcpy(mAlternateBuffer.get(), altbuffer, size);
    }
  }
  NS_IMETHOD Run() override {
    if (mParent->IsShuttingDown()) {
      // Communication channel is being torn down
      mResult = 0;
      return NS_OK;
    }
    if (!mParent->DeliverFrameOverIPC(mCapEngine, mCapId,
                                      Move(mBuffer), mAlternateBuffer.get(),
                                      mSize, mTimeStamp,
                                      mNtpTime, mRenderTime)) {
      mResult = -1;
    } else {
      mResult = 0;
    }
    return NS_OK;
  }
  // 0 on success or benign shutdown, -1 if delivery failed.
  int GetResult() {
    return mResult;
  }
private:
  nsRefPtr<CamerasParent> mParent;
  CaptureEngine mCapEngine;
  int mCapId;
  ShmemBuffer mBuffer;
  // Owned fallback copy of the frame, used only when mBuffer is invalid.
  mozilla::UniquePtr<unsigned char[]> mAlternateBuffer;
  int mSize;
  uint32_t mTimeStamp;
  int64_t mNtpTime;
  int64_t mRenderTime;
  int mResult;
};
// PBackground thread: send one frame to the child. If `altbuffer` is
// non-null (no shmem was available at capture time), copy it into a pooled
// shmem first; otherwise `buffer` already holds the frame data.
// Returns 0 on success or when the frame is deliberately dropped,
// -1 if the IPC send failed.
int
CamerasParent::DeliverFrameOverIPC(CaptureEngine cap_engine,
int cap_id,
ShmemBuffer buffer,
unsigned char* altbuffer,
int size,
uint32_t time_stamp,
int64_t ntp_time,
int64_t render_time)
{
// No ShmemBuffers were available, so construct one now of the right size
// and copy into it. That is an extra copy, but we expect this to be
// the exceptional case, because we just assured the next call *will* have a
// buffer of the right size.
if (altbuffer != nullptr) {
// Get a shared memory buffer from the pool, at least size big
ShmemBuffer shMemBuff = mShmemPool.Get(this, size);
if (!shMemBuff.Valid()) {
LOG(("Video shmem is not writeable in DeliverFrame"));
// We can skip this frame if we run out of buffers, it's not a real error.
return 0;
}
// get() and Size() check for proper alignment of the segment
memcpy(shMemBuff.GetBytes(), altbuffer, size);
if (!SendDeliverFrame(cap_engine, cap_id,
shMemBuff.Get(), size,
time_stamp, ntp_time, render_time)) {
return -1;
}
} else {
// ShmemBuffer was available, we're all good. A single copy happened
// in the original webrtc callback.
if (!SendDeliverFrame(cap_engine, cap_id,
buffer.Get(), size,
time_stamp, ntp_time, render_time)) {
return -1;
}
}
return 0;
}
// Non-blocking: return a pooled shmem buffer of at least aSize bytes, or
// an invalid ShmemBuffer if none is available right now.
ShmemBuffer
CamerasParent::GetBuffer(size_t aSize)
{
return mShmemPool.GetIfAvailable(aSize);
}
// webrtc callback (VideoCapture thread): a frame was captured. Copy it into
// a pooled shmem if one is available (setting `buffer` to null to signal
// that), else pass the raw pointer along so DeliverFrameRunnable makes its
// own copy; then dispatch delivery to the PBackground thread.
// Always returns 0; the runnable's result is not waited on.
int
CallbackHelper::DeliverFrame(unsigned char* buffer,
int size,
uint32_t time_stamp,
int64_t ntp_time,
int64_t render_time,
void *handle)
{
// Get a shared memory buffer to copy the frame data into
ShmemBuffer shMemBuffer = mParent->GetBuffer(size);
if (!shMemBuffer.Valid()) {
// Either we ran out of buffers or they're not the right size yet
LOG(("Video shmem is not available in DeliverFrame"));
// We will do the copy into a(n extra) temporary buffer inside
// the DeliverFrameRunnable constructor.
} else {
// Shared memory buffers of the right size are available, do the copy here.
memcpy(shMemBuffer.GetBytes(), buffer, size);
// Mark the original buffer as cleared.
buffer = nullptr;
}
nsRefPtr<DeliverFrameRunnable> runnable =
new DeliverFrameRunnable(mParent, mCapEngine, mCapturerId,
Move(shMemBuffer), buffer, size, time_stamp,
ntp_time, render_time);
MOZ_ASSERT(mParent);
nsIThread* thread = mParent->GetBackgroundThread();
MOZ_ASSERT(thread != nullptr);
thread->Dispatch(runnable, NS_DISPATCH_NORMAL);
return 0;
}
// IPC (PBackground thread): the child is done with a frame's Shmem;
// recycle it into the pool for reuse by later frames.
bool
CamerasParent::RecvReleaseFrame(mozilla::ipc::Shmem&& s) {
  ShmemBuffer returned(s);
  mShmemPool.Put(Move(returned));
  return true;
}
// Lazily create and initialize the webrtc VideoEngine for |aCapEngine|,
// acquiring its ViEBase, ViECapture and ViERender interfaces.
// Idempotent: returns true immediately if the engine already exists.
// On failure, interfaces acquired so far stay in the EngineHelper and are
// released by CloseEngines() during shutdown.
bool
CamerasParent::SetupEngine(CaptureEngine aCapEngine)
{
  EngineHelper *helper = &mEngines[aCapEngine];
  // Already initialized
  if (helper->mEngine) {
    return true;
  }
  // Map our engine enum to the webrtc capture device type. The Config
  // below takes ownership of this allocation (see comment in EngineHelper).
  webrtc::CaptureDeviceInfo *captureDeviceInfo = nullptr;
  switch (aCapEngine) {
  case ScreenEngine:
    captureDeviceInfo =
      new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Screen);
    break;
  case BrowserEngine:
    captureDeviceInfo =
      new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Browser);
    break;
  case WinEngine:
    captureDeviceInfo =
      new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Window);
    break;
  case AppEngine:
    captureDeviceInfo =
      new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Application);
    break;
  case CameraEngine:
    captureDeviceInfo =
      new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Camera);
    break;
  default:
    // An engine value outside the enum means a malformed IPC message.
    LOG(("Invalid webrtc Video engine"));
    MOZ_CRASH();
    break;
  }
  helper->mConfig.Set<webrtc::CaptureDeviceInfo>(captureDeviceInfo);
  helper->mEngine = webrtc::VideoEngine::Create(helper->mConfig);
  if (!helper->mEngine) {
    LOG(("VideoEngine::Create failed"));
    return false;
  }
  helper->mPtrViEBase = webrtc::ViEBase::GetInterface(helper->mEngine);
  if (!helper->mPtrViEBase) {
    LOG(("ViEBase::GetInterface failed"));
    return false;
  }
  if (helper->mPtrViEBase->Init() < 0) {
    LOG(("ViEBase::Init failed"));
    return false;
  }
  helper->mPtrViECapture = webrtc::ViECapture::GetInterface(helper->mEngine);
  if (!helper->mPtrViECapture) {
    LOG(("ViECapture::GetInterface failed"));
    return false;
  }
  helper->mPtrViERender = webrtc::ViERender::GetInterface(helper->mEngine);
  if (!helper->mPtrViERender) {
    LOG(("ViERender::GetInterface failed"));
    return false;
  }
  return true;
}
// Stop all active captures and release every engine interface.
// Called from ActorDestroy and DoShutdown; safe to call more than once
// (released interface pointers are nulled out).
void
CamerasParent::CloseEngines()
{
  {
    MutexAutoLock lock(mCallbackMutex);
    // Stop the callers
    while (mCallbacks.Length()) {
      auto capEngine = mCallbacks[0]->mCapEngine;
      auto capNum = mCallbacks[0]->mCapturerId;
      LOG(("Forcing shutdown of engine %d, capturer %d", capEngine, capNum));
      {
        // Drop the lock while re-entering the Recv* handlers: the work they
        // dispatch takes mCallbackMutex itself (RecvStopCapture's runnable
        // removes this entry from mCallbacks).
        MutexAutoUnlock unlock(mCallbackMutex);
        RecvStopCapture(capEngine, capNum);
        RecvReleaseCaptureDevice(capEngine, capNum);
      }
      // The callbacks list might have changed while we released the lock,
      // but note that due to the loop construct this will not break us.
    }
  }
  // Release interfaces acquired in SetupEngine. The VideoEngine itself is
  // deleted later, in DoShutdown().
  for (int i = 0; i < CaptureEngine::MaxEngine; i++) {
    if (mEngines[i].mEngineIsRunning) {
      LOG(("Being closed down while engine %d is running!", i));
    }
    if (mEngines[i].mPtrViERender) {
      mEngines[i].mPtrViERender->Release();
      mEngines[i].mPtrViERender = nullptr;
    }
    if (mEngines[i].mPtrViECapture) {
      mEngines[i].mPtrViECapture->Release();
      mEngines[i].mPtrViECapture = nullptr;
    }
    if(mEngines[i].mPtrViEBase) {
      mEngines[i].mPtrViEBase->Release();
      mEngines[i].mPtrViEBase = nullptr;
    }
  }
}
// Guarantee that the engine backing |aEngine| has been created and
// initialized (see SetupEngine). Returns false if setup failed.
bool
CamerasParent::EnsureInitialized(int aEngine)
{
  LOG((__PRETTY_FUNCTION__));
  CaptureEngine capEngine = static_cast<CaptureEngine>(aEngine);
  return SetupEngine(capEngine);
}
// Dispatch the runnable to do the camera operation on the
// specific Cameras thread, preventing us from blocking, and
// chain a runnable to send back the result on the IPC thread.
// It would be nice to get rid of the code duplication here,
// perhaps via Promises.
// IPC (PBackground thread): count the capture devices for |aCapEngine|.
// The webrtc query runs on the VideoCapture thread to avoid blocking IPC;
// the reply is then dispatched back to the PBackground thread because IPC
// sends must happen there. Returning false tears down the channel.
bool
CamerasParent::RecvNumberOfCaptureDevices(const int& aCapEngine)
{
  LOG((__PRETTY_FUNCTION__));
  if (!EnsureInitialized(aCapEngine)) {
    LOG(("RecvNumberOfCaptureDevices fails to initialize"));
    unused << SendReplyFailure();
    return false;
  }
  nsRefPtr<CamerasParent> self(this);
  nsRefPtr<nsRunnable> webrtc_runnable =
    media::NewRunnableFrom([self, aCapEngine]() -> nsresult {
      int num = self->mEngines[aCapEngine].mPtrViECapture->NumberOfCaptureDevices();
      nsRefPtr<nsIRunnable> ipc_runnable =
        media::NewRunnableFrom([self, num]() -> nsresult {
          // The child may have gone away while we were on the webrtc thread.
          if (self->IsShuttingDown()) {
            return NS_ERROR_FAILURE;
          }
          if (num < 0) {
            LOG(("RecvNumberOfCaptureDevices couldn't find devices"));
            unused << self->SendReplyFailure();
            return NS_ERROR_FAILURE;
          } else {
            LOG(("RecvNumberOfCaptureDevices: %d", num));
            unused << self->SendReplyNumberOfCaptureDevices(num);
            return NS_OK;
          }
        });
      self->mPBackgroundThread->Dispatch(ipc_runnable, NS_DISPATCH_NORMAL);
      return NS_OK;
    });
  mVideoCaptureThread->message_loop()->PostTask(FROM_HERE, new RunnableTask(webrtc_runnable));
  return true;
}
// IPC (PBackground thread): count the capture capabilities of the device
// identified by |unique_id| on |aCapEngine|. Same two-thread hop as
// RecvNumberOfCaptureDevices: query on the VideoCapture thread, reply
// from the PBackground thread.
bool
CamerasParent::RecvNumberOfCapabilities(const int& aCapEngine,
                                        const nsCString& unique_id)
{
  LOG((__PRETTY_FUNCTION__));
  if (!EnsureInitialized(aCapEngine)) {
    LOG(("RecvNumberOfCapabilities fails to initialize"));
    unused << SendReplyFailure();
    return false;
  }
  LOG(("Getting caps for %s", unique_id.get()));
  nsRefPtr<CamerasParent> self(this);
  nsRefPtr<nsRunnable> webrtc_runnable =
    media::NewRunnableFrom([self, unique_id, aCapEngine]() -> nsresult {
      int num =
        self->mEngines[aCapEngine].mPtrViECapture->NumberOfCapabilities(
          unique_id.get(),
          MediaEngineSource::kMaxUniqueIdLength);
      nsRefPtr<nsIRunnable> ipc_runnable =
        media::NewRunnableFrom([self, num]() -> nsresult {
          if (self->IsShuttingDown()) {
            return NS_ERROR_FAILURE;
          }
          if (num < 0) {
            LOG(("RecvNumberOfCapabilities couldn't find capabilities"));
            unused << self->SendReplyFailure();
            return NS_ERROR_FAILURE;
          } else {
            LOG(("RecvNumberOfCapabilities: %d", num));
          }
          unused << self->SendReplyNumberOfCapabilities(num);
          return NS_OK;
        });
      self->mPBackgroundThread->Dispatch(ipc_runnable, NS_DISPATCH_NORMAL);
      return NS_OK;
    });
  mVideoCaptureThread->message_loop()->PostTask(FROM_HERE, new RunnableTask(webrtc_runnable));
  return true;
}
// IPC (PBackground thread): fetch capability |num| of the device
// |unique_id| on |aCapEngine| and reply with an IPC-serializable
// CaptureCapability. Query runs on the VideoCapture thread; the webrtc
// struct is copied into the lambda and converted on the PBackground thread.
bool
CamerasParent::RecvGetCaptureCapability(const int &aCapEngine,
                                        const nsCString& unique_id,
                                        const int& num)
{
  LOG((__PRETTY_FUNCTION__));
  if (!EnsureInitialized(aCapEngine)) {
    LOG(("Fails to initialize"));
    unused << SendReplyFailure();
    return false;
  }
  LOG(("RecvGetCaptureCapability: %s %d", unique_id.get(), num));
  nsRefPtr<CamerasParent> self(this);
  nsRefPtr<nsRunnable> webrtc_runnable =
    media::NewRunnableFrom([self, unique_id, aCapEngine, num]() -> nsresult {
      webrtc::CaptureCapability webrtcCaps;
      int error = self->mEngines[aCapEngine].mPtrViECapture->GetCaptureCapability(
        unique_id.get(), MediaEngineSource::kMaxUniqueIdLength, num, webrtcCaps);
      nsRefPtr<nsIRunnable> ipc_runnable =
        media::NewRunnableFrom([self, webrtcCaps, error]() -> nsresult {
          if (self->IsShuttingDown()) {
            return NS_ERROR_FAILURE;
          }
          // Convert webrtc's struct to the IPDL-generated one.
          CaptureCapability capCap(webrtcCaps.width,
                                   webrtcCaps.height,
                                   webrtcCaps.maxFPS,
                                   webrtcCaps.expectedCaptureDelay,
                                   webrtcCaps.rawType,
                                   webrtcCaps.codecType,
                                   webrtcCaps.interlaced);
          LOG(("Capability: %u %u %u %u %d %d",
               webrtcCaps.width,
               webrtcCaps.height,
               webrtcCaps.maxFPS,
               webrtcCaps.expectedCaptureDelay,
               webrtcCaps.rawType,
               webrtcCaps.codecType));
          if (error) {
            unused << self->SendReplyFailure();
            return NS_ERROR_FAILURE;
          }
          unused << self->SendReplyGetCaptureCapability(capCap);
          return NS_OK;
        });
      self->mPBackgroundThread->Dispatch(ipc_runnable, NS_DISPATCH_NORMAL);
      return NS_OK;
    });
  mVideoCaptureThread->message_loop()->PostTask(FROM_HERE, new RunnableTask(webrtc_runnable));
  return true;
}
// IPC (PBackground thread): look up the display name and unique id of the
// capture device at index |aListNumber| on |aCapEngine|, replying with
// both strings. Query runs on the VideoCapture thread.
bool
CamerasParent::RecvGetCaptureDevice(const int& aCapEngine,
                                    const int& aListNumber)
{
  LOG((__PRETTY_FUNCTION__));
  if (!EnsureInitialized(aCapEngine)) {
    LOG(("Fails to initialize"));
    unused << SendReplyFailure();
    return false;
  }
  LOG(("RecvGetCaptureDevice"));
  nsRefPtr<CamerasParent> self(this);
  nsRefPtr<nsRunnable> webrtc_runnable =
    media::NewRunnableFrom([self, aCapEngine, aListNumber]() -> nsresult {
      char deviceName[MediaEngineSource::kMaxDeviceNameLength];
      char deviceUniqueId[MediaEngineSource::kMaxUniqueIdLength];
      nsCString name;
      nsCString uniqueId;
      int error =
        self->mEngines[aCapEngine].mPtrViECapture->GetCaptureDevice(aListNumber,
                                                                    deviceName,
                                                                    sizeof(deviceName),
                                                                    deviceUniqueId,
                                                                    sizeof(deviceUniqueId));
      // Copy into nsCStrings so the reply lambda owns the data.
      if (!error) {
        name.Assign(deviceName);
        uniqueId.Assign(deviceUniqueId);
      }
      nsRefPtr<nsIRunnable> ipc_runnable =
        media::NewRunnableFrom([self, error, name, uniqueId]() -> nsresult {
          if (self->IsShuttingDown()) {
            return NS_ERROR_FAILURE;
          }
          if (error) {
            LOG(("GetCaptureDevice failed: %d", error));
            unused << self->SendReplyFailure();
            return NS_ERROR_FAILURE;
          }
          LOG(("Returning %s name %s id", name.get(), uniqueId.get()));
          unused << self->SendReplyGetCaptureDevice(name, uniqueId);
          return NS_OK;
        });
      self->mPBackgroundThread->Dispatch(ipc_runnable, NS_DISPATCH_NORMAL);
      return NS_OK;
    });
  mVideoCaptureThread->message_loop()->PostTask(FROM_HERE, new RunnableTask(webrtc_runnable));
  return true;
}
// IPC (PBackground thread): allocate the capture device identified by
// |unique_id| on |aCapEngine| and reply with the allocated capture id.
// Allocation runs on the VideoCapture thread; the reply is sent from the
// PBackground thread.
bool
CamerasParent::RecvAllocateCaptureDevice(const int& aCapEngine,
                                         const nsCString& unique_id)
{
  LOG((__PRETTY_FUNCTION__));
  if (!EnsureInitialized(aCapEngine)) {
    LOG(("Fails to initialize"));
    unused << SendReplyFailure();
    return false;
  }
  nsRefPtr<CamerasParent> self(this);
  nsRefPtr<nsRunnable> webrtc_runnable =
    media::NewRunnableFrom([self, aCapEngine, unique_id]() -> nsresult {
      // Initialize: on failure AllocateCaptureDevice may leave this
      // untouched, and the reply lambda below captures it either way.
      int numdev = -1;
      int error = self->mEngines[aCapEngine].mPtrViECapture->AllocateCaptureDevice(
        unique_id.get(), MediaEngineSource::kMaxUniqueIdLength, numdev);
      nsRefPtr<nsIRunnable> ipc_runnable =
        media::NewRunnableFrom([self, numdev, error]() -> nsresult {
          if (self->IsShuttingDown()) {
            return NS_ERROR_FAILURE;
          }
          if (error) {
            unused << self->SendReplyFailure();
            return NS_ERROR_FAILURE;
          } else {
            LOG(("Allocated device nr %d", numdev));
            unused << self->SendReplyAllocateCaptureDevice(numdev);
            return NS_OK;
          }
        });
      self->mPBackgroundThread->Dispatch(ipc_runnable, NS_DISPATCH_NORMAL);
      return NS_OK;
    });
  mVideoCaptureThread->message_loop()->PostTask(FROM_HERE, new RunnableTask(webrtc_runnable));
  return true;
}
// IPC (PBackground thread): release the previously allocated capture device
// |numdev| on |aCapEngine|. Also invoked internally from CloseEngines().
// The release normally runs on the VideoCapture thread; on OS X it must
// run on the main thread (see ifdef below).
bool
CamerasParent::RecvReleaseCaptureDevice(const int& aCapEngine,
                                        const int& numdev)
{
  LOG((__PRETTY_FUNCTION__));
  if (!EnsureInitialized(aCapEngine)) {
    LOG(("Fails to initialize"));
    unused << SendReplyFailure();
    return false;
  }
  nsRefPtr<CamerasParent> self(this);
  nsRefPtr<nsRunnable> webrtc_runnable =
    media::NewRunnableFrom([self, aCapEngine, numdev]() -> nsresult {
      LOG(("RecvReleaseCamera device nr %d", numdev));
      int error = self->mEngines[aCapEngine].mPtrViECapture->ReleaseCaptureDevice(numdev);
      nsRefPtr<nsIRunnable> ipc_runnable =
        media::NewRunnableFrom([self, error, numdev]() -> nsresult {
          if (self->IsShuttingDown()) {
            return NS_ERROR_FAILURE;
          }
          if (error) {
            unused << self->SendReplyFailure();
            return NS_ERROR_FAILURE;
          } else {
            unused << self->SendReplySuccess();
            LOG(("Freed device nr %d", numdev));
            return NS_OK;
          }
        });
      self->mPBackgroundThread->Dispatch(ipc_runnable, NS_DISPATCH_NORMAL);
      return NS_OK;
    });
#ifndef XP_MACOSX
  mVideoCaptureThread->message_loop()->PostTask(FROM_HERE, new RunnableTask(webrtc_runnable));
#else
  // Mac OS X hangs on shutdown if we don't do this on the main thread.
  NS_DispatchToMainThread(webrtc_runnable);
#endif
  return true;
}
// IPC (PBackground thread): start capturing on device |capnum| of
// |aCapEngine| with the requested capability. On the VideoCapture thread
// this registers a CallbackHelper as the external renderer (frames flow
// through CallbackHelper::DeliverFrame), starts rendering, then starts the
// capture itself. Replies success/failure from the PBackground thread.
bool
CamerasParent::RecvStartCapture(const int& aCapEngine,
                                const int& capnum,
                                const CaptureCapability& ipcCaps)
{
  LOG((__PRETTY_FUNCTION__));
  if (!EnsureInitialized(aCapEngine)) {
    LOG(("Failure to initialize"));
    unused << SendReplyFailure();
    return false;
  }
  nsRefPtr<CamerasParent> self(this);
  nsRefPtr<nsRunnable> webrtc_runnable =
    media::NewRunnableFrom([self, aCapEngine, capnum, ipcCaps]() -> nsresult {
      // mCallbacks is shared with the IPC thread (CloseEngines) and the
      // frame callbacks; guard the mutation.
      MutexAutoLock lock(self->mCallbackMutex);
      auto cbh = self->mCallbacks.AppendElement(
        new CallbackHelper(static_cast<CaptureEngine>(aCapEngine), capnum, self));
      auto render = static_cast<webrtc::ExternalRenderer*>(*cbh);
      EngineHelper* helper = &self->mEngines[aCapEngine];
      int error =
        helper->mPtrViERender->AddRenderer(capnum, webrtc::kVideoI420, render);
      if (!error) {
        error = helper->mPtrViERender->StartRender(capnum);
      }
      // Translate the IPC capability back into webrtc's struct.
      webrtc::CaptureCapability capability;
      capability.width = ipcCaps.width();
      capability.height = ipcCaps.height();
      capability.maxFPS = ipcCaps.maxFPS();
      capability.expectedCaptureDelay = ipcCaps.expectedCaptureDelay();
      capability.rawType = static_cast<webrtc::RawVideoType>(ipcCaps.rawType());
      capability.codecType = static_cast<webrtc::VideoCodecType>(ipcCaps.codecType());
      capability.interlaced = ipcCaps.interlaced();
      if (!error) {
        error = helper->mPtrViECapture->StartCapture(capnum, capability);
      }
      if (!error) {
        helper->mEngineIsRunning = true;
      }
      nsRefPtr<nsIRunnable> ipc_runnable =
        media::NewRunnableFrom([self, error]() -> nsresult {
          if (self->IsShuttingDown()) {
            return NS_ERROR_FAILURE;
          }
          if (!error) {
            unused << self->SendReplySuccess();
            return NS_OK;
          } else {
            unused << self->SendReplyFailure();
            return NS_ERROR_FAILURE;
          }
        });
      self->mPBackgroundThread->Dispatch(ipc_runnable, NS_DISPATCH_NORMAL);
      return NS_OK;
    });
  mVideoCaptureThread->message_loop()->PostTask(FROM_HERE, new RunnableTask(webrtc_runnable));
  return true;
}
// IPC (PBackground thread): stop capture |capnum| on |aCapEngine|, tear
// down its renderer and remove the matching CallbackHelper.
// Also invoked internally from CloseEngines(). Note the success reply is
// sent immediately, before the stop work has actually run on the
// VideoCapture thread.
bool
CamerasParent::RecvStopCapture(const int& aCapEngine,
                               const int& capnum)
{
  LOG((__PRETTY_FUNCTION__));
  if (!EnsureInitialized(aCapEngine)) {
    LOG(("Failure to initialize"));
    unused << SendReplyFailure();
    return false;
  }
  nsRefPtr<CamerasParent> self(this);
  nsRefPtr<nsRunnable> webrtc_runnable =
    media::NewRunnableFrom([self, aCapEngine, capnum]() -> nsresult {
      self->mEngines[aCapEngine].mPtrViECapture->StopCapture(capnum);
      self->mEngines[aCapEngine].mPtrViERender->StopRender(capnum);
      self->mEngines[aCapEngine].mPtrViERender->RemoveRenderer(capnum);
      self->mEngines[aCapEngine].mEngineIsRunning = false;
      // Delete the CallbackHelper for this capture; frames have stopped
      // flowing by now, so it can no longer be called back.
      MutexAutoLock lock(self->mCallbackMutex);
      for (unsigned int i = 0; i < self->mCallbacks.Length(); i++) {
        if (self->mCallbacks[i]->mCapEngine == aCapEngine
            && self->mCallbacks[i]->mCapturerId == capnum) {
          delete self->mCallbacks[i];
          self->mCallbacks.RemoveElementAt(i);
          break;
        }
      }
      return NS_OK;
    });
  mVideoCaptureThread->message_loop()->PostTask(FROM_HERE, new RunnableTask(webrtc_runnable));
  return SendReplySuccess();
}
// IPC (PBackground thread): the child requests teardown of this actor.
// Mark the child gone first so no further sends are attempted, then ask
// IPC to delete the protocol pair.
bool
CamerasParent::RecvAllDone()
{
  LOG((__PRETTY_FUNCTION__));
  // Don't try to send anything to the child now
  mChildIsAlive = false;
  bool deleted = Send__delete__(this);
  return deleted;
}
// Final teardown: stop captures, delete the webrtc engines, release the
// shmem pool and stop the VideoCapture thread. Called from the destructor.
void CamerasParent::DoShutdown()
{
  LOG((__PRETTY_FUNCTION__));
  CloseEngines();
  for (int i = 0; i < CaptureEngine::MaxEngine; i++) {
    if (mEngines[i].mEngine) {
      // Detach tracing before deletion so no trace callback fires into a
      // dying object.
      mEngines[i].mEngine->SetTraceCallback(nullptr);
      webrtc::VideoEngine::Delete(mEngines[i].mEngine);
      mEngines[i].mEngine = nullptr;
    }
  }
  // Deallocates all pooled Shmem segments through this actor.
  mShmemPool.Cleanup(this);
  mPBackgroundThread = nullptr;
  if (mVideoCaptureThread) {
    if (mVideoCaptureThread->IsRunning()) {
      mVideoCaptureThread->Stop();
    }
    delete mVideoCaptureThread;
    mVideoCaptureThread = nullptr;
  }
}
// IPC actor teardown notification. After this, no messages may be sent or
// received on this actor, so flag shutdown and stop the engines.
void
CamerasParent::ActorDestroy(ActorDestroyReason aWhy)
{
  // No more IPC from here
  LOG((__PRETTY_FUNCTION__));
  // We don't want to receive callbacks or anything if we can't
  // forward them anymore anyway.
  mChildIsAlive = false;
  mDestroyed = true;
  CloseEngines();
}
// Constructed on the PBackground thread (see Create()); records that thread
// for later IPC dispatches and spins up the dedicated VideoCapture thread
// on which all webrtc.org capture calls run.
CamerasParent::CamerasParent()
  : mCallbackMutex("CamerasParent.mCallbackMutex"),
    mShmemPool(CaptureEngine::MaxEngine),
    mVideoCaptureThread(nullptr),
    mChildIsAlive(true),
    mDestroyed(false)
{
  if (!gCamerasParentLog) {
    gCamerasParentLog = PR_NewLogModule("CamerasParent");
  }
  LOG(("CamerasParent: %p", this));
  // Remember the PBackground thread: all IPC sends must happen on it.
  mPBackgroundThread = NS_GetCurrentThread();
  MOZ_ASSERT(mPBackgroundThread != nullptr, "GetCurrentThread failed");
  LOG(("Spinning up WebRTC Cameras Thread"));
  mVideoCaptureThread = new base::Thread("VideoCapture");
  base::Thread::Options options;
#if defined(_WIN32)
  options.message_loop_type = MessageLoop::TYPE_MOZILLA_NONMAINUITHREAD;
#else
  options.message_loop_type = MessageLoop::TYPE_MOZILLA_NONMAINTHREAD;
#endif
  if (!mVideoCaptureThread->StartWithOptions(options)) {
    MOZ_CRASH();
  }
  MOZ_COUNT_CTOR(CamerasParent);
}
// Destructor: performs the full shutdown sequence (engines, shmem pool,
// capture thread). See DoShutdown().
CamerasParent::~CamerasParent()
{
  LOG(("~CamerasParent: %p", this));
  MOZ_COUNT_DTOR(CamerasParent);
  DoShutdown();
}
// Factory for the parent-side actor. Must run on the PBackground thread,
// which becomes the actor's IPC thread (captured in the constructor).
already_AddRefed<CamerasParent>
CamerasParent::Create() {
  mozilla::ipc::AssertIsOnBackgroundThread();
  nsRefPtr<CamerasParent> parent(new CamerasParent());
  return parent.forget();
}
}
}

View File

@ -0,0 +1,145 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set sw=2 ts=8 et ft=cpp : */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_CamerasParent_h
#define mozilla_CamerasParent_h
#include "mozilla/dom/ContentParent.h"
#include "mozilla/camera/PCamerasParent.h"
#include "mozilla/ipc/Shmem.h"
#include "mozilla/ShmemPool.h"
// conflicts with #include of scoped_ptr.h
#undef FF
#include "webrtc/common.h"
// Video Engine
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "CamerasChild.h"
#include "base/thread.h"
namespace mozilla {
namespace camera {
class CamerasParent;
// Receives raw frames from webrtc.org for one (engine, capturer) pair and
// forwards them to the CamerasParent for IPC delivery. One instance per
// active capture; owned by CamerasParent::mCallbacks.
class CallbackHelper : public webrtc::ExternalRenderer
{
public:
  CallbackHelper(CaptureEngine aCapEng, int aCapId, CamerasParent *aParent)
    : mCapEngine(aCapEng), mCapturerId(aCapId), mParent(aParent) {};

  // ViEExternalRenderer implementation. These callbacks end up
  // running on the VideoCapture thread.
  virtual int FrameSizeChange(unsigned int w, unsigned int h,
                              unsigned int streams) override;
  virtual int DeliverFrame(unsigned char* buffer,
                           int size,
                           uint32_t time_stamp,
                           int64_t ntp_time,
                           int64_t render_time,
                           void *handle) override;
  virtual bool IsTextureSupported() override { return false; };

  friend CamerasParent;

private:
  CaptureEngine mCapEngine;
  int mCapturerId;
  // Raw back-pointer; the parent owns us and outlives us.
  CamerasParent *mParent;
};
// Bundles one webrtc VideoEngine with the interfaces CamerasParent needs
// from it. The raw pointers are acquired in CamerasParent::SetupEngine and
// released in CloseEngines/DoShutdown.
class EngineHelper
{
public:
  EngineHelper() :
    mEngine(nullptr), mPtrViEBase(nullptr), mPtrViECapture(nullptr),
    mPtrViERender(nullptr), mEngineIsRunning(false) {};

  webrtc::VideoEngine *mEngine;
  webrtc::ViEBase *mPtrViEBase;
  webrtc::ViECapture *mPtrViECapture;
  webrtc::ViERender *mPtrViERender;
  // The webrtc code keeps a reference to this one.
  webrtc::Config mConfig;
  // Engine alive: set while a capture is running on this engine.
  bool mEngineIsRunning;
};
// Parent-side (chrome process) end of PCameras: proxies video capture
// requests from content onto a dedicated webrtc capture thread and ships
// frames back over shared memory. Lives on the PBackground thread.
class CamerasParent : public PCamerasParent
{
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(CamerasParent);
public:
  static already_AddRefed<CamerasParent> Create();

  // Messages received from the child. These run on the IPC/PBackground thread.
  virtual bool RecvAllocateCaptureDevice(const int&, const nsCString&) override;
  virtual bool RecvReleaseCaptureDevice(const int&, const int &) override;
  virtual bool RecvNumberOfCaptureDevices(const int&) override;
  virtual bool RecvNumberOfCapabilities(const int&, const nsCString&) override;
  virtual bool RecvGetCaptureCapability(const int&, const nsCString&, const int&) override;
  virtual bool RecvGetCaptureDevice(const int&, const int&) override;
  virtual bool RecvStartCapture(const int&, const int&, const CaptureCapability&) override;
  virtual bool RecvStopCapture(const int&, const int&) override;
  virtual bool RecvReleaseFrame(mozilla::ipc::Shmem&&) override;
  virtual bool RecvAllDone() override;
  virtual void ActorDestroy(ActorDestroyReason aWhy) override;

  // The PBackground thread all IPC sends must be dispatched to.
  nsIThread* GetBackgroundThread() { return mPBackgroundThread; };
  // True once the child is gone or the actor was destroyed.
  bool IsShuttingDown() { return !mChildIsAlive || mDestroyed; };
  // Non-blocking shmem handout for the capture callback (may be invalid).
  ShmemBuffer GetBuffer(size_t aSize);

  // helper to forward to the PBackground thread
  int DeliverFrameOverIPC(CaptureEngine capEng,
                          int cap_id,
                          ShmemBuffer buffer,
                          unsigned char* altbuffer,
                          int size,
                          uint32_t time_stamp,
                          int64_t ntp_time,
                          int64_t render_time);

  CamerasParent();

protected:
  virtual ~CamerasParent();

  bool SetupEngine(CaptureEngine aCapEngine);
  void CloseEngines();
  bool EnsureInitialized(int aEngine);
  void DoShutdown();

  // One lazily-initialized engine per CaptureEngine value.
  EngineHelper mEngines[CaptureEngine::MaxEngine];
  nsTArray<CallbackHelper*> mCallbacks;
  // Protects the callback arrays
  Mutex mCallbackMutex;
  // image buffers
  mozilla::ShmemPool mShmemPool;
  // PBackground parent thread
  nsCOMPtr<nsIThread> mPBackgroundThread;
  // video processing thread - where webrtc.org capturer code runs
  base::Thread* mVideoCaptureThread;
  // Shutdown handling
  bool mChildIsAlive;
  bool mDestroyed;
};
PCamerasParent* CreateCamerasParent();
} // namespace camera
} // namespace mozilla
#endif // mozilla_CamerasParent_h

View File

@ -0,0 +1,81 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set sw=2 ts=8 et ft=cpp : */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "mozilla/unused.h"
#include "nsThreadUtils.h"
#include "nsCOMPtr.h"
#include "mozilla/Assertions.h"
#include "mozilla/ipc/BackgroundChild.h"
#include "mozilla/ipc/PBackgroundChild.h"
#include "nsIIPCBackgroundChildCreateCallback.h"
namespace mozilla {
namespace camera {
// Callback used by SynchronouslyCreatePBackground: flips *mDone when the
// PBackground child actor is either created or fails, letting the caller's
// event-spinning loop terminate. Must be used off the main thread.
class WorkerBackgroundChildCallback final :
  public nsIIPCBackgroundChildCreateCallback
{
  // Borrowed flag owned by the caller's stack frame; valid for the
  // duration of the synchronous wait.
  bool* mDone;

public:
  explicit WorkerBackgroundChildCallback(bool* aDone)
    : mDone(aDone)
  {
    MOZ_ASSERT(!NS_IsMainThread());
    MOZ_ASSERT(mDone);
  }

  NS_DECL_ISUPPORTS

private:
  ~WorkerBackgroundChildCallback() { }

  // Success and failure are treated alike here: either way the wait is over.
  virtual void
  ActorCreated(PBackgroundChild* aActor) override
  {
    *mDone = true;
  }

  virtual void
  ActorFailed() override
  {
    *mDone = true;
  }
};
NS_IMPL_ISUPPORTS(WorkerBackgroundChildCallback, nsIIPCBackgroundChildCreateCallback)
// Create the PBackground child actor for the current (non-main) thread and
// block — by spinning this thread's event loop — until creation succeeds
// or fails. Returns NS_OK only when the actor is available afterwards.
nsresult
SynchronouslyCreatePBackground()
{
  using mozilla::ipc::BackgroundChild;

  // Caller must not already have an actor on this thread.
  MOZ_ASSERT(!BackgroundChild::GetForCurrentThread());

  bool done = false;
  nsCOMPtr<nsIIPCBackgroundChildCreateCallback> callback =
    new WorkerBackgroundChildCallback(&done);

  if (NS_WARN_IF(!BackgroundChild::GetOrCreateForCurrentThread(callback))) {
    return NS_ERROR_FAILURE;
  }

  // Spin the event loop until the callback has fired either way.
  nsIThread *thread = NS_GetCurrentThread();
  while (!done) {
    if (NS_WARN_IF(!NS_ProcessNextEvent(thread, true /* aMayWait */))) {
      return NS_ERROR_FAILURE;
    }
  }

  // ActorFailed also sets |done|; distinguish the two outcomes here.
  if (NS_WARN_IF(!BackgroundChild::GetForCurrentThread())) {
    return NS_ERROR_FAILURE;
  }

  return NS_OK;
}
}
}

View File

@ -0,0 +1,60 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set sw=2 ts=8 et ft=cpp : */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_CameraUtils_h
#define mozilla_CameraUtils_h
#include "nsThreadUtils.h"
#include "nsCOMPtr.h"
#include "mozilla/UniquePtr.h"
#include "base/thread.h"
namespace mozilla {
namespace camera {
nsresult SynchronouslyCreatePBackground();
// Runnable that shuts down a thread when executed. Dispatch it to another
// thread to avoid calling Shutdown() on the thread being shut down.
class ThreadDestructor : public nsRunnable
{
  DISALLOW_COPY_AND_ASSIGN(ThreadDestructor);

public:
  explicit ThreadDestructor(nsIThread* aThread)
    : mThread(aThread) {}

  NS_IMETHOD Run() override
  {
    if (mThread) {
      mThread->Shutdown();
    }
    return NS_OK;
  }

private:
  ~ThreadDestructor() {}
  // Strong reference keeps the thread alive until we've shut it down.
  nsCOMPtr<nsIThread> mThread;
};
// Adapter: wraps an nsRunnable as a chromium-ipc Task so XPCOM runnables
// can be posted to a base::Thread MessageLoop (see CamerasParent).
class RunnableTask : public Task
{
public:
  explicit RunnableTask(nsRunnable* aRunnable)
    : mRunnable(aRunnable) {}

  void Run() override {
    mRunnable->Run();
  }

private:
  ~RunnableTask() {}
  nsRefPtr<nsRunnable> mRunnable;
};
}
}
#endif // mozilla_CameraUtils_h

View File

@ -0,0 +1,60 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
include protocol PContent;
include protocol PBackground;
namespace mozilla {
namespace camera {
// IPC-serializable mirror of webrtc::CaptureCapability.
// rawType and codecType carry the integer values of webrtc's
// RawVideoType / VideoCodecType enums.
struct CaptureCapability
{
  int width;
  int height;
  int maxFPS;
  int expectedCaptureDelay;
  int rawType;
  int codecType;
  bool interlaced;
};
// Proxies video capture access from content (child) to the chrome
// process (parent). All device queries/commands flow child -> parent;
// frames flow back in Shmem buffers that the child must return via
// ReleaseFrame. Most parent messages are answered by exactly one
// Reply* message (or ReplyFailure).
async protocol PCameras
{
  manager PBackground;
child:
  async FrameSizeChange(int capEngine, int cap_id, int w, int h);
  // transfers ownership of |buffer| from parent to child
  async DeliverFrame(int capEngine, int cap_id,
                     Shmem buffer, int size, uint32_t time_stamp,
                     int64_t ntp_time, int64_t render_time);
  async ReplyNumberOfCaptureDevices(int numdev);
  async ReplyNumberOfCapabilities(int numdev);
  async ReplyAllocateCaptureDevice(int numdev);
  async ReplyGetCaptureCapability(CaptureCapability cap);
  async ReplyGetCaptureDevice(nsCString device_name, nsCString device_id);
  async ReplyFailure();
  async ReplySuccess();
  async __delete__();
parent:
  async NumberOfCaptureDevices(int engine);
  async NumberOfCapabilities(int engine, nsCString deviceUniqueIdUTF8);
  async GetCaptureCapability(int engine, nsCString unique_idUTF8, int capability_number);
  async GetCaptureDevice(int engine, int num);
  async AllocateCaptureDevice(int engine, nsCString unique_idUTF8);
  async ReleaseCaptureDevice(int engine, int numdev);
  async StartCapture(int engine, int numdev, CaptureCapability capability);
  async StopCapture(int engine, int numdev);
  // transfers frame back
  async ReleaseFrame(Shmem s);
  // Ask parent to delete us
  async AllDone();
};
} // namespace camera
} // namespace mozilla

View File

@ -0,0 +1,128 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set sw=2 ts=8 et ft=cpp : */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "mozilla/Assertions.h"
#include "mozilla/Logging.h"
#include "mozilla/ShmemPool.h"
#include "mozilla/Move.h"
#undef LOG
#undef LOG_ENABLED
#define LOG(args) MOZ_LOG(gCamerasParentLog, mozilla::LogLevel::Debug, args)
#define LOG_ENABLED() MOZ_LOG_TEST(gCamerasParentLog, mozilla::LogLevel::Debug)
namespace mozilla {
// Create a pool with |aPoolSize| buffer slots. Slots start uninitialized;
// actual Shmem segments are allocated lazily in Get().
ShmemPool::ShmemPool(size_t aPoolSize)
  : mMutex("mozilla::ShmemPool"),
    mPoolFree(aPoolSize)
#ifdef DEBUG
    ,mMaxPoolUse(0)
#endif
{
  mShmemPool.SetLength(aPoolSize);
}
// Non-allocating variant of Get(): hand out the top free buffer only if it
// already exists and is large enough; otherwise return an invalid
// ShmemBuffer. Safe to call from any thread (never touches IPC allocation).
mozilla::ShmemBuffer ShmemPool::GetIfAvailable(size_t aSize)
{
  MutexAutoLock lock(mMutex);

  // Pool is empty, don't block caller.
  if (mPoolFree == 0) {
    // This isn't initialized, so will be understood as an error.
    return ShmemBuffer();
  }

  ShmemBuffer& res = mShmemPool[mPoolFree - 1];

  if (!res.mInitialized) {
    return ShmemBuffer();
  }

  MOZ_ASSERT(res.mShmem.IsWritable(), "Pool in Shmem is not writable?");

  if (res.mShmem.Size<char>() < aSize) {
    return ShmemBuffer();
  }

  mPoolFree--;
#ifdef DEBUG
  size_t poolUse = mShmemPool.Length() - mPoolFree;
  if (poolUse > mMaxPoolUse) {
    mMaxPoolUse = poolUse;
    // mMaxPoolUse is size_t; %d was a format mismatch on LP64 targets.
    LOG(("Maximum ShmemPool use increased: %lu buffers",
         static_cast<unsigned long>(mMaxPoolUse)));
  }
#endif
  return Move(res);
}
// Take the top free buffer from the pool, allocating or growing its Shmem
// segment through |aInstance| (an IPC actor providing AllocShmem /
// DeallocShmem) as needed. Must be called on that actor's IPC thread.
// Returns an invalid ShmemBuffer if the pool is exhausted or allocation
// fails.
template <class T>
mozilla::ShmemBuffer ShmemPool::Get(T* aInstance, size_t aSize)
{
  MutexAutoLock lock(mMutex);

  // Pool is empty, don't block caller.
  if (mPoolFree == 0) {
    // This isn't initialized, so will be understood as an error.
    return ShmemBuffer();
  }

  ShmemBuffer res = Move(mShmemPool[mPoolFree - 1]);

  if (!res.mInitialized) {
    LOG(("Initializing new Shmem in pool"));
    // This may fail; don't hand out a buffer wrapping an unusable Shmem.
    if (!aInstance->AllocShmem(aSize, SharedMemory::TYPE_BASIC, &res.mShmem)) {
      LOG(("Failure allocating Shmem buffer"));
      return ShmemBuffer();
    }
    res.mInitialized = true;
  }

  MOZ_ASSERT(res.mShmem.IsWritable(), "Pool in Shmem is not writable?");

  // Prepare buffer, increase size if needed (we never shrink as we don't
  // maintain seperate sized pools and we don't want to keep reallocating)
  if (res.mShmem.Size<char>() < aSize) {
    LOG(("Size change/increase in Shmem Pool"));
    aInstance->DeallocShmem(res.mShmem);
    // this may fail; always check return value
    if (!aInstance->AllocShmem(aSize, SharedMemory::TYPE_BASIC, &res.mShmem)) {
      LOG(("Failure allocating new size Shmem buffer"));
      return ShmemBuffer();
    }
  }

  mPoolFree--;
  return res;
}
// Return a buffer to the pool; the caller relinquishes ownership.
void ShmemPool::Put(ShmemBuffer&& aShmem)
{
  MutexAutoLock lock(mMutex);
  MOZ_ASSERT(mPoolFree < mShmemPool.Length());
  ShmemBuffer& slot = mShmemPool[mPoolFree];
  slot = Move(aShmem);
  ++mPoolFree;
}
// Deallocate every pool-owned Shmem segment through |aInstance|'s
// DeallocShmem. Must run before the owning IPC actor goes away.
// Buffers currently handed out are not tracked here.
template <class T>
void ShmemPool::Cleanup(T* aInstance)
{
  MutexAutoLock lock(mMutex);
  for (size_t i = 0; i < mShmemPool.Length(); i++) {
    if (mShmemPool[i].mInitialized) {
      aInstance->DeallocShmem(mShmemPool[i].Get());
      mShmemPool[i].mInitialized = false;
    }
  }
}
// Debug-only sanity check: Cleanup() must have run, so no slot should
// still hold a live Shmem at destruction time.
ShmemPool::~ShmemPool()
{
#ifdef DEBUG
  for (size_t i = 0; i < mShmemPool.Length(); i++) {
    MOZ_ASSERT(!mShmemPool[i].Valid());
  }
#endif
}
} // namespace mozilla

View File

@ -0,0 +1,85 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set sw=2 ts=8 et ft=cpp : */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_ShmemPool_h
#define mozilla_ShmemPool_h
#include "mozilla/ipc/Shmem.h"
#include "mozilla/Mutex.h"
namespace mozilla {
class ShmemPool;
// Movable, non-copyable wrapper around a mozilla::ipc::Shmem segment.
// A default-constructed instance is the "no buffer" error value; callers
// distinguish it via Valid().
class ShmemBuffer {
public:
  ShmemBuffer() : mInitialized(false) {}
  explicit ShmemBuffer(mozilla::ipc::Shmem aShmem) {
    mInitialized = true;
    mShmem = aShmem;
  }

  ShmemBuffer(ShmemBuffer&& rhs) {
    mInitialized = rhs.mInitialized;
    mShmem = Move(rhs.mShmem);
    // Reset the source: a moved-from buffer must report !Valid(), otherwise
    // pool bookkeeping (GetIfAvailable, the ~ShmemPool assert) can treat a
    // gutted slot as live.
    rhs.mInitialized = false;
  }

  ShmemBuffer& operator=(ShmemBuffer&& rhs) {
    MOZ_ASSERT(&rhs != this, "self-moves are prohibited");
    mInitialized = rhs.mInitialized;
    mShmem = Move(rhs.mShmem);
    // See move constructor: invalidate the source.
    rhs.mInitialized = false;
    return *this;
  }

  // No copies allowed
  ShmemBuffer(const ShmemBuffer&) = delete;
  ShmemBuffer& operator=(const ShmemBuffer&) = delete;

  // True when this wraps a real (allocated) Shmem segment.
  bool Valid() {
    return mInitialized;
  }

  char* GetBytes() {
    return mShmem.get<char>();
  }

  mozilla::ipc::Shmem& Get() {
    return mShmem;
  }

private:
  friend class ShmemPool;

  bool mInitialized;
  mozilla::ipc::Shmem mShmem;
};
// Fixed-slot pool of reusable Shmem buffers, used to ship video frames
// across IPC without per-frame allocation. Thread-safe via mMutex; actual
// (de)allocation goes through the owning IPC actor passed to Get/Cleanup.
class ShmemPool {
public:
  explicit ShmemPool(size_t aPoolSize);
  ~ShmemPool();
  // We need to use the allocation/deallocation functions
  // of a specific IPC child/parent instance.
  template <class T> void Cleanup(T* aInstance);
  // These 2 differ in what thread they can run on. GetIfAvailable
  // can run anywhere but won't allocate if the right size isn't available.
  ShmemBuffer GetIfAvailable(size_t aSize);
  template <class T> ShmemBuffer Get(T* aInstance, size_t aSize);
  void Put(ShmemBuffer&& aShmem);
private:
  Mutex mMutex;
  // Number of free slots; free buffers live at indices [0, mPoolFree).
  size_t mPoolFree;
#ifdef DEBUG
  // High-water mark of simultaneously handed-out buffers.
  size_t mMaxPoolUse;
#endif
  nsTArray<ShmemBuffer> mShmemPool;
};
} // namespace mozilla
#endif // mozilla_ShmemPool_h

View File

@ -5,15 +5,25 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
if CONFIG['MOZ_WEBRTC']:
EXPORTS += ['LoadManager.h',
EXPORTS += ['CamerasChild.h',
'CamerasParent.h',
'CamerasUtils.h',
'LoadManager.h',
'LoadManagerFactory.h',
'LoadMonitor.h',
]
UNIFIED_SOURCES += ['LoadManager.cpp',
UNIFIED_SOURCES += ['CamerasChild.cpp',
'CamerasParent.cpp',
'CamerasUtils.cpp',
'LoadManager.cpp',
'LoadManagerFactory.cpp',
'LoadMonitor.cpp',
]
IPDL_SOURCES = [
'PCameras.ipdl',
]
LOCAL_INCLUDES += [
'/media/webrtc/signaling',
'/media/webrtc/trunk',
]
@ -37,6 +47,11 @@ if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gonk':
]
]
if CONFIG['_MSC_VER']:
DEFINES['__PRETTY_FUNCTION__'] = '__FUNCSIG__'
EXPORTS.mozilla += ['ShmemPool.h',]
EXPORTS.mozilla.media += ['MediaChild.h',
'MediaParent.h',
'MediaSystemResourceClient.h',
@ -46,7 +61,7 @@ EXPORTS.mozilla.media += ['MediaChild.h',
'MediaSystemResourceMessageUtils.h',
'MediaSystemResourceService.h',
'MediaSystemResourceTypes.h',
'MediaUtils.h'
'MediaUtils.h',
]
UNIFIED_SOURCES += ['MediaChild.cpp',
'MediaParent.cpp',
@ -56,6 +71,7 @@ UNIFIED_SOURCES += ['MediaChild.cpp',
'MediaSystemResourceManagerParent.cpp',
'MediaSystemResourceService.cpp',
'MediaUtils.cpp',
'ShmemPool.cpp',
]
IPDL_SOURCES += [
'PMedia.ipdl',

View File

@ -30,7 +30,6 @@ public:
, mHasDirectListeners(false)
, mCaptureIndex(aIndex)
, mTrackID(0)
, mFps(-1)
{}
@ -50,10 +49,6 @@ public:
return false;
}
virtual const dom::MediaSourceEnum GetMediaSource() override {
return dom::MediaSourceEnum::Camera;
}
virtual nsresult TakePhoto(PhotoCallback* aCallback) override
{
return NS_ERROR_NOT_IMPLEMENTED;
@ -119,7 +114,6 @@ protected:
bool mHasDirectListeners;
int mCaptureIndex;
TrackID mTrackID;
int mFps; // Track rate (30 fps by default)
webrtc::CaptureCapability mCapability; // Doesn't work on OS X.

View File

@ -70,6 +70,9 @@ public:
SourceMediaStream* aSource,
TrackID aId,
StreamTime aDesiredTime) override;
virtual const dom::MediaSourceEnum GetMediaSource() override {
return dom::MediaSourceEnum::Camera;
}
void OnHardwareStateChange(HardwareState aState, nsresult aReason) override;
void GetRotation();

View File

@ -0,0 +1,398 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaEngineRemoteVideoSource.h"
#include "mozilla/RefPtr.h"
#include "VideoUtils.h"
#include "nsIPrefService.h"
#include "MediaTrackConstraints.h"
#include "CamerasChild.h"
extern PRLogModuleInfo* GetMediaManagerLog();
#define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
#define LOGFRAME(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)
namespace mozilla {
using dom::ConstrainLongRange;
NS_IMPL_ISUPPORTS0(MediaEngineRemoteVideoSource)
MediaEngineRemoteVideoSource::MediaEngineRemoteVideoSource(
int aIndex, mozilla::camera::CaptureEngine aCapEngine,
dom::MediaSourceEnum aMediaSource, const char* aMonitorName)
: MediaEngineCameraVideoSource(aIndex, aMonitorName),
mMediaSource(aMediaSource),
mCapEngine(aCapEngine)
{
MOZ_ASSERT(aMediaSource != dom::MediaSourceEnum::Other);
Init();
}
void
MediaEngineRemoteVideoSource::Init()
{
LOG((__PRETTY_FUNCTION__));
char deviceName[kMaxDeviceNameLength];
char uniqueId[kMaxUniqueIdLength];
if (mozilla::camera::GetCaptureDevice(mCapEngine,
mCaptureIndex,
deviceName, kMaxDeviceNameLength,
uniqueId, kMaxUniqueIdLength)) {
LOG(("Error initializing RemoteVideoSource (GetCaptureDevice)"));
return;
}
SetName(NS_ConvertUTF8toUTF16(deviceName));
SetUUID(uniqueId);
mInitDone = true;
return;
}
void
MediaEngineRemoteVideoSource::Shutdown()
{
LOG((__PRETTY_FUNCTION__));
if (!mInitDone) {
return;
}
if (mState == kStarted) {
SourceMediaStream *source;
bool empty;
while (1) {
{
MonitorAutoLock lock(mMonitor);
empty = mSources.IsEmpty();
if (empty) {
break;
}
source = mSources[0];
}
Stop(source, kVideoTrack); // XXX change to support multiple tracks
}
MOZ_ASSERT(mState == kStopped);
}
if (mState == kAllocated || mState == kStopped) {
Deallocate();
}
mozilla::camera::Shutdown();
mState = kReleased;
mInitDone = false;
return;
}
nsresult
MediaEngineRemoteVideoSource::Allocate(const dom::MediaTrackConstraints& aConstraints,
const MediaEnginePrefs& aPrefs,
const nsString& aDeviceId)
{
LOG((__PRETTY_FUNCTION__));
if (mState == kReleased && mInitDone) {
// Note: if shared, we don't allow a later opener to affect the resolution.
// (This may change depending on spec changes for Constraints/settings)
if (!ChooseCapability(aConstraints, aPrefs, aDeviceId)) {
return NS_ERROR_UNEXPECTED;
}
if (mozilla::camera::AllocateCaptureDevice(mCapEngine,
GetUUID().get(),
kMaxUniqueIdLength, mCaptureIndex)) {
return NS_ERROR_FAILURE;
}
mState = kAllocated;
LOG(("Video device %d allocated", mCaptureIndex));
} else if (MOZ_LOG_TEST(GetMediaManagerLog(), mozilla::LogLevel::Debug)) {
MonitorAutoLock lock(mMonitor);
if (mSources.IsEmpty()) {
LOG(("Video device %d reallocated", mCaptureIndex));
} else {
LOG(("Video device %d allocated shared", mCaptureIndex));
}
}
return NS_OK;
}
nsresult
MediaEngineRemoteVideoSource::Deallocate()
{
LOG((__FUNCTION__));
bool empty;
{
MonitorAutoLock lock(mMonitor);
empty = mSources.IsEmpty();
}
if (empty) {
if (mState != kStopped && mState != kAllocated) {
return NS_ERROR_FAILURE;
}
mozilla::camera::ReleaseCaptureDevice(mCapEngine, mCaptureIndex);
mState = kReleased;
LOG(("Video device %d deallocated", mCaptureIndex));
} else {
LOG(("Video device %d deallocated but still in use", mCaptureIndex));
}
return NS_OK;
}
nsresult
MediaEngineRemoteVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
{
LOG((__PRETTY_FUNCTION__));
if (!mInitDone || !aStream) {
LOG(("No stream or init not done"));
return NS_ERROR_FAILURE;
}
{
MonitorAutoLock lock(mMonitor);
mSources.AppendElement(aStream);
}
aStream->AddTrack(aID, 0, new VideoSegment(), SourceMediaStream::ADDTRACK_QUEUED);
if (mState == kStarted) {
return NS_OK;
}
mImageContainer = layers::LayerManager::CreateImageContainer();
mState = kStarted;
mTrackID = aID;
if (mozilla::camera::StartCapture(mCapEngine,
mCaptureIndex, mCapability, this)) {
LOG(("StartCapture failed"));
return NS_ERROR_FAILURE;
}
return NS_OK;
}
nsresult
MediaEngineRemoteVideoSource::Stop(mozilla::SourceMediaStream* aSource,
mozilla::TrackID aID)
{
LOG((__PRETTY_FUNCTION__));
{
MonitorAutoLock lock(mMonitor);
if (!mSources.RemoveElement(aSource)) {
// Already stopped - this is allowed
return NS_OK;
}
aSource->EndTrack(aID);
if (!mSources.IsEmpty()) {
return NS_OK;
}
if (mState != kStarted) {
return NS_ERROR_FAILURE;
}
mState = kStopped;
// Drop any cached image so we don't start with a stale image on next
// usage
mImage = nullptr;
}
mozilla::camera::StopCapture(mCapEngine, mCaptureIndex);
return NS_OK;
}
void
MediaEngineRemoteVideoSource::NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aID, StreamTime aDesiredTime)
{
VideoSegment segment;
MonitorAutoLock lock(mMonitor);
StreamTime delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);
if (delta > 0) {
// nullptr images are allowed
AppendToTrack(aSource, mImage, aID, delta);
}
}
int
MediaEngineRemoteVideoSource::FrameSizeChange(unsigned int w, unsigned int h,
unsigned int streams)
{
mWidth = w;
mHeight = h;
LOG(("MediaEngineRemoteVideoSource Video FrameSizeChange: %ux%u", w, h));
return 0;
}
int
MediaEngineRemoteVideoSource::DeliverFrame(unsigned char* buffer,
int size,
uint32_t time_stamp,
int64_t ntp_time,
int64_t render_time,
void *handle)
{
// Check for proper state.
if (mState != kStarted) {
LOG(("DeliverFrame: video not started"));
return 0;
}
if (mWidth*mHeight + 2*(((mWidth+1)/2)*((mHeight+1)/2)) != size) {
MOZ_ASSERT(false, "Wrong size frame in DeliverFrame!");
return 0;
}
// Create a video frame and append it to the track.
nsRefPtr<layers::Image> image = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
layers::PlanarYCbCrImage* videoImage = static_cast<layers::PlanarYCbCrImage*>(image.get());
uint8_t* frame = static_cast<uint8_t*> (buffer);
const uint8_t lumaBpp = 8;
const uint8_t chromaBpp = 4;
// Take lots of care to round up!
layers::PlanarYCbCrData data;
data.mYChannel = frame;
data.mYSize = IntSize(mWidth, mHeight);
data.mYStride = (mWidth * lumaBpp + 7)/ 8;
data.mCbCrStride = (mWidth * chromaBpp + 7) / 8;
data.mCbChannel = frame + mHeight * data.mYStride;
data.mCrChannel = data.mCbChannel + ((mHeight+1)/2) * data.mCbCrStride;
data.mCbCrSize = IntSize((mWidth+1)/ 2, (mHeight+1)/ 2);
data.mPicX = 0;
data.mPicY = 0;
data.mPicSize = IntSize(mWidth, mHeight);
data.mStereoMode = StereoMode::MONO;
videoImage->SetData(data);
#ifdef DEBUG
static uint32_t frame_num = 0;
LOGFRAME(("frame %d (%dx%d); timestamp %u, ntp_time %lu, render_time %lu", frame_num++,
mWidth, mHeight, time_stamp, ntp_time, render_time));
#endif
// we don't touch anything in 'this' until here (except for snapshot,
// which has it's own lock)
MonitorAutoLock lock(mMonitor);
// implicitly releases last image
mImage = image.forget();
// Push the frame into the MSG with a minimal duration. This will likely
// mean we'll still get NotifyPull calls which will then return the same
// frame again with a longer duration. However, this means we won't
// fail to get the frame in and drop frames.
// XXX The timestamp for the frame should be based on the Capture time,
// not the MSG time, and MSG should never, ever block on a (realtime)
// video frame (or even really for streaming - audio yes, video probably no).
// Note that MediaPipeline currently ignores the timestamps from MSG
uint32_t len = mSources.Length();
for (uint32_t i = 0; i < len; i++) {
if (mSources[i]) {
AppendToTrack(mSources[i], mImage, mTrackID, 1); // shortest possible duration
}
}
return 0;
}
size_t
MediaEngineRemoteVideoSource::NumCapabilities()
{
int num = mozilla::camera::NumberOfCapabilities(mCapEngine, GetUUID().get());
if (num > 0) {
return num;
}
switch(mMediaSource) {
case dom::MediaSourceEnum::Camera:
#ifdef XP_MACOSX
// Mac doesn't support capabilities.
//
// Hardcode generic desktop capabilities modeled on OSX camera.
// Note: Values are empirically picked to be OSX friendly, as on OSX, values
// other than these cause the source to not produce.
if (mHardcodedCapabilities.IsEmpty()) {
for (int i = 0; i < 9; i++) {
webrtc::CaptureCapability c;
c.width = 1920 - i*128;
c.height = 1080 - i*72;
c.maxFPS = 30;
mHardcodedCapabilities.AppendElement(c);
}
for (int i = 0; i < 16; i++) {
webrtc::CaptureCapability c;
c.width = 640 - i*40;
c.height = 480 - i*30;
c.maxFPS = 30;
mHardcodedCapabilities.AppendElement(c);
}
}
break;
#endif
default:
webrtc::CaptureCapability c;
// The default for devices that don't return discrete capabilities: treat
// them as supporting all capabilities orthogonally. E.g. screensharing.
c.width = 0; // 0 = accept any value
c.height = 0;
c.maxFPS = 0;
mHardcodedCapabilities.AppendElement(c);
break;
}
return mHardcodedCapabilities.Length();
}
void
MediaEngineRemoteVideoSource::GetCapability(size_t aIndex,
webrtc::CaptureCapability& aOut)
{
if (!mHardcodedCapabilities.IsEmpty()) {
MediaEngineCameraVideoSource::GetCapability(aIndex, aOut);
}
mozilla::camera::GetCaptureCapability(mCapEngine,
GetUUID().get(),
aIndex,
aOut);
}
void MediaEngineRemoteVideoSource::Refresh(int aIndex) {
// NOTE: mCaptureIndex might have changed when allocated!
// Use aIndex to update information, but don't change mCaptureIndex!!
// Caller looked up this source by uniqueId, so it shouldn't change
char deviceName[kMaxDeviceNameLength];
char uniqueId[kMaxUniqueIdLength];
if (mozilla::camera::GetCaptureDevice(mCapEngine,
aIndex,
deviceName, sizeof(deviceName),
uniqueId, sizeof(uniqueId))) {
return;
}
SetName(NS_ConvertUTF8toUTF16(deviceName));
#ifdef DEBUG
MOZ_ASSERT(GetUUID().Equals(uniqueId));
#endif
}
}

View File

@ -0,0 +1,101 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set sw=2 ts=8 et ft=cpp : */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MEDIAENGINE_REMOTE_VIDEO_SOURCE_H_
#define MEDIAENGINE_REMOTE_VIDEO_SOURCE_H_
#include "prcvar.h"
#include "prthread.h"
#include "nsIThread.h"
#include "nsIRunnable.h"
#include "mozilla/Mutex.h"
#include "mozilla/Monitor.h"
#include "nsCOMPtr.h"
#include "nsThreadUtils.h"
#include "DOMMediaStream.h"
#include "nsDirectoryServiceDefs.h"
#include "nsComponentManagerUtils.h"
#include "VideoUtils.h"
#include "MediaEngineCameraVideoSource.h"
#include "VideoSegment.h"
#include "AudioSegment.h"
#include "StreamBuffer.h"
#include "MediaStreamGraph.h"
#include "MediaEngineWrapper.h"
#include "mozilla/dom/MediaStreamTrackBinding.h"
// WebRTC library includes follow
#include "webrtc/common.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "CamerasChild.h"
#include "NullTransport.h"
namespace mozilla {
/**
* The WebRTC implementation of the MediaEngine interface.
*/
class MediaEngineRemoteVideoSource : public MediaEngineCameraVideoSource,
public webrtc::ExternalRenderer
{
public:
NS_DECL_THREADSAFE_ISUPPORTS
// ExternalRenderer
virtual int FrameSizeChange(unsigned int w, unsigned int h,
unsigned int streams) override;
virtual int DeliverFrame(unsigned char* buffer,
int size,
uint32_t time_stamp,
int64_t ntp_time,
int64_t render_time,
void *handle) override;
virtual bool IsTextureSupported() override { return false; };
// MediaEngineCameraVideoSource
MediaEngineRemoteVideoSource(int aIndex, mozilla::camera::CaptureEngine aCapEngine,
dom::MediaSourceEnum aMediaSource,
const char* aMonitorName = "RemoteVideo.Monitor");
virtual nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
const MediaEnginePrefs& aPrefs,
const nsString& aDeviceId) override;
virtual nsresult Deallocate() override;;
virtual nsresult Start(SourceMediaStream*, TrackID) override;
virtual nsresult Stop(SourceMediaStream*, TrackID) override;
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aId,
StreamTime aDesiredTime) override;
virtual const dom::MediaSourceEnum GetMediaSource() override {
return mMediaSource;
}
void Refresh(int aIndex);
virtual void Shutdown() override;
protected:
~MediaEngineRemoteVideoSource() { Shutdown(); }
private:
// Initialize the needed Video engine interfaces.
void Init();
size_t NumCapabilities() override;
void GetCapability(size_t aIndex, webrtc::CaptureCapability& aOut) override;
dom::MediaSourceEnum mMediaSource; // source of media (camera | application | screen)
mozilla::camera::CaptureEngine mCapEngine;
};
}
#endif /* MEDIAENGINE_REMOTE_VIDEO_SOURCE_H_ */

View File

@ -1,9 +1,12 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set sw=2 ts=8 et ft=cpp : */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "nsIPrefService.h"
#include "nsIPrefBranch.h"
#include "CamerasUtils.h"
#include "CSFLog.h"
#include "prenv.h"
@ -23,6 +26,8 @@ GetUserMediaLog()
#include "ImageContainer.h"
#include "nsIComponentRegistrar.h"
#include "MediaEngineTabVideoSource.h"
#include "MediaEngineRemoteVideoSource.h"
#include "CamerasChild.h"
#include "nsITabSource.h"
#include "MediaTrackConstraints.h"
@ -42,18 +47,9 @@ GetUserMediaLog()
namespace mozilla {
MediaEngineWebRTC::MediaEngineWebRTC(MediaEnginePrefs &aPrefs)
: mMutex("mozilla::MediaEngineWebRTC")
, mScreenEngine(nullptr)
, mBrowserEngine(nullptr)
, mWinEngine(nullptr)
, mAppEngine(nullptr)
, mVideoEngine(nullptr)
, mVoiceEngine(nullptr)
, mVideoEngineInit(false)
, mAudioEngineInit(false)
, mScreenEngineInit(false)
, mBrowserEngineInit(false)
, mAppEngineInit(false)
: mMutex("mozilla::MediaEngineWebRTC"),
mVoiceEngine(nullptr),
mAudioEngineInit(false)
{
#ifndef MOZ_B2G_CAMERA
nsCOMPtr<nsIComponentRegistrar> compMgr;
@ -122,11 +118,7 @@ MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
return;
#else
ScopedCustomReleasePtr<webrtc::ViEBase> ptrViEBase;
ScopedCustomReleasePtr<webrtc::ViECapture> ptrViECapture;
webrtc::Config configSet;
webrtc::VideoEngine *videoEngine = nullptr;
bool *videoEngineInit = nullptr;
mozilla::camera::CaptureEngine capEngine = mozilla::camera::InvalidEngine;
#ifdef MOZ_WIDGET_ANDROID
// get the JVM
@ -142,74 +134,24 @@ MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
switch (aMediaSource) {
case dom::MediaSourceEnum::Window:
mWinEngineConfig.Set<webrtc::CaptureDeviceInfo>(
new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Window));
if (!mWinEngine) {
if (!(mWinEngine = webrtc::VideoEngine::Create(mWinEngineConfig))) {
return;
}
}
videoEngine = mWinEngine;
videoEngineInit = &mWinEngineInit;
capEngine = mozilla::camera::WinEngine;
break;
case dom::MediaSourceEnum::Application:
mAppEngineConfig.Set<webrtc::CaptureDeviceInfo>(
new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Application));
if (!mAppEngine) {
if (!(mAppEngine = webrtc::VideoEngine::Create(mAppEngineConfig))) {
return;
}
}
videoEngine = mAppEngine;
videoEngineInit = &mAppEngineInit;
capEngine = mozilla::camera::AppEngine;
break;
case dom::MediaSourceEnum::Screen:
mScreenEngineConfig.Set<webrtc::CaptureDeviceInfo>(
new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Screen));
if (!mScreenEngine) {
if (!(mScreenEngine = webrtc::VideoEngine::Create(mScreenEngineConfig))) {
return;
}
}
videoEngine = mScreenEngine;
videoEngineInit = &mScreenEngineInit;
capEngine = mozilla::camera::ScreenEngine;
break;
case dom::MediaSourceEnum::Browser:
mBrowserEngineConfig.Set<webrtc::CaptureDeviceInfo>(
new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Browser));
if (!mBrowserEngine) {
if (!(mBrowserEngine = webrtc::VideoEngine::Create(mBrowserEngineConfig))) {
return;
}
}
videoEngine = mBrowserEngine;
videoEngineInit = &mBrowserEngineInit;
capEngine = mozilla::camera::BrowserEngine;
break;
case dom::MediaSourceEnum::Camera:
// fall through
default:
if (!mVideoEngine) {
if (!(mVideoEngine = webrtc::VideoEngine::Create())) {
return;
}
}
videoEngine = mVideoEngine;
videoEngineInit = &mVideoEngineInit;
capEngine = mozilla::camera::CameraEngine;
break;
default:
// BOOM
MOZ_CRASH("No valid video engine");
break;
}
ptrViEBase = webrtc::ViEBase::GetInterface(videoEngine);
if (!ptrViEBase) {
return;
}
if (ptrViEBase->Init() < 0) {
return;
}
*videoEngineInit = true;
ptrViECapture = webrtc::ViECapture::GetInterface(videoEngine);
if (!ptrViECapture) {
return;
}
/**
@ -220,7 +162,8 @@ MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
* for a given instance of the engine. Likewise, if a device was plugged out,
* mVideoSources must be updated.
*/
int num = ptrViECapture->NumberOfCaptureDevices();
int num;
num = mozilla::camera::NumberOfCaptureDevices(capEngine);
if (num <= 0) {
return;
}
@ -232,27 +175,29 @@ MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
// paranoia
deviceName[0] = '\0';
uniqueId[0] = '\0';
int error = ptrViECapture->GetCaptureDevice(i, deviceName,
sizeof(deviceName), uniqueId,
sizeof(uniqueId));
int error;
error = mozilla::camera::GetCaptureDevice(capEngine,
i, deviceName,
sizeof(deviceName), uniqueId,
sizeof(uniqueId));
if (error) {
LOG((" VieCapture:GetCaptureDevice: Failed %d",
ptrViEBase->LastError() ));
LOG(("camera:GetCaptureDevice: Failed %d", error ));
continue;
}
#ifdef DEBUG
LOG((" Capture Device Index %d, Name %s", i, deviceName));
webrtc::CaptureCapability cap;
int numCaps = ptrViECapture->NumberOfCapabilities(uniqueId,
MediaEngineSource::kMaxUniqueIdLength);
int numCaps = mozilla::camera::NumberOfCapabilities(capEngine,
uniqueId);
LOG(("Number of Capabilities %d", numCaps));
for (int j = 0; j < numCaps; j++) {
if (ptrViECapture->GetCaptureCapability(uniqueId,
MediaEngineSource::kMaxUniqueIdLength,
j, cap ) != 0 ) {
break;
if (mozilla::camera::GetCaptureCapability(capEngine,
uniqueId,
j, cap ) != 0 ) {
break;
}
LOG(("type=%d width=%d height=%d maxFPS=%d",
cap.rawType, cap.width, cap.height, cap.maxFPS ));
@ -269,10 +214,10 @@ MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource,
NS_ConvertUTF8toUTF16 uuid(uniqueId);
if (mVideoSources.Get(uuid, getter_AddRefs(vSource))) {
// We've already seen this device, just refresh and append.
static_cast<MediaEngineWebRTCVideoSource*>(vSource.get())->Refresh(i);
static_cast<MediaEngineRemoteVideoSource*>(vSource.get())->Refresh(i);
aVSources->AppendElement(vSource.get());
} else {
vSource = new MediaEngineWebRTCVideoSource(videoEngine, i, aMediaSource);
vSource = new MediaEngineRemoteVideoSource(i, capEngine, aMediaSource);
mVideoSources.Put(uuid, vSource); // Hashtable takes ownership.
aVSources->AppendElement(vSource);
}
@ -417,40 +362,14 @@ MediaEngineWebRTC::Shutdown()
mVideoSources.Clear();
mAudioSources.Clear();
// Clear callbacks before we go away since the engines may outlive us
if (mVideoEngine) {
mVideoEngine->SetTraceCallback(nullptr);
webrtc::VideoEngine::Delete(mVideoEngine);
}
if (mScreenEngine) {
mScreenEngine->SetTraceCallback(nullptr);
webrtc::VideoEngine::Delete(mScreenEngine);
}
if (mWinEngine) {
mWinEngine->SetTraceCallback(nullptr);
webrtc::VideoEngine::Delete(mWinEngine);
}
if (mBrowserEngine) {
mBrowserEngine->SetTraceCallback(nullptr);
webrtc::VideoEngine::Delete(mBrowserEngine);
}
if (mAppEngine) {
mAppEngine->SetTraceCallback(nullptr);
webrtc::VideoEngine::Delete(mAppEngine);
}
if (mVoiceEngine) {
mVoiceEngine->SetTraceCallback(nullptr);
webrtc::VoiceEngine::Delete(mVoiceEngine);
}
mVideoEngine = nullptr;
mVoiceEngine = nullptr;
mScreenEngine = nullptr;
mWinEngine = nullptr;
mBrowserEngine = nullptr;
mAppEngine = nullptr;
mozilla::camera::Shutdown();
if (mThread) {
mThread->Shutdown();

View File

@ -48,91 +48,13 @@
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "CamerasChild.h"
#include "NullTransport.h"
#include "AudioOutputObserver.h"
namespace mozilla {
/**
* The WebRTC implementation of the MediaEngine interface.
*/
class MediaEngineWebRTCVideoSource : public MediaEngineCameraVideoSource
, public webrtc::ExternalRenderer
{
public:
NS_DECL_THREADSAFE_ISUPPORTS
// ViEExternalRenderer.
virtual int FrameSizeChange(unsigned int w, unsigned int h, unsigned int streams) override;
virtual int DeliverFrame(unsigned char* buffer,
int size,
uint32_t time_stamp,
int64_t ntp_time_ms,
int64_t render_time,
void *handle) override;
/**
* Does DeliverFrame() support a null buffer and non-null handle
* (video texture)?
* XXX Investigate! Especially for Android/B2G
*/
virtual bool IsTextureSupported() override { return false; }
MediaEngineWebRTCVideoSource(webrtc::VideoEngine* aVideoEnginePtr, int aIndex,
dom::MediaSourceEnum aMediaSource = dom::MediaSourceEnum::Camera)
: MediaEngineCameraVideoSource(aIndex, "WebRTCCamera.Monitor")
, mVideoEngine(aVideoEnginePtr)
, mMinFps(-1)
, mMediaSource(aMediaSource)
{
MOZ_ASSERT(aVideoEnginePtr);
MOZ_ASSERT(aMediaSource != dom::MediaSourceEnum::Other);
Init();
}
virtual nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
const MediaEnginePrefs& aPrefs,
const nsString& aDeviceId) override;
virtual nsresult Deallocate() override;
virtual nsresult Start(SourceMediaStream*, TrackID) override;
virtual nsresult Stop(SourceMediaStream*, TrackID) override;
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aId,
StreamTime aDesiredTime) override;
virtual const dom::MediaSourceEnum GetMediaSource() override {
return mMediaSource;
}
virtual nsresult TakePhoto(PhotoCallback* aCallback) override
{
return NS_ERROR_NOT_IMPLEMENTED;
}
void Refresh(int aIndex);
virtual void Shutdown() override;
protected:
~MediaEngineWebRTCVideoSource() { Shutdown(); }
private:
// Initialize the needed Video engine interfaces.
void Init();
// Engine variables.
webrtc::VideoEngine* mVideoEngine; // Weak reference, don't free.
ScopedCustomReleasePtr<webrtc::ViEBase> mViEBase;
ScopedCustomReleasePtr<webrtc::ViECapture> mViECapture;
ScopedCustomReleasePtr<webrtc::ViERender> mViERender;
int mMinFps; // Min rate we want to accept
dom::MediaSourceEnum mMediaSource; // source of media (camera | application | screen)
size_t NumCapabilities() override;
void GetCapability(size_t aIndex, webrtc::CaptureCapability& aOut) override;
};
class MediaEngineWebRTCAudioCaptureSource : public MediaEngineAudioSource
{
public:
@ -266,10 +188,10 @@ public:
int16_t audio10ms[], int length,
int samplingFreq, bool isStereo) override;
NS_DECL_THREADSAFE_ISUPPORTS
virtual void Shutdown() override;
NS_DECL_THREADSAFE_ISUPPORTS
protected:
~MediaEngineWebRTCMicrophoneSource() { Shutdown(); }
@ -330,29 +252,11 @@ private:
nsCOMPtr<nsIThread> mThread;
// gUM runnables can e.g. Enumerate from multiple threads
Mutex mMutex;
// protected with mMutex:
webrtc::VideoEngine* mScreenEngine;
webrtc::VideoEngine* mBrowserEngine;
webrtc::VideoEngine* mWinEngine;
webrtc::VideoEngine* mAppEngine;
webrtc::VideoEngine* mVideoEngine;
webrtc::VoiceEngine* mVoiceEngine;
// specialized configurations
webrtc::Config mAppEngineConfig;
webrtc::Config mWinEngineConfig;
webrtc::Config mScreenEngineConfig;
webrtc::Config mBrowserEngineConfig;
// Need this to avoid unneccesary WebRTC calls while enumerating.
bool mVideoEngineInit;
bool mAudioEngineInit;
bool mScreenEngineInit;
bool mBrowserEngineInit;
bool mWinEngineInit;
bool mAppEngineInit;
bool mHasTabVideoSource;
// Store devices we've already seen in a hashtable for quick return.

View File

@ -1,449 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaEngineWebRTC.h"
#include "Layers.h"
#include "ImageTypes.h"
#include "ImageContainer.h"
#include "mozilla/layers/GrallocTextureClient.h"
#include "nsMemory.h"
#include "mtransport/runnable_utils.h"
#include "MediaTrackConstraints.h"
namespace mozilla {
using namespace mozilla::gfx;
using dom::ConstrainLongRange;
using dom::ConstrainDoubleRange;
using dom::MediaTrackConstraintSet;
extern PRLogModuleInfo* GetMediaManagerLog();
#define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
#define LOGFRAME(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)
/**
* Webrtc video source.
*/
NS_IMPL_ISUPPORTS0(MediaEngineWebRTCVideoSource)
int
MediaEngineWebRTCVideoSource::FrameSizeChange(
unsigned int w, unsigned int h, unsigned int streams)
{
mWidth = w;
mHeight = h;
LOG(("Video FrameSizeChange: %ux%u", w, h));
return 0;
}
// ViEExternalRenderer Callback. Process every incoming frame here.
int
MediaEngineWebRTCVideoSource::DeliverFrame(
unsigned char* buffer, int size, uint32_t time_stamp,
int64_t ntp_time_ms, int64_t render_time, void *handle)
{
// Check for proper state.
if (mState != kStarted) {
LOG(("DeliverFrame: video not started"));
return 0;
}
if (mWidth*mHeight + 2*(((mWidth+1)/2)*((mHeight+1)/2)) != size) {
MOZ_ASSERT(false, "Wrong size frame in DeliverFrame!");
return 0;
}
// Create a video frame and append it to the track.
nsRefPtr<layers::Image> image = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
layers::PlanarYCbCrImage* videoImage = static_cast<layers::PlanarYCbCrImage*>(image.get());
uint8_t* frame = static_cast<uint8_t*> (buffer);
const uint8_t lumaBpp = 8;
const uint8_t chromaBpp = 4;
// Take lots of care to round up!
layers::PlanarYCbCrData data;
data.mYChannel = frame;
data.mYSize = IntSize(mWidth, mHeight);
data.mYStride = (mWidth * lumaBpp + 7)/ 8;
data.mCbCrStride = (mWidth * chromaBpp + 7) / 8;
data.mCbChannel = frame + mHeight * data.mYStride;
data.mCrChannel = data.mCbChannel + ((mHeight+1)/2) * data.mCbCrStride;
data.mCbCrSize = IntSize((mWidth+1)/ 2, (mHeight+1)/ 2);
data.mPicX = 0;
data.mPicY = 0;
data.mPicSize = IntSize(mWidth, mHeight);
data.mStereoMode = StereoMode::MONO;
videoImage->SetData(data);
#ifdef DEBUG
static uint32_t frame_num = 0;
LOGFRAME(("frame %d (%dx%d); timestamp %u, ntp_time %lu, render_time %lu", frame_num++,
mWidth, mHeight, time_stamp, ntp_time_ms, render_time));
#endif
// we don't touch anything in 'this' until here (except for snapshot,
// which has it's own lock)
MonitorAutoLock lock(mMonitor);
// implicitly releases last image
mImage = image.forget();
// Push the frame into the MSG with a minimal duration. This will likely
// mean we'll still get NotifyPull calls which will then return the same
// frame again with a longer duration. However, this means we won't
// fail to get the frame in and drop frames.
// XXX The timestamp for the frame should be based on the Capture time,
// not the MSG time, and MSG should never, ever block on a (realtime)
// video frame (or even really for streaming - audio yes, video probably no).
// Note that MediaPipeline currently ignores the timestamps from MSG
uint32_t len = mSources.Length();
for (uint32_t i = 0; i < len; i++) {
if (mSources[i]) {
AppendToTrack(mSources[i], mImage, mTrackID, 1); // shortest possible duration
}
}
return 0;
}
// Called if the graph thinks it's running out of buffered video; repeat
// the last frame for whatever minimum period it think it needs. Note that
// this means that no *real* frame can be inserted during this period.
void
MediaEngineWebRTCVideoSource::NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aID,
StreamTime aDesiredTime)
{
VideoSegment segment;
MonitorAutoLock lock(mMonitor);
// B2G does AddTrack, but holds kStarted until the hardware changes state.
// So mState could be kReleased here. We really don't care about the state,
// though.
StreamTime delta = aDesiredTime - aSource->GetEndOfAppendedData(aID);
LOGFRAME(("NotifyPull, desired = %ld, delta = %ld %s", (int64_t) aDesiredTime,
(int64_t) delta, mImage.get() ? "" : "<null>"));
// Bug 846188 We may want to limit incoming frames to the requested frame rate
// mFps - if you want 30FPS, and the camera gives you 60FPS, this could
// cause issues.
// We may want to signal if the actual frame rate is below mMinFPS -
// cameras often don't return the requested frame rate especially in low
// light; we should consider surfacing this so that we can switch to a
// lower resolution (which may up the frame rate)
// Don't append if we've already provided a frame that supposedly goes past the current aDesiredTime
// Doing so means a negative delta and thus messes up handling of the graph
if (delta > 0) {
// nullptr images are allowed
AppendToTrack(aSource, mImage, aID, delta);
}
}
size_t
MediaEngineWebRTCVideoSource::NumCapabilities()
{
int num = mViECapture->NumberOfCapabilities(GetUUID().get(), kMaxUniqueIdLength);
if (num > 0) {
return num;
}
switch (mMediaSource) {
case dom::MediaSourceEnum::Camera:
#ifdef XP_MACOSX
// Mac doesn't support capabilities.
//
// Hardcode generic desktop capabilities modeled on OSX camera.
// Note: Values are empirically picked to be OSX friendly, as on OSX,
// values other than these cause the source to not produce.
if (mHardcodedCapabilities.IsEmpty()) {
for (int i = 0; i < 9; i++) {
webrtc::CaptureCapability c;
c.width = 1920 - i*128;
c.height = 1080 - i*72;
c.maxFPS = 30;
mHardcodedCapabilities.AppendElement(c);
}
for (int i = 0; i < 16; i++) {
webrtc::CaptureCapability c;
c.width = 640 - i*40;
c.height = 480 - i*30;
c.maxFPS = 30;
mHardcodedCapabilities.AppendElement(c);
}
}
break;
#endif
default:
// The default for devices that don't return discrete capabilities: treat
// them as supporting all capabilities orthogonally. E.g. screensharing.
webrtc::CaptureCapability c;
c.width = 0; // 0 = accept any value
c.height = 0;
c.maxFPS = 0;
mHardcodedCapabilities.AppendElement(c);
break;
}
return mHardcodedCapabilities.Length();
}
void
MediaEngineWebRTCVideoSource::GetCapability(size_t aIndex,
webrtc::CaptureCapability& aOut)
{
if (!mHardcodedCapabilities.IsEmpty()) {
MediaEngineCameraVideoSource::GetCapability(aIndex, aOut);
}
mViECapture->GetCaptureCapability(GetUUID().get(), kMaxUniqueIdLength, aIndex, aOut);
}
nsresult
MediaEngineWebRTCVideoSource::Allocate(const dom::MediaTrackConstraints &aConstraints,
const MediaEnginePrefs &aPrefs,
const nsString& aDeviceId)
{
LOG((__FUNCTION__));
if (mState == kReleased && mInitDone) {
// Note: if shared, we don't allow a later opener to affect the resolution.
// (This may change depending on spec changes for Constraints/settings)
if (!ChooseCapability(aConstraints, aPrefs, aDeviceId)) {
return NS_ERROR_UNEXPECTED;
}
if (mViECapture->AllocateCaptureDevice(GetUUID().get(),
kMaxUniqueIdLength, mCaptureIndex)) {
return NS_ERROR_FAILURE;
}
mState = kAllocated;
LOG(("Video device %d allocated", mCaptureIndex));
} else if (MOZ_LOG_TEST(GetMediaManagerLog(), LogLevel::Debug)) {
MonitorAutoLock lock(mMonitor);
if (mSources.IsEmpty()) {
LOG(("Video device %d reallocated", mCaptureIndex));
} else {
LOG(("Video device %d allocated shared", mCaptureIndex));
}
}
return NS_OK;
}
// Release the capture device if no streams are consuming it anymore.
// Returns NS_OK when released or still shared; NS_ERROR_FAILURE when called
// in a state where release is not legal (not stopped/allocated).
nsresult
MediaEngineWebRTCVideoSource::Deallocate()
{
  LOG((__FUNCTION__));
  // Snapshot mSources emptiness under the monitor; the release below runs
  // outside the lock.
  bool empty;
  {
    MonitorAutoLock lock(mMonitor);
    empty = mSources.IsEmpty();
  }
  if (empty) {
    // If empty, no callbacks to deliver data should be occurring
    if (mState != kStopped && mState != kAllocated) {
      return NS_ERROR_FAILURE;
    }
#ifdef XP_MACOSX
    // Bug 829907 - on mac, in shutdown, the mainthread stops processing
    // 'native' events, and the QTKit code uses events to the main native CFRunLoop
    // in order to provide thread safety. In order to avoid this locking us up,
    // release the ViE capture device synchronously on MainThread (so the native
    // event isn't needed).
    // XXX Note if MainThread Dispatch()es NS_DISPATCH_SYNC to us we can deadlock.
    // XXX It might be nice to only do this if we're in shutdown... Hard to be
    // sure when that is though.
    // Thread safety: a) we call this synchronously, and don't use ViECapture from
    // another thread anywhere else, b) ViEInputManager::DestroyCaptureDevice() grabs
    // an exclusive object lock and deletes it in a critical section, so all in all
    // this should be safe threadwise.
    NS_DispatchToMainThread(WrapRunnable(mViECapture.get(),
                                         &webrtc::ViECapture::ReleaseCaptureDevice,
                                         mCaptureIndex),
                            NS_DISPATCH_SYNC);
#else
    mViECapture->ReleaseCaptureDevice(mCaptureIndex);
#endif
    mState = kReleased;
    LOG(("Video device %d deallocated", mCaptureIndex));
  } else {
    // Other streams still reference the device; keep it allocated.
    LOG(("Video device %d deallocated but still in use", mCaptureIndex));
  }
  return NS_OK;
}
// Attach aStream/aID to this source and begin capture if it isn't already
// running. Subsequent callers share the running capture; only the first
// caller starts the renderer and the hardware.
nsresult
MediaEngineWebRTCVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
{
  LOG((__FUNCTION__));
  if (!mInitDone || !aStream) {
    return NS_ERROR_FAILURE;
  }

  // Register the consuming stream under the monitor.
  {
    MonitorAutoLock lock(mMonitor);
    mSources.AppendElement(aStream);
  }
  aStream->AddTrack(aID, 0, new VideoSegment(), SourceMediaStream::ADDTRACK_QUEUED);

  if (mState == kStarted) {
    // Capture already running; the new stream just shares its frames.
    return NS_OK;
  }

  mImageContainer = layers::LayerManager::CreateImageContainer();
  mState = kStarted;
  mTrackID = aID;

  // Hook ourselves up as the external renderer, then kick off rendering
  // and capture.
  if (mViERender->AddRenderer(mCaptureIndex, webrtc::kVideoI420,
                              (webrtc::ExternalRenderer*)this) == -1) {
    return NS_ERROR_FAILURE;
  }
  if (mViERender->StartRender(mCaptureIndex) == -1) {
    return NS_ERROR_FAILURE;
  }
  if (mViECapture->StartCapture(mCaptureIndex, mCapability) < 0) {
    return NS_ERROR_FAILURE;
  }
  return NS_OK;
}
// Detach aSource/aID from this source; when the last consumer goes away,
// stop rendering and capture. Stopping an already-stopped source is allowed
// and returns NS_OK.
nsresult
MediaEngineWebRTCVideoSource::Stop(SourceMediaStream *aSource, TrackID aID)
{
  LOG((__FUNCTION__));
  // All bookkeeping (mSources, mState, mImage) happens under the monitor;
  // the ViE teardown calls below deliberately run after the lock is dropped.
  {
    MonitorAutoLock lock(mMonitor);
    if (!mSources.RemoveElement(aSource)) {
      // Already stopped - this is allowed
      return NS_OK;
    }
    aSource->EndTrack(aID);
    if (!mSources.IsEmpty()) {
      // Other streams still consume frames; keep capturing.
      return NS_OK;
    }
    if (mState != kStarted) {
      return NS_ERROR_FAILURE;
    }
    mState = kStopped;
    // Drop any cached image so we don't start with a stale image on next
    // usage
    mImage = nullptr;
  }
  mViERender->StopRender(mCaptureIndex);
  mViERender->RemoveRenderer(mCaptureIndex);
  mViECapture->StopCapture(mCaptureIndex);
  return NS_OK;
}
// Acquire the ViE interfaces and the device identity for mCaptureIndex.
// Any failure leaves mInitDone false; success records name/UUID and marks
// the source ready for Allocate/Start.
void
MediaEngineWebRTCVideoSource::Init()
{
  // fix compile warning for these being unused. (remove once used)
  (void) mFps;
  (void) mMinFps;
  LOG((__FUNCTION__));

  if (!mVideoEngine) {
    return;
  }
  mViEBase = webrtc::ViEBase::GetInterface(mVideoEngine);
  if (!mViEBase) {
    return;
  }

  // Get interfaces for capture, render for now
  mViECapture = webrtc::ViECapture::GetInterface(mVideoEngine);
  mViERender = webrtc::ViERender::GetInterface(mVideoEngine);
  if (!mViECapture || !mViERender) {
    return;
  }

  // Look up the device's human-readable name and unique id.
  char deviceName[kMaxDeviceNameLength];
  char uniqueId[kMaxUniqueIdLength];
  if (mViECapture->GetCaptureDevice(mCaptureIndex,
                                    deviceName, kMaxDeviceNameLength,
                                    uniqueId, kMaxUniqueIdLength) != 0) {
    return;
  }

  SetName(NS_ConvertUTF8toUTF16(deviceName));
  SetUUID(uniqueId);
  mInitDone = true;
}
// Tear the source down completely: stop every consuming stream, release the
// capture device, and drop the ViE interfaces. Safe to call when Init never
// completed (no-op).
void
MediaEngineWebRTCVideoSource::Shutdown()
{
  LOG((__FUNCTION__));
  if (!mInitDone) {
    return;
  }
  if (mState == kStarted) {
    SourceMediaStream *source;
    bool empty;
    // Drain mSources one stream at a time. The monitor is only held while
    // inspecting/reading the array; Stop() takes it again itself, so it must
    // be called outside the locked scope.
    while (1) {
      {
        MonitorAutoLock lock(mMonitor);
        empty = mSources.IsEmpty();
        if (empty) {
          break;
        }
        source = mSources[0];
      }
      Stop(source, kVideoTrack); // XXX change to support multiple tracks
    }
    // Stopping the last stream transitions us to kStopped.
    MOZ_ASSERT(mState == kStopped);
  }
  if (mState == kAllocated || mState == kStopped) {
    Deallocate();
  }
  mViECapture = nullptr;
  mViERender = nullptr;
  mViEBase = nullptr;
  mState = kReleased;
  mInitDone = false;
}
// Re-read the device's name after a device-list refresh.
// NOTE: mCaptureIndex might have changed when allocated!
// Use aIndex to update information, but don't change mCaptureIndex!!
// Caller looked up this source by uniqueId, so it shouldn't change
void MediaEngineWebRTCVideoSource::Refresh(int aIndex) {
  char deviceName[kMaxDeviceNameLength];
  char uniqueId[kMaxUniqueIdLength];

  const int rv = mViECapture->GetCaptureDevice(aIndex,
                                               deviceName, sizeof(deviceName),
                                               uniqueId, sizeof(uniqueId));
  if (rv != 0) {
    return;
  }

  SetName(NS_ConvertUTF8toUTF16(deviceName));
#ifdef DEBUG
  // The unique id is the lookup key, so it must not have changed.
  MOZ_ASSERT(GetUUID().Equals(uniqueId));
#endif
}
} // namespace mozilla

View File

@ -21,13 +21,14 @@ EXPORTS += [
if CONFIG['MOZ_WEBRTC']:
EXPORTS += ['AudioOutputObserver.h',
'MediaEngineRemoteVideoSource.h',
'MediaEngineWebRTC.h']
EXPORTS.mozilla.dom += [ 'RTCIdentityProviderRegistrar.h' ]
UNIFIED_SOURCES += [
'MediaEngineCameraVideoSource.cpp',
'MediaEngineRemoteVideoSource.cpp',
'MediaEngineTabVideoSource.cpp',
'MediaEngineWebRTCAudio.cpp',
'MediaEngineWebRTCVideo.cpp',
'MediaTrackConstraints.cpp',
'RTCCertificate.cpp',
'RTCIdentityProviderRegistrar.cpp',
@ -78,13 +79,14 @@ include('/ipc/chromium/chromium-config.mozbuild')
# defined, which complains about important MOZ_EXPORT attributes for
# android API types
if CONFIG['GNU_CC'] or CONFIG['CLANG_CL']:
CXXFLAGS += [
'-Wno-error=attributes'
]
CXXFLAGS += [
'-Wno-error=attributes'
]
FINAL_LIBRARY = 'xul'
if CONFIG['_MSC_VER']:
CXXFLAGS += [
'-wd4275', # non dll-interface class used as base for dll-interface class
]
CXXFLAGS += [
'-wd4275', # non dll-interface class used as base for dll-interface class
]
DEFINES['__PRETTY_FUNCTION__'] = '__FUNCSIG__'

View File

@ -8,6 +8,8 @@
#include "BroadcastChannelChild.h"
#include "ServiceWorkerManagerChild.h"
#include "FileDescriptorSetChild.h"
#include "CamerasChild.h"
#include "mozilla/media/MediaChild.h"
#include "mozilla/Assertions.h"
#include "mozilla/dom/PBlobChild.h"
#include "mozilla/dom/asmjscache/AsmJSCache.h"
@ -265,6 +267,23 @@ BackgroundChildImpl::DeallocPBroadcastChannelChild(
return true;
}
// Construct the child-side Cameras IPC actor for PBackground.
// The strong reference taken here via forget().take() is handed to IPDL and
// re-adopted (and released) in DeallocPCamerasChild.
camera::PCamerasChild*
BackgroundChildImpl::AllocPCamerasChild()
{
nsRefPtr<camera::CamerasChild> agent =
new camera::CamerasChild();
return agent.forget().take();
}
// Destroy the child-side Cameras IPC actor.
// dont_AddRef re-adopts the reference handed out in AllocPCamerasChild;
// dropping 'child' at end of scope releases it.
bool
BackgroundChildImpl::DeallocPCamerasChild(camera::PCamerasChild *aActor)
{
nsRefPtr<camera::CamerasChild> child =
dont_AddRef(static_cast<camera::CamerasChild*>(aActor));
MOZ_ASSERT(aActor);
return true;
}
// -----------------------------------------------------------------------------
// ServiceWorkerManager
// -----------------------------------------------------------------------------

View File

@ -71,6 +71,12 @@ protected:
virtual bool
DeallocPFileDescriptorSetChild(PFileDescriptorSetChild* aActor) override;
virtual PCamerasChild*
AllocPCamerasChild() override;
virtual bool
DeallocPCamerasChild(PCamerasChild* aActor) override;
virtual PVsyncChild*
AllocPVsyncChild() override;

View File

@ -6,6 +6,8 @@
#include "BroadcastChannelParent.h"
#include "FileDescriptorSetParent.h"
#include "CamerasParent.h"
#include "mozilla/media/MediaParent.h"
#include "mozilla/AppProcessChecker.h"
#include "mozilla/Assertions.h"
#include "mozilla/dom/ContentParent.h"
@ -282,6 +284,29 @@ BackgroundParentImpl::DeallocPVsyncParent(PVsyncParent* aActor)
return true;
}
// Construct the parent-side Cameras IPC actor on the PBackground thread of
// the main process. The strong reference from forget().take() is owned by
// IPDL until DeallocPCamerasParent re-adopts and releases it.
camera::PCamerasParent*
BackgroundParentImpl::AllocPCamerasParent()
{
AssertIsInMainProcess();
AssertIsOnBackgroundThread();
nsRefPtr<mozilla::camera::CamerasParent> actor =
mozilla::camera::CamerasParent::Create();
return actor.forget().take();
}
// Destroy the parent-side Cameras IPC actor.
// dont_AddRef re-adopts the reference handed out in AllocPCamerasParent;
// dropping 'actor' at end of scope releases it.
bool
BackgroundParentImpl::DeallocPCamerasParent(camera::PCamerasParent *aActor)
{
AssertIsInMainProcess();
AssertIsOnBackgroundThread();
MOZ_ASSERT(aActor);
nsRefPtr<mozilla::camera::CamerasParent> actor =
dont_AddRef(static_cast<mozilla::camera::CamerasParent*>(aActor));
return true;
}
namespace {
class InitUDPSocketParentCallback final : public nsRunnable

View File

@ -101,6 +101,12 @@ protected:
virtual bool
DeallocPServiceWorkerManagerParent(PServiceWorkerManagerParent* aActor) override;
virtual PCamerasParent*
AllocPCamerasParent() override;
virtual bool
DeallocPCamerasParent(PCamerasParent* aActor) override;
virtual bool
RecvShutdownServiceWorkerRegistrar() override;

View File

@ -12,6 +12,7 @@ include protocol PCacheStorage;
include protocol PCacheStreamControl;
include protocol PFileDescriptorSet;
include protocol PMessagePort;
include protocol PCameras;
include protocol PNuwa;
include protocol PServiceWorkerManager;
include protocol PUDPSocket;
@ -47,6 +48,7 @@ sync protocol PBackground
manages PCacheStreamControl;
manages PFileDescriptorSet;
manages PMessagePort;
manages PCameras;
manages PNuwa;
manages PServiceWorkerManager;
manages PUDPSocket;
@ -60,6 +62,8 @@ parent:
PVsync();
PCameras();
PUDPSocket(OptionalPrincipalInfo pInfo, nsCString filter);
PBroadcastChannel(PrincipalInfo pInfo, nsCString origin, nsString channel,
bool privateBrowsing);

View File

@ -140,6 +140,7 @@ LOCAL_INCLUDES += [
'/dom/broadcastchannel',
'/dom/indexedDB',
'/dom/workers',
'/media/webrtc/trunk',
'/xpcom/build',
]