Bug 938034 - Enable gonk camera recording callback. r=roc

Alfredo Yang 2014-12-18 01:00:00 -05:00
parent 01779d04ac
commit 833002460c
7 changed files with 195 additions and 21 deletions


@ -903,6 +903,12 @@ nsGonkCameraControl::SetThumbnailSizeImpl(const Size& aSize)
return SetAndPush(CAMERA_PARAM_THUMBNAILSIZE, size);
}
android::sp<android::GonkCameraHardware>
nsGonkCameraControl::GetCameraHw()
{
return mCameraHw;
}
nsresult
nsGonkCameraControl::SetThumbnailSize(const Size& aSize)
{


@ -32,6 +32,7 @@ namespace android {
class GonkCameraHardware;
class MediaProfiles;
class GonkRecorder;
class GonkCameraSource;
}
namespace mozilla {
@ -154,6 +155,9 @@ protected:
nsresult UpdateThumbnailSize();
nsresult SetThumbnailSizeImpl(const Size& aSize);
friend class android::GonkCameraSource;
android::sp<android::GonkCameraHardware> GetCameraHw();
int32_t RationalizeRotation(int32_t aRotation);
uint32_t mCameraId;


@ -46,6 +46,7 @@
#include "GonkCameraSource.h"
#include "GonkCameraListener.h"
#include "GonkCameraHwMgr.h"
#include "ICameraControl.h"
using namespace mozilla;
@ -157,6 +158,16 @@ GonkCameraSource *GonkCameraSource::Create(
return source;
}
GonkCameraSource *GonkCameraSource::Create(
ICameraControl* aControl,
Size videoSize,
int32_t frameRate)
{
mozilla::nsGonkCameraControl* control =
static_cast<mozilla::nsGonkCameraControl*>(aControl);
return Create(control->GetCameraHw(), videoSize, frameRate, false);
}
GonkCameraSource::GonkCameraSource(
const sp<GonkCameraHardware>& aCameraHw,
Size videoSize,
@ -596,6 +607,10 @@ status_t GonkCameraSource::reset() {
}
releaseCamera();
if (mDirectBufferListener.get()) {
mDirectBufferListener = nullptr;
}
if (mCollectStats) {
CS_LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
@ -652,6 +667,14 @@ void GonkCameraSource::signalBufferReturned(MediaBuffer *buffer) {
CHECK(!"signalBufferReturned: bogus buffer");
}
status_t GonkCameraSource::AddDirectBufferListener(DirectBufferListener* aListener) {
if (mDirectBufferListener.get()) {
return UNKNOWN_ERROR;
}
mDirectBufferListener = aListener;
return OK;
}
status_t GonkCameraSource::read(
MediaBuffer **buffer, const ReadOptions *options) {
CS_LOGV("read");
@ -761,6 +784,15 @@ void GonkCameraSource::dataCallbackTimestamp(int64_t timestampUs,
if(prevRateLimit != rateLimit) {
mCameraHw->OnRateLimitPreview(rateLimit);
}
if (mDirectBufferListener.get()) {
MediaBuffer* mediaBuffer;
if (read(&mediaBuffer) == OK) {
mDirectBufferListener->BufferAvailable(mediaBuffer);
// read() calls MediaBuffer::add_ref(), so the buffer must be released here.
mediaBuffer->release();
}
}
}
bool GonkCameraSource::isMetaDataStoredInVideoBuffers() const {
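
The direct callback path added above follows a simple ownership rule: read() takes a reference on the MediaBuffer before BufferAvailable() is invoked, and dataCallbackTimestamp() releases that reference as soon as the listener returns. The following is a minimal, hypothetical listener illustrating that contract; it is not part of the patch and assumes only the DirectBufferListener interface declared in the header below and MediaBuffer's standard add_ref()/release() reference counting.

class ExampleListener : public android::GonkCameraSource::DirectBufferListener {
public:
  virtual android::status_t BufferAvailable(android::MediaBuffer* aBuffer) {
    // Invoked synchronously from dataCallbackTimestamp(). The caller already
    // holds a reference from read() and releases it right after this returns,
    // so the frame must either be consumed here or add_ref()'d by the
    // listener if it has to outlive the callback.
    size_t length = aBuffer->range_length(); // payload size of this frame
    (void) length;
    return android::OK;
  }
};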


@ -27,6 +27,10 @@
#include "GonkCameraHwMgr.h"
namespace mozilla {
class ICameraControl;
}
namespace android {
class IMemory;
@ -39,6 +43,10 @@ public:
int32_t frameRate,
bool storeMetaDataInVideoBuffers = false);
static GonkCameraSource *Create(mozilla::ICameraControl* aControl,
Size videoSize,
int32_t frameRate);
virtual ~GonkCameraSource();
virtual status_t start(MetaData *params = NULL);
@ -75,6 +83,24 @@ public:
virtual void signalBufferReturned(MediaBuffer* buffer);
/**
* Sends recording frames to the listener directly, on the same thread.
* Recording frames are a scarce resource and should not be propagated to
* other threads when it can be avoided, otherwise the camera HAL may stall
* waiting for buffers and cause frame rate jitter.
*/
class DirectBufferListener : public RefBase {
public:
DirectBufferListener() {};
virtual status_t BufferAvailable(MediaBuffer* aBuffer) = 0;
protected:
virtual ~DirectBufferListener() {}
};
status_t AddDirectBufferListener(DirectBufferListener* aListener);
protected:
enum CameraFlags {
@ -136,6 +162,7 @@ private:
bool mCollectStats;
bool mIsMetaDataStoredInVideoBuffers;
sp<GonkCameraHardware> mCameraHw;
sp<DirectBufferListener> mDirectBufferListener;
void releaseQueuedFrames();
void releaseOneRecordingFrame(const sp<IMemory>& frame);
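
For reference, a minimal sketch of how the new Create(ICameraControl*, ...) overload and AddDirectBufferListener() are meant to be wired together. The resolution, frame rate and variable names are placeholders, and ExampleListener is the hypothetical listener sketched earlier; the real consumer added by this patch is MediaEngineGonkVideoSource, below.

android::Size videoSize;
videoSize.width = 1280;   // placeholder recording resolution
videoSize.height = 720;
android::sp<android::GonkCameraSource> source =
    android::GonkCameraSource::Create(cameraControl,  // mozilla::ICameraControl*
                                      videoSize,
                                      30 /* frame rate */);
if (source->AddDirectBufferListener(new ExampleListener()) == android::OK) {
  source->start(nullptr);
  // Recording frames now arrive synchronously in BufferAvailable().
  source->stop();
}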


@ -15,11 +15,13 @@
#include "libyuv.h"
#include "mtransport/runnable_utils.h"
#include "GonkCameraImage.h"
namespace mozilla {
using namespace mozilla::dom;
using namespace mozilla::gfx;
using namespace android;
#ifdef PR_LOGGING
extern PRLogModuleInfo* GetMediaManagerLog();
@ -30,6 +32,29 @@ extern PRLogModuleInfo* GetMediaManagerLog();
#define LOGFRAME(msg)
#endif
class MediaBufferListener : public GonkCameraSource::DirectBufferListener {
public:
MediaBufferListener(MediaEngineGonkVideoSource* aMediaEngine)
: mMediaEngine(aMediaEngine)
{
}
status_t BufferAvailable(MediaBuffer* aBuffer)
{
nsresult rv = mMediaEngine->OnNewMediaBufferFrame(aBuffer);
if (NS_SUCCEEDED(rv)) {
return OK;
}
return UNKNOWN_ERROR;
}
~MediaBufferListener()
{
}
nsRefPtr<MediaEngineGonkVideoSource> mMediaEngine;
};
#define WEBRTC_GONK_VIDEO_SOURCE_POOL_BUFFERS 10
// We are subclassed from CameraControlListener, which implements a
@ -168,6 +193,46 @@ MediaEngineGonkVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
return NS_ERROR_FAILURE;
}
if (NS_FAILED(InitDirectMediaBuffer())) {
return NS_ERROR_FAILURE;
}
return NS_OK;
}
nsresult
MediaEngineGonkVideoSource::InitDirectMediaBuffer()
{
// Check available buffer resolution.
nsTArray<ICameraControl::Size> videoSizes;
mCameraControl->Get(CAMERA_PARAM_SUPPORTED_VIDEOSIZES, videoSizes);
if (!videoSizes.Length()) {
return NS_ERROR_FAILURE;
}
// TODO: MediaEngine should use the supported recording frame sizes as the
// size range in MediaTrackConstraintSet and find the best match.
// Here we use the first one as the default size (the largest supported size).
android::Size videoSize;
videoSize.width = videoSizes[0].width;
videoSize.height = videoSizes[0].height;
LOG(("Intial size, width: %d, height: %d", videoSize.width, videoSize.height));
mCameraSource = GonkCameraSource::Create(mCameraControl,
videoSize,
MediaEngine::DEFAULT_VIDEO_FPS);
status_t rv;
rv = mCameraSource->AddDirectBufferListener(new MediaBufferListener(this));
if (rv != OK) {
return NS_ERROR_FAILURE;
}
rv = mCameraSource->start(nullptr);
if (rv != OK) {
return NS_ERROR_FAILURE;
}
return NS_OK;
}
@ -353,6 +418,9 @@ void
MediaEngineGonkVideoSource::StopImpl() {
MOZ_ASSERT(NS_IsMainThread());
mCameraSource->stop();
mCameraSource = nullptr;
hal::UnregisterScreenConfigurationObserver(this);
mCameraControl->Stop();
}
@ -589,17 +657,17 @@ MediaEngineGonkVideoSource::ConvertPixelFormatToFOURCC(int aFormat)
void
MediaEngineGonkVideoSource::RotateImage(layers::Image* aImage, uint32_t aWidth, uint32_t aHeight) {
layers::GrallocImage *nativeImage = static_cast<layers::GrallocImage*>(aImage);
android::sp<android::GraphicBuffer> graphicBuffer = nativeImage->GetGraphicBuffer();
android::sp<GraphicBuffer> graphicBuffer = nativeImage->GetGraphicBuffer();
void *pMem = nullptr;
// Bug 1109957 size will be wrong if width or height are odd
uint32_t size = aWidth * aHeight * 3 / 2;
MOZ_ASSERT(!(aWidth & 1) && !(aHeight & 1));
graphicBuffer->lock(android::GraphicBuffer::USAGE_SW_READ_MASK, &pMem);
graphicBuffer->lock(GraphicBuffer::USAGE_SW_READ_MASK, &pMem);
uint8_t* srcPtr = static_cast<uint8_t*>(pMem);
// Create a video frame and append it to the track.
ImageFormat format = ImageFormat::GRALLOC_PLANAR_YCBCR;
ImageFormat format = ImageFormat::GONK_CAMERA_IMAGE;
nsRefPtr<layers::Image> image = mImageContainer->CreateImage(format);
uint32_t dstWidth;
@ -657,23 +725,8 @@ MediaEngineGonkVideoSource::RotateImage(layers::Image* aImage, uint32_t aWidth,
data.mGraphicBuffer = textureClient;
videoImage->SetData(data);
// implicitly releases last image
// Implicitly releases last preview image.
mImage = image.forget();
// Push the frame into the MSG with a minimal duration. This will likely
// mean we'll still get NotifyPull calls which will then return the same
// frame again with a longer duration. However, this means we won't
// fail to get the frame in and drop frames.
// XXX The timestamp for the frame should be base on the Capture time,
// not the MSG time, and MSG should never, ever block on a (realtime)
// video frame (or even really for streaming - audio yes, video probably no).
uint32_t len = mSources.Length();
for (uint32_t i = 0; i < len; i++) {
if (mSources[i]) {
AppendToTrack(mSources[i], mImage, mTrackID, 1); // shortest possible duration
}
}
}
bool
@ -702,4 +755,40 @@ MediaEngineGonkVideoSource::OnNewPreviewFrame(layers::Image* aImage, uint32_t aW
return true; // return true because we're accepting the frame
}
nsresult
MediaEngineGonkVideoSource::OnNewMediaBufferFrame(MediaBuffer* aBuffer)
{
{
ReentrantMonitorAutoEnter sync(mCallbackMonitor);
if (mState == kStopped) {
return NS_OK;
}
}
MonitorAutoLock enter(mMonitor);
if (mImage) {
GonkCameraImage* cameraImage = static_cast<GonkCameraImage*>(mImage.get());
cameraImage->SetBuffer(aBuffer);
uint32_t len = mSources.Length();
for (uint32_t i = 0; i < len; i++) {
if (mSources[i]) {
// Duration is 1 here.
// Ideally it should be the camera timestamp, so the MSG would have enough
// sample duration without calling NotifyPull() anymore. Unfortunately, the
// gonk camera clock appears to differ from the MSG clock, which makes the
// timing inaccurate (frames queue up in the MSG for longer and longer over
// time on devices such as the Flame).
AppendToTrack(mSources[i], cameraImage, mTrackID, 1);
}
}
// Clear the MediaBuffer immediately so it is not kept alive on the
// MediaStreamGraph thread.
cameraImage->ClearBuffer();
}
return NS_OK;
}
} // namespace mozilla


@ -16,6 +16,11 @@
#include "mozilla/ReentrantMonitor.h"
#include "mozilla/dom/File.h"
#include "mozilla/layers/TextureClientRecycleAllocator.h"
#include "GonkCameraSource.h"
namespace android {
class MOZ_EXPORT MediaBuffer;
}
namespace mozilla {
@ -91,6 +96,12 @@ public:
// current screen orientation.
nsresult UpdatePhotoOrientation();
// Adds aBuffer to the current preview image and sends that image to the
// MediaStreamDirectListener via AppendToTrack(). Because MediaBuffers are a
// limited resource, the image's MediaBuffer is cleared by calling
// GonkCameraImage::ClearBuffer() before this function returns.
nsresult OnNewMediaBufferFrame(android::MediaBuffer* aBuffer);
protected:
~MediaEngineGonkVideoSource()
{
@ -101,12 +112,17 @@ protected:
void Shutdown();
void ChooseCapability(const VideoTrackConstraintsN& aConstraints,
const MediaEnginePrefs& aPrefs);
// Initialize the recording frame (MediaBuffer) callback and the Gonk camera.
// MediaBuffers are transferred to the MediaStreamGraph via AppendToTrack().
nsresult InitDirectMediaBuffer();
mozilla::ReentrantMonitor mCallbackMonitor; // Monitor for camera callback handling
// This is only modified on MainThread (AllocImpl and DeallocImpl)
nsRefPtr<ICameraControl> mCameraControl;
nsCOMPtr<nsIDOMFile> mLastCapture;
android::sp<android::GonkCameraSource> mCameraSource;
// These are protected by mMonitor in parent class
nsTArray<nsRefPtr<PhotoCallback>> mPhotoCallbacks;
int mRotation;


@ -1172,8 +1172,8 @@ void MediaPipelineTransmit::PipelineListener::ProcessVideoChunk(
ImageFormat format = img->GetFormat();
#ifdef WEBRTC_GONK
if (format == ImageFormat::GRALLOC_PLANAR_YCBCR) {
layers::GrallocImage *nativeImage = static_cast<layers::GrallocImage*>(img);
layers::GrallocImage* nativeImage = img->AsGrallocImage();
if (nativeImage) {
android::sp<android::GraphicBuffer> graphicBuffer = nativeImage->GetGraphicBuffer();
int pixelFormat = graphicBuffer->getPixelFormat(); /* PixelFormat is an enum == int */
mozilla::VideoType destFormat;