diff --git a/dom/camera/GonkCameraControl.cpp b/dom/camera/GonkCameraControl.cpp
index ccb110188fd1..618c915aeb68 100644
--- a/dom/camera/GonkCameraControl.cpp
+++ b/dom/camera/GonkCameraControl.cpp
@@ -903,6 +903,12 @@ nsGonkCameraControl::SetThumbnailSizeImpl(const Size& aSize)
   return SetAndPush(CAMERA_PARAM_THUMBNAILSIZE, size);
 }
 
+android::sp<android::GonkCameraHardware>
+nsGonkCameraControl::GetCameraHw()
+{
+  return mCameraHw;
+}
+
 nsresult
 nsGonkCameraControl::SetThumbnailSize(const Size& aSize)
 {
diff --git a/dom/camera/GonkCameraControl.h b/dom/camera/GonkCameraControl.h
index 17c19aac54a6..307036d98931 100644
--- a/dom/camera/GonkCameraControl.h
+++ b/dom/camera/GonkCameraControl.h
@@ -32,6 +32,7 @@ namespace android {
   class GonkCameraHardware;
   class MediaProfiles;
   class GonkRecorder;
+  class GonkCameraSource;
 }
 
 namespace mozilla {
@@ -154,6 +155,9 @@ protected:
   nsresult UpdateThumbnailSize();
   nsresult SetThumbnailSizeImpl(const Size& aSize);
 
+  friend class android::GonkCameraSource;
+  android::sp<android::GonkCameraHardware> GetCameraHw();
+
   int32_t RationalizeRotation(int32_t aRotation);
 
   uint32_t mCameraId;
diff --git a/dom/camera/GonkCameraSource.cpp b/dom/camera/GonkCameraSource.cpp
index df25082b1a84..9cc6192b3bff 100644
--- a/dom/camera/GonkCameraSource.cpp
+++ b/dom/camera/GonkCameraSource.cpp
@@ -46,6 +46,7 @@
 #include "GonkCameraSource.h"
 #include "GonkCameraListener.h"
 #include "GonkCameraHwMgr.h"
+#include "ICameraControl.h"
 
 using namespace mozilla;
 
@@ -157,6 +158,16 @@ GonkCameraSource *GonkCameraSource::Create(
 
     return source;
 }
 
+GonkCameraSource *GonkCameraSource::Create(
+    ICameraControl* aControl,
+    Size videoSize,
+    int32_t frameRate)
+{
+    mozilla::nsGonkCameraControl* control =
+        static_cast<mozilla::nsGonkCameraControl*>(aControl);
+    return Create(control->GetCameraHw(), videoSize, frameRate, false);
+}
+
 GonkCameraSource::GonkCameraSource(
     const sp<GonkCameraHardware>& aCameraHw,
     Size videoSize,
@@ -596,6 +607,10 @@ status_t GonkCameraSource::reset() {
     }
     releaseCamera();
+
+    if (mDirectBufferListener.get()) {
+      mDirectBufferListener = nullptr;
+    }
 
     if (mCollectStats) {
         CS_LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                 mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
@@ -652,6 +667,14 @@ void GonkCameraSource::signalBufferReturned(MediaBuffer *buffer) {
     CHECK(!"signalBufferReturned: bogus buffer");
 }
 
+status_t GonkCameraSource::AddDirectBufferListener(DirectBufferListener* aListener) {
+    if (mDirectBufferListener.get()) {
+        return UNKNOWN_ERROR;
+    }
+    mDirectBufferListener = aListener;
+    return OK;
+}
+
 status_t GonkCameraSource::read(
         MediaBuffer **buffer, const ReadOptions *options) {
     CS_LOGV("read");
@@ -761,6 +784,15 @@ void GonkCameraSource::dataCallbackTimestamp(int64_t timestampUs,
     if(prevRateLimit != rateLimit) {
       mCameraHw->OnRateLimitPreview(rateLimit);
     }
+
+    if (mDirectBufferListener.get()) {
+      MediaBuffer* mediaBuffer;
+      if (read(&mediaBuffer) == OK) {
+        mDirectBufferListener->BufferAvailable(mediaBuffer);
+        // read() calls add_ref() on the MediaBuffer, so it must be released here.
+        mediaBuffer->release();
+      }
+    }
 }
 
 bool GonkCameraSource::isMetaDataStoredInVideoBuffers() const {
diff --git a/dom/camera/GonkCameraSource.h b/dom/camera/GonkCameraSource.h
index f7b17c2755d5..9e912a493f3e 100644
--- a/dom/camera/GonkCameraSource.h
+++ b/dom/camera/GonkCameraSource.h
@@ -27,6 +27,10 @@
 
 #include "GonkCameraHwMgr.h"
 
+namespace mozilla {
+class ICameraControl;
+}
+
 namespace android {
 
 class IMemory;
@@ -39,6 +43,10 @@ public:
                                     int32_t frameRate,
                                     bool storeMetaDataInVideoBuffers = false);
 
+    static GonkCameraSource *Create(mozilla::ICameraControl* aControl,
+                                    Size videoSize,
+                                    int32_t frameRate);
+
    virtual ~GonkCameraSource();
 
    virtual status_t start(MetaData *params = NULL);
@@ -75,6 +83,24 @@ public:
 
    virtual void signalBufferReturned(MediaBuffer* buffer);
 
+    /**
+     * Sends recording frames directly to the listener on the same thread.
+     * Recording frames are a critical resource and should be propagated to
+     * other threads as little as possible, otherwise the camera HAL may end
+     * up waiting for buffers and introduce frame-rate jitter.
+     */
+    class DirectBufferListener : public RefBase {
+    public:
+        DirectBufferListener() {};
+
+        virtual status_t BufferAvailable(MediaBuffer* aBuffer) = 0;
+
+    protected:
+        virtual ~DirectBufferListener() {}
+    };
+
+    status_t AddDirectBufferListener(DirectBufferListener* aListener);
+
 protected:
 
     enum CameraFlags {
@@ -136,6 +162,7 @@ private:
     bool mCollectStats;
     bool mIsMetaDataStoredInVideoBuffers;
     sp<GonkCameraHardware> mCameraHw;
+    sp<DirectBufferListener> mDirectBufferListener;
 
     void releaseQueuedFrames();
     void releaseOneRecordingFrame(const sp<IMemory>& frame);
diff --git a/dom/media/webrtc/MediaEngineGonkVideoSource.cpp b/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
index 984379a9992c..a885ceb96675 100644
--- a/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
@@ -15,11 +15,13 @@
 
 #include "libyuv.h"
 #include "mtransport/runnable_utils.h"
+#include "GonkCameraImage.h"
 
 namespace mozilla {
 
 using namespace mozilla::dom;
 using namespace mozilla::gfx;
+using namespace android;
 
 #ifdef PR_LOGGING
 extern PRLogModuleInfo* GetMediaManagerLog();
@@ -30,6 +32,29 @@ extern PRLogModuleInfo* GetMediaManagerLog();
 #define LOGFRAME(msg)
 #endif
 
+class MediaBufferListener : public GonkCameraSource::DirectBufferListener {
+public:
+  MediaBufferListener(MediaEngineGonkVideoSource* aMediaEngine)
+    : mMediaEngine(aMediaEngine)
+  {
+  }
+
+  status_t BufferAvailable(MediaBuffer* aBuffer)
+  {
+    nsresult rv = mMediaEngine->OnNewMediaBufferFrame(aBuffer);
+    if (NS_SUCCEEDED(rv)) {
+      return OK;
+    }
+    return UNKNOWN_ERROR;
+  }
+
+  ~MediaBufferListener()
+  {
+  }
+
+  nsRefPtr<MediaEngineGonkVideoSource> mMediaEngine;
+};
+
 #define WEBRTC_GONK_VIDEO_SOURCE_POOL_BUFFERS 10
 
 // We are subclassed from CameraControlListener, which implements a
@@ -168,6 +193,46 @@ MediaEngineGonkVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
     return NS_ERROR_FAILURE;
   }
 
+  if (NS_FAILED(InitDirectMediaBuffer())) {
+    return NS_ERROR_FAILURE;
+  }
+
+  return NS_OK;
+}
+
+nsresult
+MediaEngineGonkVideoSource::InitDirectMediaBuffer()
+{
+  // Check the available recording buffer resolutions.
+  nsTArray<ICameraControl::Size> videoSizes;
+  mCameraControl->Get(CAMERA_PARAM_SUPPORTED_VIDEOSIZES, videoSizes);
+  if (!videoSizes.Length()) {
+    return NS_ERROR_FAILURE;
+  }
+
+  // TODO: MediaEngine should use the supported recording frame sizes as the
+  // size range in MediaTrackConstraintSet and find the best match.
+  // Here we use the first one (the largest supported size) as the default.
+  android::Size videoSize;
+  videoSize.width = videoSizes[0].width;
+  videoSize.height = videoSizes[0].height;
+
+  LOG(("Initial size, width: %d, height: %d", videoSize.width, videoSize.height));
+  mCameraSource = GonkCameraSource::Create(mCameraControl,
+                                           videoSize,
+                                           MediaEngine::DEFAULT_VIDEO_FPS);
+
+  status_t rv;
+  rv = mCameraSource->AddDirectBufferListener(new MediaBufferListener(this));
+  if (rv != OK) {
+    return NS_ERROR_FAILURE;
+  }
+
+  rv = mCameraSource->start(nullptr);
+  if (rv != OK) {
+    return NS_ERROR_FAILURE;
+  }
+
   return NS_OK;
 }
 
@@ -353,6 +418,9 @@ void
 MediaEngineGonkVideoSource::StopImpl() {
   MOZ_ASSERT(NS_IsMainThread());
 
+  mCameraSource->stop();
+  mCameraSource = nullptr;
+
   hal::UnregisterScreenConfigurationObserver(this);
   mCameraControl->Stop();
 }
@@ -589,17 +657,17 @@ MediaEngineGonkVideoSource::ConvertPixelFormatToFOURCC(int aFormat)
 void
 MediaEngineGonkVideoSource::RotateImage(layers::Image* aImage, uint32_t aWidth, uint32_t aHeight) {
   layers::GrallocImage *nativeImage = static_cast<layers::GrallocImage*>(aImage);
-  android::sp<android::GraphicBuffer> graphicBuffer = nativeImage->GetGraphicBuffer();
+  android::sp<GraphicBuffer> graphicBuffer = nativeImage->GetGraphicBuffer();
   void *pMem = nullptr;
   // Bug 1109957 size will be wrong if width or height are odd
   uint32_t size = aWidth * aHeight * 3 / 2;
   MOZ_ASSERT(!(aWidth & 1) && !(aHeight & 1));
 
-  graphicBuffer->lock(android::GraphicBuffer::USAGE_SW_READ_MASK, &pMem);
+  graphicBuffer->lock(GraphicBuffer::USAGE_SW_READ_MASK, &pMem);
   uint8_t* srcPtr = static_cast<uint8_t*>(pMem);
 
   // Create a video frame and append it to the track.
-  ImageFormat format = ImageFormat::GRALLOC_PLANAR_YCBCR;
+  ImageFormat format = ImageFormat::GONK_CAMERA_IMAGE;
   nsRefPtr<layers::Image> image = mImageContainer->CreateImage(format);
 
   uint32_t dstWidth;
@@ -657,23 +725,8 @@ MediaEngineGonkVideoSource::RotateImage(layers::Image* aImage, uint32_t aWidth,
   data.mGraphicBuffer = textureClient;
   videoImage->SetData(data);
 
-  // implicitly releases last image
+  // Implicitly releases the last preview image.
   mImage = image.forget();
-
-  // Push the frame into the MSG with a minimal duration. This will likely
-  // mean we'll still get NotifyPull calls which will then return the same
-  // frame again with a longer duration. However, this means we won't
-  // fail to get the frame in and drop frames.
-
-  // XXX The timestamp for the frame should be base on the Capture time,
-  // not the MSG time, and MSG should never, ever block on a (realtime)
-  // video frame (or even really for streaming - audio yes, video probably no).
-  uint32_t len = mSources.Length();
-  for (uint32_t i = 0; i < len; i++) {
-    if (mSources[i]) {
-      AppendToTrack(mSources[i], mImage, mTrackID, 1); // shortest possible duration
-    }
-  }
 }
 
 bool
@@ -702,4 +755,40 @@ MediaEngineGonkVideoSource::OnNewPreviewFrame(layers::Image* aImage, uint32_t aW
   return true; // return true because we're accepting the frame
 }
 
+nsresult
+MediaEngineGonkVideoSource::OnNewMediaBufferFrame(MediaBuffer* aBuffer)
+{
+  {
+    ReentrantMonitorAutoEnter sync(mCallbackMonitor);
+    if (mState == kStopped) {
+      return NS_OK;
+    }
+  }
+
+  MonitorAutoLock enter(mMonitor);
+  if (mImage) {
+    GonkCameraImage* cameraImage = static_cast<GonkCameraImage*>(mImage.get());
+
+    cameraImage->SetBuffer(aBuffer);
+
+    uint32_t len = mSources.Length();
+    for (uint32_t i = 0; i < len; i++) {
+      if (mSources[i]) {
+        // The duration is 1 here.
+        // Ideally it would be the camera timestamp, and the MSG would then
+        // have enough sample duration without calling NotifyPull() anymore.
+        // Unfortunately the gonk camera clock appears to differ from the
+        // MSG clock, which makes the timing inaccurate: frames get queued
+        // in the MSG for longer and longer over time on devices like Flame.
+        AppendToTrack(mSources[i], cameraImage, mTrackID, 1);
+      }
+    }
+    // Clear the MediaBuffer immediately so that it is not kept alive on the
+    // MediaStreamGraph thread.
+    cameraImage->ClearBuffer();
+  }
+
+  return NS_OK;
+}
+
 } // namespace mozilla
diff --git a/dom/media/webrtc/MediaEngineGonkVideoSource.h b/dom/media/webrtc/MediaEngineGonkVideoSource.h
index 7884f0f0ce03..2d361705b8d9 100644
--- a/dom/media/webrtc/MediaEngineGonkVideoSource.h
+++ b/dom/media/webrtc/MediaEngineGonkVideoSource.h
@@ -16,6 +16,11 @@
 #include "mozilla/ReentrantMonitor.h"
 #include "mozilla/dom/File.h"
 #include "mozilla/layers/TextureClientRecycleAllocator.h"
+#include "GonkCameraSource.h"
+
+namespace android {
+class MOZ_EXPORT MediaBuffer;
+}
 
 namespace mozilla {
 
@@ -91,6 +96,12 @@ public:
   // current screen orientation.
   nsresult UpdatePhotoOrientation();
 
+  // Adds aBuffer to the current preview image and sends that image to the
+  // MediaStreamDirectListener via AppendToTrack(). Because MediaBuffer is a
+  // limited resource, the image's MediaBuffer is cleared by calling
+  // GonkCameraImage::ClearBuffer() before this function returns.
+  nsresult OnNewMediaBufferFrame(android::MediaBuffer* aBuffer);
+
 protected:
   ~MediaEngineGonkVideoSource()
   {
@@ -101,12 +112,17 @@ protected:
   void Shutdown();
   void ChooseCapability(const VideoTrackConstraintsN& aConstraints,
                         const MediaEnginePrefs& aPrefs);
+  // Initialize the recording frame (MediaBuffer) callback and the Gonk camera.
+  // MediaBuffers are transferred to the MediaStreamGraph via AppendToTrack().
+  nsresult InitDirectMediaBuffer();
 
   mozilla::ReentrantMonitor mCallbackMonitor; // Monitor for camera callback handling
   // This is only modified on MainThread (AllocImpl and DeallocImpl)
   nsRefPtr<ICameraControl> mCameraControl;
   nsCOMPtr<nsIDOMFile> mLastCapture;
 
+  android::sp<android::GonkCameraSource> mCameraSource;
+
   // These are protected by mMonitor in parent class
   nsTArray<nsRefPtr<PhotoCallback>> mPhotoCallbacks;
   int mRotation;
diff --git a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
index f4e0a0863594..d42df013958b 100644
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -1172,8 +1172,8 @@ void MediaPipelineTransmit::PipelineListener::ProcessVideoChunk(
 
   ImageFormat format = img->GetFormat();
 #ifdef WEBRTC_GONK
-  if (format == ImageFormat::GRALLOC_PLANAR_YCBCR) {
-    layers::GrallocImage *nativeImage = static_cast<layers::GrallocImage*>(img);
+  layers::GrallocImage* nativeImage = img->AsGrallocImage();
+  if (nativeImage) {
     android::sp<android::GraphicBuffer> graphicBuffer = nativeImage->GetGraphicBuffer();
     int pixelFormat = graphicBuffer->getPixelFormat(); /* PixelFormat is an enum == int */
     mozilla::VideoType destFormat;
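A minimal sketch for reviewers of how a consumer is expected to hook into the direct-buffer path added above. ExampleBufferListener, ConsumeFrame() and StartDirectCapture() are hypothetical names used only for illustration; GonkCameraSource::Create(ICameraControl*, Size, int32_t), AddDirectBufferListener(), BufferAvailable() and the add_ref()/release() contract come from the patch itself.

#include "GonkCameraSource.h"   // GonkCameraSource, DirectBufferListener
#include "ICameraControl.h"     // mozilla::ICameraControl

// Hypothetical listener, for illustration only.
class ExampleBufferListener : public android::GonkCameraSource::DirectBufferListener {
public:
  virtual android::status_t BufferAvailable(android::MediaBuffer* aBuffer)
  {
    // dataCallbackTimestamp() drops its own reference as soon as this callback
    // returns, so the frame must either be consumed synchronously here or be
    // add_ref()'d and released later, the way OnNewMediaBufferFrame() does via
    // GonkCameraImage::SetBuffer()/ClearBuffer().
    return ConsumeFrame(aBuffer) ? android::OK : android::UNKNOWN_ERROR;
  }

private:
  bool ConsumeFrame(android::MediaBuffer* aBuffer); // hypothetical helper
};

// Wiring, mirroring MediaEngineGonkVideoSource::InitDirectMediaBuffer();
// the caller is assumed to already hold an ICameraControl and a supported
// recording size.
void StartDirectCapture(mozilla::ICameraControl* aControl,
                        android::Size aSize, int32_t aFps)
{
  android::sp<android::GonkCameraSource> source =
    android::GonkCameraSource::Create(aControl, aSize, aFps);
  if (source->AddDirectBufferListener(new ExampleBufferListener()) == android::OK) {
    source->start(nullptr); // frames now arrive via BufferAvailable()
  }
}

Teardown follows StopImpl() above: stop the source and drop the sp reference; the patch's reset() also clears any registered listener.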