Bug 1043558 - Use gralloc for WebRTC camera preview r=jesup,nical

Sotaro Ikeda 2014-12-16 07:11:48 -08:00
parent c713a465ca
commit 1247620b1d
10 changed files with 128 additions and 39 deletions

View File

@@ -8,6 +8,8 @@
 #include <utils/Log.h>
 #include "GrallocImages.h"
+#include "mozilla/layers/GrallocTextureClient.h"
+#include "mozilla/layers/ImageBridgeChild.h"
 #include "VideoUtils.h"
 #include "ScreenOrientation.h"
@@ -28,6 +30,8 @@ extern PRLogModuleInfo* GetMediaManagerLog();
 #define LOGFRAME(msg)
 #endif
+
+#define WEBRTC_GONK_VIDEO_SOURCE_POOL_BUFFERS 10
 // We are subclassed from CameraControlListener, which implements a
 // threadsafe reference-count for us.
 NS_IMPL_QUERY_INTERFACE(MediaEngineGonkVideoSource, nsISupports)
@@ -254,6 +258,9 @@ MediaEngineGonkVideoSource::AllocImpl() {
     // to explicitly remove this--destroying the CameraControl object
     // in DeallocImpl() will do that for us.
     mCameraControl->AddListener(this);
+    mTextureClientAllocator =
+      new layers::TextureClientRecycleAllocator(layers::ImageBridgeChild::GetSingleton());
+    mTextureClientAllocator->SetMaxPoolSize(WEBRTC_GONK_VIDEO_SOURCE_POOL_BUFFERS);
   }
   mCallbackMonitor.Notify();
 }
@@ -263,6 +270,7 @@ MediaEngineGonkVideoSource::DeallocImpl() {
   MOZ_ASSERT(NS_IsMainThread());
 
   mCameraControl = nullptr;
+  mTextureClientAllocator = nullptr;
 }
 
 // The same algorithm from bug 840244
@@ -578,14 +586,16 @@ MediaEngineGonkVideoSource::RotateImage(layers::Image* aImage, uint32_t aWidth,
   layers::GrallocImage *nativeImage = static_cast<layers::GrallocImage*>(aImage);
   android::sp<android::GraphicBuffer> graphicBuffer = nativeImage->GetGraphicBuffer();
   void *pMem = nullptr;
+  // Bug 1109957 size will be wrong if width or height are odd
   uint32_t size = aWidth * aHeight * 3 / 2;
+  MOZ_ASSERT(!(aWidth & 1) && !(aHeight & 1));
   graphicBuffer->lock(android::GraphicBuffer::USAGE_SW_READ_MASK, &pMem);
   uint8_t* srcPtr = static_cast<uint8_t*>(pMem);
 
   // Create a video frame and append it to the track.
-  nsRefPtr<layers::Image> image = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
-  layers::PlanarYCbCrImage* videoImage = static_cast<layers::PlanarYCbCrImage*>(image.get());
+  ImageFormat format = ImageFormat::GRALLOC_PLANAR_YCBCR;
+  nsRefPtr<layers::Image> image = mImageContainer->CreateImage(format);
 
   uint32_t dstWidth;
   uint32_t dstHeight;
@ -599,35 +609,48 @@ MediaEngineGonkVideoSource::RotateImage(layers::Image* aImage, uint32_t aWidth,
} }
uint32_t half_width = dstWidth / 2; uint32_t half_width = dstWidth / 2;
uint8_t* dstPtr = videoImage->AllocateAndGetNewBuffer(size);
layers::GrallocImage* videoImage = static_cast<layers::GrallocImage*>(image.get());
MOZ_ASSERT(mTextureClientAllocator);
RefPtr<layers::TextureClient> textureClient
= mTextureClientAllocator->CreateOrRecycleForDrawing(gfx::SurfaceFormat::YUV,
gfx::IntSize(dstWidth, dstHeight),
gfx::BackendType::NONE,
layers::TextureFlags::DEFAULT,
layers::ALLOC_DISALLOW_BUFFERTEXTURECLIENT);
if (!textureClient) {
return;
}
RefPtr<layers::GrallocTextureClientOGL> grallocTextureClient =
static_cast<layers::GrallocTextureClientOGL*>(textureClient.get());
android::sp<android::GraphicBuffer> destBuffer = grallocTextureClient->GetGraphicBuffer();
void* destMem = nullptr;
destBuffer->lock(android::GraphicBuffer::USAGE_SW_WRITE_OFTEN, &destMem);
uint8_t* dstPtr = static_cast<uint8_t*>(destMem);
int32_t yStride = destBuffer->getStride();
// Align to 16 bytes boundary
int32_t uvStride = ((yStride / 2) + 15) & ~0x0F;
libyuv::ConvertToI420(srcPtr, size, libyuv::ConvertToI420(srcPtr, size,
dstPtr, dstWidth, dstPtr, yStride,
dstPtr + (dstWidth * dstHeight), half_width, dstPtr + (yStride * dstHeight + (uvStride * dstHeight / 2)), uvStride,
dstPtr + (dstWidth * dstHeight * 5 / 4), half_width, dstPtr + (yStride * dstHeight), uvStride,
0, 0, 0, 0,
aWidth, aHeight, aWidth, aHeight,
aWidth, aHeight, aWidth, aHeight,
static_cast<libyuv::RotationMode>(mRotation), static_cast<libyuv::RotationMode>(mRotation),
ConvertPixelFormatToFOURCC(graphicBuffer->getPixelFormat())); libyuv::FOURCC_NV21);
destBuffer->unlock();
graphicBuffer->unlock(); graphicBuffer->unlock();
const uint8_t lumaBpp = 8; layers::GrallocImage::GrallocData data;
const uint8_t chromaBpp = 4;
layers::PlanarYCbCrData data; data.mPicSize = gfx::IntSize(dstWidth, dstHeight);
data.mYChannel = dstPtr; data.mGraphicBuffer = textureClient;
data.mYSize = IntSize(dstWidth, dstHeight); videoImage->SetData(data);
data.mYStride = dstWidth * lumaBpp / 8;
data.mCbCrStride = dstWidth * chromaBpp / 8;
data.mCbChannel = dstPtr + dstHeight * data.mYStride;
data.mCrChannel = data.mCbChannel +( dstHeight * data.mCbCrStride / 2);
data.mCbCrSize = IntSize(dstWidth / 2, dstHeight / 2);
data.mPicX = 0;
data.mPicY = 0;
data.mPicSize = IntSize(dstWidth, dstHeight);
data.mStereoMode = StereoMode::MONO;
videoImage->SetDataNoCopy(data);
// implicitly releases last image // implicitly releases last image
mImage = image.forget(); mImage = image.forget();

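For reference, the destination offsets handed to libyuv::ConvertToI420 above follow the Android YV12 layout of the gralloc buffer: the Cr (V) plane starts immediately after the Y plane, the Cb (U) plane follows it, and the chroma stride is rounded up to a 16-byte boundary. A minimal sketch of that arithmetic, with an illustrative struct and helper name that are not part of the patch:

    #include <cstddef>
    #include <cstdint>

    // Illustrative only: YV12 plane offsets for a gralloc destination buffer,
    // assuming the 16-byte chroma-stride alignment applied in the patch
    // (see system/core/include/system/graphics.h).
    struct Yv12Layout {
      int32_t yStride;   // from GraphicBuffer::getStride(), in bytes
      int32_t uvStride;  // chroma stride, rounded up to 16 bytes
      size_t vOffset;    // V (Cr) plane follows the Y plane in YV12
      size_t uOffset;    // U (Cb) plane follows the V plane
    };

    static Yv12Layout ComputeYv12Layout(int32_t aYStride, int32_t aHeight) {
      Yv12Layout layout;
      layout.yStride = aYStride;
      layout.uvStride = ((aYStride / 2) + 15) & ~0x0F;
      layout.vOffset = static_cast<size_t>(aYStride) * aHeight;
      layout.uOffset = layout.vOffset +
                       static_cast<size_t>(layout.uvStride) * (aHeight / 2);
      return layout;
    }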
View File

@@ -15,6 +15,7 @@
 #include "mozilla/Hal.h"
 #include "mozilla/ReentrantMonitor.h"
 #include "mozilla/dom/File.h"
+#include "mozilla/layers/TextureClientRecycleAllocator.h"
 
 namespace mozilla {
@@ -112,6 +113,8 @@ protected:
   int mCameraAngle; // See dom/base/ScreenOrientation.h
   bool mBackCamera;
   bool mOrientationChanged; // True when screen rotates.
+
+  RefPtr<layers::TextureClientRecycleAllocator> mTextureClientAllocator;
 };
} // namespace mozilla } // namespace mozilla

View File

@@ -386,6 +386,10 @@ TextureClient::CreateForDrawing(ISurfaceAllocator* aAllocator,
     return texture;
   }
 
+  if (aAllocFlags & ALLOC_DISALLOW_BUFFERTEXTURECLIENT) {
+    return nullptr;
+  }
+
   if (texture) {
     NS_WARNING("Failed to allocate a TextureClient, falling back to BufferTextureClient.");
   }

View File

@@ -71,7 +71,8 @@ class KeepAlive;
 enum TextureAllocationFlags {
   ALLOC_DEFAULT = 0,
   ALLOC_CLEAR_BUFFER = 1,
-  ALLOC_CLEAR_BUFFER_WHITE = 2
+  ALLOC_CLEAR_BUFFER_WHITE = 2,
+  ALLOC_DISALLOW_BUFFERTEXTURECLIENT = 4
 };
 
 #ifdef XP_WIN

View File

@@ -23,6 +23,13 @@ class TextureClientRecycleAllocatorImp : public ISurfaceAllocator
 public:
   explicit TextureClientRecycleAllocatorImp(ISurfaceAllocator* aAllocator);
 
+  void SetMaxPoolSize(uint32_t aMax)
+  {
+    if (aMax > 0) {
+      mMaxPooledSize = aMax;
+    }
+  }
+
   // Creates and allocates a TextureClient.
   TemporaryRef<TextureClient>
   CreateOrRecycleForDrawing(gfx::SurfaceFormat aFormat,
@@ -125,7 +132,6 @@ TextureClientRecycleAllocatorImp::~TextureClientRecycleAllocatorImp()
   MOZ_ASSERT(mInUseClients.empty());
 }
-
 TemporaryRef<TextureClient>
 TextureClientRecycleAllocatorImp::CreateOrRecycleForDrawing(
   gfx::SurfaceFormat aFormat,
@@ -136,7 +142,8 @@ TextureClientRecycleAllocatorImp::CreateOrRecycleForDrawing(
 {
   // TextureAllocationFlags is actually used only by ContentClient.
   // This class does not handle ConteClient's TextureClient allocation.
-  MOZ_ASSERT(aAllocFlags == TextureAllocationFlags::ALLOC_DEFAULT);
+  MOZ_ASSERT(aAllocFlags == TextureAllocationFlags::ALLOC_DEFAULT ||
+             aAllocFlags == TextureAllocationFlags::ALLOC_DISALLOW_BUFFERTEXTURECLIENT);
   MOZ_ASSERT(!(aTextureFlags & TextureFlags::RECYCLE));
   aTextureFlags = aTextureFlags | TextureFlags::RECYCLE; // Set recycle flag
@@ -237,6 +244,11 @@ TextureClientRecycleAllocator::~TextureClientRecycleAllocator()
   mAllocator = nullptr;
 }
 
+void
+TextureClientRecycleAllocator::SetMaxPoolSize(uint32_t aMax)
+{
+  mAllocator->SetMaxPoolSize(aMax);
+}
 TemporaryRef<TextureClient>
 TextureClientRecycleAllocator::CreateOrRecycleForDrawing(

View File

@@ -32,6 +32,8 @@ public:
   explicit TextureClientRecycleAllocator(ISurfaceAllocator* aAllocator);
 
+  void SetMaxPoolSize(uint32_t aMax);
+
   // Creates and allocates a TextureClient.
   TemporaryRef<TextureClient>
   CreateOrRecycleForDrawing(gfx::SurfaceFormat aFormat,

View File

@@ -171,6 +171,8 @@ SurfaceFormatForPixelFormat(android::PixelFormat aFormat)
     return gfx::SurfaceFormat::R8G8B8X8;
   case PIXEL_FORMAT_RGB_565:
     return gfx::SurfaceFormat::R5G6B5;
+  case HAL_PIXEL_FORMAT_YV12:
+    return gfx::SurfaceFormat::YUV;
   default:
     MOZ_CRASH("Unknown gralloc pixel format");
   }
@@ -230,6 +232,9 @@ GrallocTextureClientOGL::AllocateForSurface(gfx::IntSize aSize,
   case gfx::SurfaceFormat::R5G6B5:
     format = android::PIXEL_FORMAT_RGB_565;
     break;
+  case gfx::SurfaceFormat::YUV:
+    format = HAL_PIXEL_FORMAT_YV12;
+    break;
   case gfx::SurfaceFormat::A8:
     NS_WARNING("gralloc does not support gfx::SurfaceFormat::A8");
     return false;

View File

@@ -1060,19 +1060,15 @@ WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
     return kMediaConduitMalformedArgument;
   }
 
-  webrtc::RawVideoType type;
-  switch (video_type) {
-    case kVideoI420:
-      type = webrtc::kVideoI420;
-      break;
-    case kVideoNV21:
-      type = webrtc::kVideoNV21;
-      break;
-    default:
-      CSFLogError(logTag, "%s VideoType Invalid. Only 1420 and NV21 Supported",__FUNCTION__);
-      MOZ_ASSERT(PR_FALSE);
-      return kMediaConduitMalformedArgument;
+  // NOTE: update when common_types.h changes
+  if (video_type > kVideoBGRA) {
+    CSFLogError(logTag, "%s VideoType %d Invalid", __FUNCTION__, video_type);
+    MOZ_ASSERT(PR_FALSE);
+    return kMediaConduitMalformedArgument;
   }
+  // RawVideoType == VideoType
+  webrtc::RawVideoType type = static_cast<webrtc::RawVideoType>((int)video_type);
 
   //Transmission should be enabled before we insert any frames.
   if(!mEngineTransmitting)
   {

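The cast above relies on mozilla::VideoType and webrtc::RawVideoType keeping identical enumerator values, which is what the "NOTE: update when common_types.h changes" comment warns about. A hypothetical compile-time guard, not part of the patch, that would catch the two enums drifting apart (enumerator names are taken from the surrounding diff):

    // Hypothetical drift check, assuming both enum headers are visible here.
    static_assert(static_cast<int>(mozilla::kVideoI420) ==
                  static_cast<int>(webrtc::kVideoI420),
                  "mozilla::VideoType and webrtc::RawVideoType must stay in sync");
    static_assert(static_cast<int>(mozilla::kVideoNV21) ==
                  static_cast<int>(webrtc::kVideoNV21),
                  "mozilla::VideoType and webrtc::RawVideoType must stay in sync");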
View File

@@ -1175,15 +1175,36 @@ void MediaPipelineTransmit::PipelineListener::ProcessVideoChunk(
   if (format == ImageFormat::GRALLOC_PLANAR_YCBCR) {
     layers::GrallocImage *nativeImage = static_cast<layers::GrallocImage*>(img);
     android::sp<android::GraphicBuffer> graphicBuffer = nativeImage->GetGraphicBuffer();
+    int pixelFormat = graphicBuffer->getPixelFormat(); /* PixelFormat is an enum == int */
+    mozilla::VideoType destFormat;
+    switch (pixelFormat) {
+      case HAL_PIXEL_FORMAT_YV12:
+        // all android must support this
+        destFormat = mozilla::kVideoYV12;
+        break;
+      case layers::GrallocImage::HAL_PIXEL_FORMAT_YCbCr_420_SP:
+        destFormat = mozilla::kVideoNV21;
+        break;
+      case layers::GrallocImage::HAL_PIXEL_FORMAT_YCbCr_420_P:
+        destFormat = mozilla::kVideoI420;
+        break;
+      default:
+        // XXX Bug NNNNNNN
+        // use http://mxr.mozilla.org/mozilla-central/source/content/media/omx/I420ColorConverterHelper.cpp
+        // to convert unknown types (OEM-specific) to I420
+        MOZ_MTLOG(ML_ERROR, "Un-handled GRALLOC buffer type:" << pixelFormat);
+        MOZ_CRASH();
+    }
     void *basePtr;
     graphicBuffer->lock(android::GraphicBuffer::USAGE_SW_READ_MASK, &basePtr);
     uint32_t width = graphicBuffer->getWidth();
     uint32_t height = graphicBuffer->getHeight();
+    // XXX gralloc buffer's width and stride could be different depends on implementations.
     conduit->SendVideoFrame(static_cast<unsigned char*>(basePtr),
                             I420SIZE(width, height),
                             width,
                             height,
-                            mozilla::kVideoNV21, 0);
+                            destFormat, 0);
     graphicBuffer->unlock();
   } else
 #endif

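The frame size passed to SendVideoFrame above is a tight-packed 4:2:0 budget, which is why the XXX note about gralloc stride matters: when the buffer's stride exceeds its width, the packed size understates the bytes actually laid out in the buffer. Assuming I420SIZE expands to roughly the usual budget, the arithmetic is:

    #include <cstddef>
    #include <cstdint>

    // Sketch only (helper name is illustrative, not from the tree):
    // tight-packed I420 byte count for even dimensions, i.e. a
    // full-resolution Y plane plus quarter-resolution Cb and Cr planes.
    static inline size_t I420PackedSize(uint32_t aWidth, uint32_t aHeight) {
      return static_cast<size_t>(aWidth) * aHeight                   // Y
           + static_cast<size_t>(aWidth / 2) * (aHeight / 2) * 2;    // Cb + Cr
    }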
View File

@ -241,6 +241,28 @@ int ConvertToI420(VideoType src_video_type,
dst_width = dst_frame->height(); dst_width = dst_frame->height();
dst_height =dst_frame->width(); dst_height =dst_frame->width();
} }
#ifdef WEBRTC_GONK
if (src_video_type == kYV12) {
// In gralloc buffer, yv12 color format's cb and cr's strides are aligned
// to 16 Bytes boundary. See /system/core/include/system/graphics.h
int stride_y = src_width;
int stride_uv = (((stride_y + 1) / 2) + 15) & ~0x0F;
return libyuv::I420Rotate(src_frame,
stride_y,
src_frame + (stride_y * src_height) + (stride_uv * ((src_height + 1) / 2)),
stride_uv,
src_frame + (stride_y * src_height),
stride_uv,
dst_frame->buffer(kYPlane),
dst_frame->stride(kYPlane),
dst_frame->buffer(kUPlane),
dst_frame->stride(kUPlane),
dst_frame->buffer(kVPlane),
dst_frame->stride(kVPlane),
src_width, src_height,
ConvertRotationMode(rotation));
}
#endif
return libyuv::ConvertToI420(src_frame, sample_size, return libyuv::ConvertToI420(src_frame, sample_size,
dst_frame->buffer(kYPlane), dst_frame->buffer(kYPlane),
dst_frame->stride(kYPlane), dst_frame->stride(kYPlane),