Mirror of https://github.com/mozilla/gecko-dev.git (synced 2024-11-24 13:21:05 +00:00)
Decouple SharedRGBImage and PlanarYCbCrImage from ImageContainer. (bug 1222910, r=mattwoodrow)
commit 27bfc27d62 (parent 7970ac45fd)
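The pattern repeated throughout this diff: callers stop allocating through the format-keyed ImageContainer::CreateImage() and then downcasting, and instead use the new typed factories CreatePlanarYCbCrImage() / CreateSharedRGBImage() (or Image::AsPlanarYCbCrImage() when they already hold an Image). A minimal caller-side sketch of the migration; the surrounding function, variable names, and null handling here are illustrative, not taken from the tree:

    // Before: allocate by format enum, then static_cast to the concrete type.
    RefPtr<layers::Image> img =
        aContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
    layers::PlanarYCbCrImage* yuv =
        static_cast<layers::PlanarYCbCrImage*>(img.get());

    // After: ask the container for the concrete type directly; no cast needed.
    RefPtr<layers::PlanarYCbCrImage> planar =
        aContainer->CreatePlanarYCbCrImage();
    if (!planar) {
      return nullptr;  // allocation can still fail, as the callers in the diff below check
    }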
@@ -316,7 +316,7 @@ VideoData::Create(const VideoInfo& aInfo,
 }
 #endif
 if (!v->mImage) {
-v->mImage = aContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
+v->mImage = aContainer->CreatePlanarYCbCrImage();
 }
 } else {
 v->mImage = aImage;
@@ -328,7 +328,8 @@ VideoData::Create(const VideoInfo& aInfo,
 NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::PLANAR_YCBCR ||
 v->mImage->GetFormat() == ImageFormat::GRALLOC_PLANAR_YCBCR,
 "Wrong format?");
-PlanarYCbCrImage* videoImage = static_cast<PlanarYCbCrImage*>(v->mImage.get());
+PlanarYCbCrImage* videoImage = v->mImage->AsPlanarYCbCrImage();
+MOZ_ASSERT(videoImage);

 bool shouldCopyData = (aImage == nullptr);
 if (!VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
@@ -339,11 +340,11 @@ VideoData::Create(const VideoInfo& aInfo,
 #ifdef MOZ_WIDGET_GONK
 if (!videoImage->IsValid() && !aImage && IsYV12Format(Y, Cb, Cr)) {
 // Failed to allocate gralloc. Try fallback.
-v->mImage = aContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
+v->mImage = aContainer->CreatePlanarYCbCrImage();
 if (!v->mImage) {
 return nullptr;
 }
-videoImage = static_cast<PlanarYCbCrImage*>(v->mImage.get());
+videoImage = v->mImage->AsPlanarYCbCrImage();
 if(!VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
 true /* aCopyData */)) {
 return nullptr;
@@ -828,13 +828,11 @@ MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)

 if (frame->GetForceBlack()) {
 if (!blackImage) {
-blackImage = aStream->mVideoOutputs[0]->
-GetImageContainer()->CreateImage(ImageFormat::PLANAR_YCBCR);
+blackImage = aStream->mVideoOutputs[0]->GetImageContainer()->CreatePlanarYCbCrImage();
 if (blackImage) {
 // Sets the image to a single black pixel, which will be scaled to
 // fill the rendered size.
-SetImageToBlackPixel(static_cast<PlanarYCbCrImage*>
-(blackImage.get()));
+SetImageToBlackPixel(blackImage->AsPlanarYCbCrImage());
 }
 }
 if (blackImage) {
@@ -43,17 +43,14 @@ VideoFrame::TakeFrom(VideoFrame* aFrame)
 /* static */ already_AddRefed<Image>
 VideoFrame::CreateBlackImage(const gfx::IntSize& aSize)
 {
-RefPtr<ImageContainer> container;
-RefPtr<Image> image;
-container = LayerManager::CreateImageContainer();
-image = container->CreateImage(ImageFormat::PLANAR_YCBCR);
+RefPtr<ImageContainer> container = LayerManager::CreateImageContainer();
+RefPtr<PlanarYCbCrImage> image = container->CreatePlanarYCbCrImage();
 if (!image) {
 MOZ_ASSERT(false);
 return nullptr;
 }

 int len = ((aSize.width * aSize.height) * 3 / 2);
-PlanarYCbCrImage* planar = static_cast<PlanarYCbCrImage*>(image.get());

 // Generate a black image.
 ScopedDeletePtr<uint8_t> frame(new uint8_t[len]);
@@ -80,7 +77,7 @@ VideoFrame::CreateBlackImage(const gfx::IntSize& aSize)
 data.mStereoMode = StereoMode::MONO;

 // SetData copies data, so we can free data.
-if (!planar->SetData(data)) {
+if (!image->SetData(data)) {
 MOZ_ASSERT(false);
 return nullptr;
 }
@@ -387,8 +387,8 @@ uint8_t *
 AndroidMediaReader::ImageBufferCallback::CreateI420Image(size_t aWidth,
 size_t aHeight)
 {
-mImage = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
-PlanarYCbCrImage *yuvImage = static_cast<PlanarYCbCrImage *>(mImage.get());
+RefPtr<PlanarYCbCrImage> yuvImage = mImageContainer->CreatePlanarYCbCrImage();
+mImage = yuvImage;

 if (!yuvImage) {
 NS_WARNING("Could not create I420 image");
@@ -45,8 +45,7 @@ GstFlowReturn GStreamerReader::AllocateVideoBufferFull(GstPad* aPad,
 if (container == nullptr) {
 return GST_FLOW_ERROR;
 }
-RefPtr<PlanarYCbCrImage> image =
-container->CreateImage(ImageFormat::PLANAR_YCBCR).downcast<PlanarYCbCrImage>();
+RefPtr<PlanarYCbCrImage> image = container->CreatePlanarYCbCrImage();

 /* prepare a GstBuffer pointing to the underlying PlanarYCbCrImage buffer */
 GstBuffer* buf = GST_BUFFER(gst_moz_video_buffer_new());
@@ -331,10 +331,8 @@ FFmpegH264Decoder<LIBAV_VER>::AllocateYUV420PVideoBuffer(

 size_t allocSize = pitch * decodeHeight + (chroma_pitch * chroma_height) * 2;

-RefPtr<Image> image =
-mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
-PlanarYCbCrImage* ycbcr = static_cast<PlanarYCbCrImage*>(image.get());
-uint8_t* buffer = ycbcr->AllocateAndGetNewBuffer(allocSize + 64);
+RefPtr<PlanarYCbCrImage> image = mImageContainer->CreatePlanarYCbCrImage();
+uint8_t* buffer = image->AllocateAndGetNewBuffer(allocSize + 64);
 // FFmpeg requires a 16/32 bytes-aligned buffer, align it on 64 to be safe
 buffer = reinterpret_cast<uint8_t*>((reinterpret_cast<uintptr_t>(buffer) + 63) & ~63);

@@ -236,9 +236,7 @@ MediaEngineDefaultVideoSource::Notify(nsITimer* aTimer)
 }

 // Allocate a single solid color image
-RefPtr<layers::Image> image = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
-RefPtr<layers::PlanarYCbCrImage> ycbcr_image =
-static_cast<layers::PlanarYCbCrImage*>(image.get());
+RefPtr<layers::PlanarYCbCrImage> ycbcr_image = mImageContainer->CreatePlanarYCbCrImage();
 layers::PlanarYCbCrData data;
 AllocateSolidColorFrame(data, mOpts.mWidth, mOpts.mHeight, 0x80, mCb, mCr);

@@ -755,7 +755,6 @@ MediaEngineGonkVideoSource::RotateImage(layers::Image* aImage, uint32_t aWidth,

 uint32_t half_width = dstWidth / 2;

-layers::GrallocImage* videoImage = static_cast<layers::GrallocImage*>(image.get());
 MOZ_ASSERT(mTextureClientAllocator);
 RefPtr<layers::TextureClient> textureClient
 = mTextureClientAllocator->CreateOrRecycle(gfx::SurfaceFormat::YUV,
@@ -792,12 +791,11 @@ MediaEngineGonkVideoSource::RotateImage(layers::Image* aImage, uint32_t aWidth,

 data.mPicSize = gfx::IntSize(dstWidth, dstHeight);
 data.mGraphicBuffer = textureClient;
-videoImage->SetData(data);
+image->AsGrallocImage()->SetData(data);
 } else {
 // Handle out of gralloc case.
-image = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
-layers::PlanarYCbCrImage* videoImage = static_cast<layers::PlanarYCbCrImage*>(image.get());
-uint8_t* dstPtr = videoImage->AllocateAndGetNewBuffer(size);
+image = mImageContainer->CreatePlanarYCbCrImage();
+uint8_t* dstPtr = image->AsPlanarYCbCrImage()->AllocateAndGetNewBuffer(size);

 libyuv::ConvertToI420(srcPtr, size,
 dstPtr, dstWidth,
@@ -825,7 +823,7 @@ MediaEngineGonkVideoSource::RotateImage(layers::Image* aImage, uint32_t aWidth,
 data.mPicSize = IntSize(dstWidth, dstHeight);
 data.mStereoMode = StereoMode::MONO;

-videoImage->SetDataNoCopy(data);
+image->AsPlanarYCbCrImage()->SetDataNoCopy(data);
 }
 graphicBuffer->unlock();

@@ -294,8 +294,7 @@ MediaEngineRemoteVideoSource::DeliverFrame(unsigned char* buffer,
 }

 // Create a video frame and append it to the track.
-RefPtr<layers::Image> image = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
-layers::PlanarYCbCrImage* videoImage = static_cast<layers::PlanarYCbCrImage*>(image.get());
+RefPtr<layers::PlanarYCbCrImage> image = mImageContainer->CreatePlanarYCbCrImage();

 uint8_t* frame = static_cast<uint8_t*> (buffer);
 const uint8_t lumaBpp = 8;
@@ -315,7 +314,7 @@ MediaEngineRemoteVideoSource::DeliverFrame(unsigned char* buffer,
 data.mPicSize = IntSize(mWidth, mHeight);
 data.mStereoMode = StereoMode::MONO;

-if (!videoImage->SetData(data)) {
+if (!image->SetData(data)) {
 MOZ_ASSERT(false);
 return 0;
 }
@@ -17,6 +17,8 @@
 #include "mozilla/layers/PImageContainerChild.h"
 #include "mozilla/layers/ImageClient.h" // for ImageClient
 #include "mozilla/layers/LayersMessages.h"
+#include "mozilla/layers/SharedPlanarYCbCrImage.h"
+#include "mozilla/layers/SharedRGBImage.h"
 #include "nsISupportsUtils.h" // for NS_IF_ADDREF
 #include "YCbCrUtils.h" // for YCbCr conversions
 #ifdef MOZ_WIDGET_GONK
@@ -70,13 +72,15 @@ ImageFactory::CreateImage(ImageFormat aFormat,
 return img.forget();
 }
 #endif
-if (aFormat == ImageFormat::PLANAR_YCBCR) {
-img = new RecyclingPlanarYCbCrImage(aRecycleBin);
-return img.forget();
-}
 return nullptr;
 }

+RefPtr<PlanarYCbCrImage>
+ImageFactory::CreatePlanarYCbCrImage(const gfx::IntSize& aScaleHint, BufferRecycleBin *aRecycleBin)
+{
+return new RecyclingPlanarYCbCrImage(aRecycleBin);
+}
+
 BufferRecycleBin::BufferRecycleBin()
 : mLock("mozilla.layers.BufferRecycleBin.mLock")
 {
@@ -201,6 +205,26 @@ ImageContainer::CreateImage(ImageFormat aFormat)
 return mImageFactory->CreateImage(aFormat, mScaleHint, mRecycleBin);
 }

+RefPtr<PlanarYCbCrImage>
+ImageContainer::CreatePlanarYCbCrImage()
+{
+ReentrantMonitorAutoEnter mon(mReentrantMonitor);
+if (mImageClient && mImageClient->AsImageClientSingle()) {
+return new SharedPlanarYCbCrImage(mImageClient);
+}
+return mImageFactory->CreatePlanarYCbCrImage(mScaleHint, mRecycleBin);
+}
+
+RefPtr<SharedRGBImage>
+ImageContainer::CreateSharedRGBImage()
+{
+ReentrantMonitorAutoEnter mon(mReentrantMonitor);
+if (!mImageClient || !mImageClient->AsImageClientSingle()) {
+return nullptr;
+}
+return new SharedRGBImage(mImageClient);
+}
+
 void
 ImageContainer::SetCurrentImageInternal(const nsTArray<NonOwningImage>& aImages)
 {
@@ -103,6 +103,7 @@ class ImageCompositeNotification;
 class ImageContainerChild;
 class PImageContainerChild;
+class SharedPlanarYCbCrImage;
 class PlanarYCbCrImage;
 class TextureClient;
 class CompositableClient;
 class GrallocImage;
@@ -123,6 +124,7 @@ class SurfaceTextureImage;
 #ifdef XP_MACOSX
 class MacIOSurfaceImage;
 #endif
+class SharedRGBImage;

 /**
 * A class representing a buffer of pixel data. The data can be in one
@@ -183,6 +185,7 @@ public:
 #ifdef XP_MACOSX
 virtual MacIOSurfaceImage* AsMacIOSurfaceImage() { return nullptr; }
 #endif
+virtual PlanarYCbCrImage* AsPlanarYCbCrImage() { return nullptr; }

 protected:
 Image(void* aImplData, ImageFormat aFormat) :
@@ -274,6 +277,9 @@ protected:
 const gfx::IntSize &aScaleHint,
 BufferRecycleBin *aRecycleBin);

+virtual RefPtr<PlanarYCbCrImage> CreatePlanarYCbCrImage(
+const gfx::IntSize& aScaleHint,
+BufferRecycleBin *aRecycleBin);
 };

 /**
@@ -321,6 +327,11 @@ public:
 */
 B2G_ACL_EXPORT already_AddRefed<Image> CreateImage(ImageFormat aFormat);

+RefPtr<PlanarYCbCrImage> CreatePlanarYCbCrImage();
+
+// Factory methods for shared image types.
+RefPtr<SharedRGBImage> CreateSharedRGBImage();
+
 struct NonOwningImage {
 explicit NonOwningImage(Image* aImage = nullptr,
 TimeStamp aTimeStamp = TimeStamp(),
@@ -722,6 +733,8 @@ public:

 virtual size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const = 0;

+PlanarYCbCrImage* AsPlanarYCbCrImage() { return this; }
+
 protected:
 already_AddRefed<gfx::SourceSurface> GetAsSourceSurface();

@@ -78,17 +78,10 @@ class BasicImageFactory : public ImageFactory
 public:
 BasicImageFactory() {}

-virtual already_AddRefed<Image> CreateImage(ImageFormat aFormat,
-const gfx::IntSize &aScaleHint,
-BufferRecycleBin *aRecycleBin)
+virtual RefPtr<PlanarYCbCrImage>
+CreatePlanarYCbCrImage(const gfx::IntSize& aScaleHint, BufferRecycleBin* aRecycleBin)
 {
-RefPtr<Image> image;
-if (aFormat == ImageFormat::PLANAR_YCBCR) {
-image = new BasicPlanarYCbCrImage(aScaleHint, gfxPlatform::GetPlatform()->GetOffscreenFormat(), aRecycleBin);
-return image.forget();
-}
-
-return ImageFactory::CreateImage(aFormat, aScaleHint, aRecycleBin);
+return new BasicPlanarYCbCrImage(aScaleHint, gfxPlatform::GetPlatform()->GetOffscreenFormat(), aRecycleBin);
 }
 };

@@ -21,8 +21,6 @@
 #include "mozilla/layers/ISurfaceAllocator.h"
 #include "mozilla/layers/LayersSurfaces.h" // for SurfaceDescriptor, etc
 #include "mozilla/layers/ShadowLayers.h" // for ShadowLayerForwarder
-#include "mozilla/layers/SharedPlanarYCbCrImage.h"
-#include "mozilla/layers/SharedRGBImage.h"
 #include "mozilla/layers/TextureClient.h" // for TextureClient, etc
 #include "mozilla/layers/TextureClientOGL.h" // for SurfaceTextureClient
 #include "mozilla/mozalloc.h" // for operator delete, etc
@@ -324,12 +322,6 @@ ImageClientSingle::CreateImage(ImageFormat aFormat)
 {
 RefPtr<Image> img;
 switch (aFormat) {
-case ImageFormat::PLANAR_YCBCR:
-img = new SharedPlanarYCbCrImage(this);
-return img.forget();
-case ImageFormat::SHARED_RGB:
-img = new SharedRGBImage(this);
-return img.forget();
 #ifdef MOZ_WIDGET_GONK
 case ImageFormat::GRALLOC_PLANAR_YCBCR:
 img = new GrallocImage();
@@ -29,6 +29,7 @@ class AsyncTransactionTracker;
 class Image;
 class ImageContainer;
 class ShadowableLayer;
+class ImageClientSingle;

 /**
 * Image clients are used by basic image layers on the content thread, they
@@ -72,6 +73,8 @@ public:
 void RemoveTextureWithWaiter(TextureClient* aTexture,
 AsyncTransactionWaiter* aAsyncTransactionWaiter = nullptr);

+virtual ImageClientSingle* AsImageClientSingle() { return nullptr; }
+
 protected:
 ImageClient(CompositableForwarder* aFwd, TextureFlags aFlags,
 CompositableType aType);
@@ -103,6 +106,8 @@ public:

 virtual void FlushAllImages(AsyncTransactionWaiter* aAsyncTransactionWaiter) override;

+ImageClientSingle* AsImageClientSingle() override { return this; }
+
 protected:
 struct Buffer {
 RefPtr<TextureClient> mTextureClient;
@@ -41,19 +41,16 @@ CreateSharedRGBImage(ImageContainer *aImageContainer,
 return nullptr;
 }

-RefPtr<Image> image = aImageContainer->CreateImage(ImageFormat::SHARED_RGB);
-
-if (!image) {
+RefPtr<SharedRGBImage> rgbImage = aImageContainer->CreateSharedRGBImage();
+if (!rgbImage) {
 NS_WARNING("Failed to create SharedRGBImage");
 return nullptr;
 }

-RefPtr<SharedRGBImage> rgbImage = static_cast<SharedRGBImage*>(image.get());
 if (!rgbImage->Allocate(aSize, gfx::ImageFormatToSurfaceFormat(aImageFormat))) {
 NS_WARNING("Failed to allocate a shared image");
 return nullptr;
 }
-return image.forget();
+return rgbImage.forget();
 }

 SharedRGBImage::SharedRGBImage(ImageClient* aCompositable)
@@ -1482,11 +1482,11 @@ void MediaPipelineReceiveVideo::PipelineListener::RenderVideoFrame(
 // Create a video frame using |buffer|.
 #ifdef MOZ_WIDGET_GONK
 ImageFormat format = ImageFormat::GRALLOC_PLANAR_YCBCR;
-#else
-ImageFormat format = ImageFormat::PLANAR_YCBCR;
-#endif
 RefPtr<Image> image = image_container_->CreateImage(format);
 PlanarYCbCrImage* yuvImage = static_cast<PlanarYCbCrImage*>(image.get());
+#else
+RefPtr<PlanarYCbCrImage> yuvImage = image_container_->CreatePlanarYCbCrImage();
+#endif
 uint8_t* frame = const_cast<uint8_t*>(static_cast<const uint8_t*> (buffer));

 PlanarYCbCrData yuvData;
@@ -1507,7 +1507,7 @@ void MediaPipelineReceiveVideo::PipelineListener::RenderVideoFrame(
 return;
 }

-image_ = image.forget();
+image_ = yuvImage;
 }
 #ifdef WEBRTC_GONK
 else {
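The SharedRGBImage path follows the same shape: ImageContainer::CreateSharedRGBImage() returns null unless an ImageClientSingle backs the container, so callers check the result before allocating, as the updated CreateSharedRGBImage() helper above does. A hedged sketch of that call sequence (the surrounding function, aSize, and aImageFormat are placeholders, not from the tree):

    RefPtr<layers::SharedRGBImage> rgb = aImageContainer->CreateSharedRGBImage();
    if (!rgb) {
      return nullptr;  // no ImageClientSingle to back a shared buffer
    }
    if (!rgb->Allocate(aSize, gfx::ImageFormatToSurfaceFormat(aImageFormat))) {
      return nullptr;  // shared memory allocation failed
    }
    return rgb.forget();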