Mirror of https://github.com/mozilla/gecko-dev.git, synced 2024-11-27 23:02:20 +00:00
Bug 1774300 - Implement VideoFrame Constructor for I420A ArrayBuffer* r=padenot,jgilbert
This patch allows constructing a VideoFrame from an ArrayBuffer* in the I420A format, which contains the I420 data plus an extra alpha-channel plane.

Depends on D149584

Differential Revision: https://phabricator.services.mozilla.com/D149943
parent 0844a3ff75
commit c3449a3361
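For context, the web-facing behavior this patch enables looks roughly like the following sketch, modeled on the WPT test added at the end of this diff (the 4x2 frame and plane values come from that test; the rest is the standard WebCodecs VideoFrame API):

// Build a tightly packed 4x2 I420A buffer: Y (4x2), U (2x1), V (2x1), A (4x2).
const data = new Uint8Array([
  1, 2, 3, 4,      // Y
  5, 6, 7, 8,
  9, 10,           // U
  11, 12,          // V
  13, 14, 15, 16,  // A
  17, 18, 19, 20,
]);
const frame = new VideoFrame(data, {
  format: 'I420A',
  codedWidth: 4,
  codedHeight: 2,
  timestamp: 0,
});
console.log(frame.format);  // 'I420A'
frame.close();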
@@ -17,6 +17,7 @@
 #include "mozilla/ResultVariant.h"
 #include "mozilla/ScopeExit.h"
 #include "mozilla/Tuple.h"
+#include "mozilla/UniquePtr.h"
 #include "mozilla/dom/DOMRect.h"
 #include "mozilla/dom/Promise.h"
 #include "mozilla/dom/UnionTypes.h"
@@ -131,6 +132,7 @@ class YUVBufferReaderBase {
   const RangedPtr<uint8_t> mPtr;
 };
 
+class I420ABufferReader;
 class I420BufferReader : public YUVBufferReaderBase {
  public:
   I420BufferReader(const RangedPtr<uint8_t>& aPtr, int32_t aWidth,
@@ -148,6 +150,7 @@ class I420BufferReader : public YUVBufferReaderBase {
                   UByteSize().value())
                      .value()];
   }
+  virtual I420ABufferReader* AsI420ABufferReader() { return nullptr; }
 
   const int32_t mStrideU;
   const int32_t mStrideV;
@@ -156,6 +159,30 @@ class I420BufferReader : public YUVBufferReaderBase {
   CheckedInt<size_t> UByteSize() const {
     return CheckedInt<size_t>(CeilingOfHalf(mHeight)) * mStrideU;
   }
+
+  CheckedInt<size_t> VSize() const {
+    return CheckedInt<size_t>(CeilingOfHalf(mHeight)) * mStrideV;
+  }
 };
+
+class I420ABufferReader final : public I420BufferReader {
+ public:
+  I420ABufferReader(const RangedPtr<uint8_t>& aPtr, int32_t aWidth,
+                    int32_t aHeight)
+      : I420BufferReader(aPtr, aWidth, aHeight), mStrideA(aWidth) {
+    MOZ_ASSERT(mStrideA == mStrideY);
+  }
+  virtual ~I420ABufferReader() = default;
+
+  const uint8_t* DataA() const {
+    return &mPtr[(CheckedInt<ptrdiff_t>(YByteSize().value()) +
+                  UByteSize().value() + VSize().value())
+                     .value()];
+  }
+
+  virtual I420ABufferReader* AsI420ABufferReader() override { return this; }
+
+  const int32_t mStrideA;
+};
 
 class NV12BufferReader final : public YUVBufferReaderBase {
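As a worked example (not part of the patch), the reader's plane offsets for the 4x2 test frame used in the WPT test below come out as follows, assuming the usual I420 conventions that the Y and alpha strides equal the coded width and the U/V strides equal ceil(width / 2):

// Hypothetical offset arithmetic mirroring DataY()/DataU()/DataV()/DataA().
const width = 4, height = 2;
const yBytes = width * height;                                // 8
const uBytes = Math.ceil(width / 2) * Math.ceil(height / 2);  // 2
const vBytes = uBytes;                                        // 2
const aBytes = width * height;                                // 8
const offsets = {
  y: 0,                         // DataY()
  u: yBytes,                    // DataU() -> 8
  v: yBytes + uBytes,           // DataV() -> 10
  a: yBytes + uBytes + vBytes,  // DataA() -> 12
};
console.log(offsets, yBytes + uBytes + vBytes + aBytes);  // ..., 20 bytes total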
@@ -651,25 +678,40 @@ static Result<RefPtr<layers::Image>, nsCString> CreateRGBAImageFromBuffer(
 static Result<RefPtr<layers::Image>, nsCString> CreateYUVImageFromBuffer(
     const VideoFrame::Format& aFormat, const VideoColorSpaceInit& aColorSpace,
     const gfx::IntSize& aSize, const RangedPtr<uint8_t>& aPtr) {
-  if (aFormat.PixelFormat() == VideoPixelFormat::I420) {
-    I420BufferReader reader(aPtr, aSize.Width(), aSize.Height());
+  if (aFormat.PixelFormat() == VideoPixelFormat::I420 ||
+      aFormat.PixelFormat() == VideoPixelFormat::I420A) {
+    UniquePtr<I420BufferReader> reader;
+    if (aFormat.PixelFormat() == VideoPixelFormat::I420) {
+      reader.reset(new I420BufferReader(aPtr, aSize.Width(), aSize.Height()));
+    } else {
+      reader.reset(new I420ABufferReader(aPtr, aSize.Width(), aSize.Height()));
+    }
 
     layers::PlanarYCbCrData data;
-    data.mPictureRect = gfx::IntRect(0, 0, reader.mWidth, reader.mHeight);
+    data.mPictureRect = gfx::IntRect(0, 0, reader->mWidth, reader->mHeight);
 
     // Y plane.
-    data.mYChannel = const_cast<uint8_t*>(reader.DataY());
-    data.mYStride = reader.mStrideY;
+    data.mYChannel = const_cast<uint8_t*>(reader->DataY());
+    data.mYStride = reader->mStrideY;
     data.mYSkip = 0;
     // Cb plane.
-    data.mCbChannel = const_cast<uint8_t*>(reader.DataU());
+    data.mCbChannel = const_cast<uint8_t*>(reader->DataU());
     data.mCbSkip = 0;
     // Cr plane.
-    data.mCrChannel = const_cast<uint8_t*>(reader.DataV());
+    data.mCrChannel = const_cast<uint8_t*>(reader->DataV());
     data.mCbSkip = 0;
+    // A plane.
+    if (aFormat.PixelFormat() == VideoPixelFormat::I420A) {
+      data.mAlpha.emplace();
+      data.mAlpha->mChannel =
+          const_cast<uint8_t*>(reader->AsI420ABufferReader()->DataA());
+      data.mAlpha->mSize = data.mPictureRect.Size();
+      // No values for mDepth and mPremultiplied.
+    }
 
     // CbCr plane vector.
-    MOZ_RELEASE_ASSERT(reader.mStrideU == reader.mStrideV);
-    data.mCbCrStride = reader.mStrideU;
+    MOZ_RELEASE_ASSERT(reader->mStrideU == reader->mStrideV);
+    data.mCbCrStride = reader->mStrideU;
     data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
     // Color settings.
     if (aColorSpace.mFullRange.WasPassed() && aColorSpace.mFullRange.Value()) {
@@ -686,7 +728,9 @@ static Result<RefPtr<layers::Image>, nsCString> CreateYUVImageFromBuffer(
     RefPtr<layers::PlanarYCbCrImage> image =
         new layers::RecyclingPlanarYCbCrImage(new layers::BufferRecycleBin());
     if (!image->CopyData(data)) {
-      return Err(nsCString("Failed to create I420 image"));
+      return Err(nsPrintfCString(
+          "Failed to create I420%s image",
+          (aFormat.PixelFormat() == VideoPixelFormat::I420A ? "A" : "")));
     }
     // Manually cast type to make Result work.
     return RefPtr<layers::Image>(image.forget());
@@ -739,9 +783,9 @@ static Result<RefPtr<layers::Image>, nsCString> CreateImageFromBuffer(
     const gfx::IntSize& aSize, const RangedPtr<uint8_t>& aPtr) {
   switch (aFormat.PixelFormat()) {
     case VideoPixelFormat::I420:
+    case VideoPixelFormat::I420A:
     case VideoPixelFormat::NV12:
       return CreateYUVImageFromBuffer(aFormat, aColorSpace, aSize, aPtr);
-    case VideoPixelFormat::I420A:
     case VideoPixelFormat::I422:
     case VideoPixelFormat::I444:
       // Not yet support for now.
@@ -1588,8 +1632,12 @@ bool VideoFrame::Resource::CopyTo(const Format::Plane& aPlane,
       case Format::Plane::V:
         return copyPlane(mImage->AsPlanarYCbCrImage()->GetData()->mCrChannel);
       case Format::Plane::A:
-        MOZ_ASSERT_UNREACHABLE("invalid plane");
+        MOZ_ASSERT(mFormat.PixelFormat() == VideoPixelFormat::I420A);
+        MOZ_ASSERT(mImage->AsPlanarYCbCrImage()->GetData()->mAlpha);
+        return copyPlane(
+            mImage->AsPlanarYCbCrImage()->GetData()->mAlpha->mChannel);
     }
+    MOZ_ASSERT_UNREACHABLE("invalid plane");
   }
 
   if (mImage->GetFormat() == ImageFormat::NV_IMAGE) {
@@ -684,7 +684,8 @@ bool RecyclingPlanarYCbCrImage::CopyData(const Data& aData) {
   auto cbcrSize = aData.CbCrDataSize();
   const auto checkedSize =
       CheckedInt<uint32_t>(aData.mCbCrStride) * cbcrSize.height * 2 +
-      CheckedInt<uint32_t>(aData.mYStride) * ySize.height;
+      CheckedInt<uint32_t>(aData.mYStride) * ySize.height *
+          (aData.mAlpha ? 2 : 1);
 
   if (!checkedSize.isValid()) return false;
 
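The updated check simply doubles the Y-plane term when an alpha plane is present, since the alpha plane shares Y's stride and height. Sanity-checking it against the 4x2 I420A test frame from the WPT test below (illustrative numbers only):

// checkedSize = cbcrStride * cbcrHeight * 2 + yStride * yHeight * (hasAlpha ? 2 : 1)
const yStride = 4, yHeight = 2;        // Y (and A) plane geometry
const cbcrStride = 2, cbcrHeight = 1;  // U and V plane geometry
const hasAlpha = true;
const size = cbcrStride * cbcrHeight * 2 + yStride * yHeight * (hasAlpha ? 2 : 1);
console.log(size);  // 20, matching the packed 20-byte I420A test buffer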
@@ -697,7 +698,7 @@ bool RecyclingPlanarYCbCrImage::CopyData(const Data& aData) {
   // update buffer size
   mBufferSize = size;
 
-  mData = aData;
+  mData = aData;  // mAlpha will be set if aData has it
   mData.mYChannel = mBuffer.get();
   mData.mCbChannel = mData.mYChannel + mData.mYStride * ySize.height;
   mData.mCrChannel = mData.mCbChannel + mData.mCbCrStride * cbcrSize.height;
@@ -709,6 +710,10 @@ bool RecyclingPlanarYCbCrImage::CopyData(const Data& aData) {
             aData.mCbSkip);
   CopyPlane(mData.mCrChannel, aData.mCrChannel, cbcrSize, aData.mCbCrStride,
             aData.mCrSkip);
+  if (aData.mAlpha) {
+    CopyPlane(mData.mAlpha->mChannel, aData.mAlpha->mChannel, ySize,
+              aData.mYStride, aData.mYSkip);
+  }
 
   mSize = aData.mPictureRect.Size();
   mOrigin = aData.mPictureRect.TopLeft();
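Note that the alpha plane is copied with the Y plane's size and stride, consistent with the PlanarAlphaData comment below stating that alpha's stride equals Y's. As a rough illustration only (this is a hypothetical helper, not gecko's actual CopyPlane implementation), a strided row copy looks like:

// Illustrative only: copy `height` rows of `width` bytes, `stride` bytes apart,
// from one Uint8Array plane into another that uses the same stride.
function copyPlaneRows(dst, src, width, height, stride) {
  for (let row = 0; row < height; ++row) {
    dst.set(src.subarray(row * stride, row * stride + width), row * stride);
  }
}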
@@ -643,6 +643,14 @@ class AutoLockImage {
   AutoTArray<ImageContainer::OwningImage, 4> mImages;
 };
 
+// This type is currently only used for AVIF and WebCodecs therefore makes some
+// specific assumptions (e.g., Alpha's bpc and stride is equal to Y's one)
+struct PlanarAlphaData {
+  uint8_t* mChannel = nullptr;
+  gfx::IntSize mSize = gfx::IntSize(0, 0);
+  gfx::ColorDepth mDepth = gfx::ColorDepth::COLOR_8;
+  bool mPremultiplied = false;
+};
 struct PlanarYCbCrData {
   // Luminance buffer
   uint8_t* mYChannel = nullptr;
@@ -654,6 +662,8 @@ struct PlanarYCbCrData {
   int32_t mCbCrStride = 0;
   int32_t mCbSkip = 0;
   int32_t mCrSkip = 0;
+  // Alpha buffer and its metadata
+  Maybe<PlanarAlphaData> mAlpha = Nothing();
   // Picture region
   gfx::IntRect mPictureRect = gfx::IntRect(0, 0, 0, 0);
   StereoMode mStereoMode = StereoMode::MONO;
@@ -686,15 +696,6 @@ struct PlanarYCbCrData {
   static Maybe<PlanarYCbCrData> From(const SurfaceDescriptorBuffer&);
 };
 
-// This type is currently only used for AVIF and therefore makes some
-// AVIF-specific assumptions (e.g., Alpha's bpc and stride is equal to Y's one)
-struct PlanarAlphaData {
-  uint8_t* mChannel = nullptr;
-  gfx::IntSize mSize = gfx::IntSize(0, 0);
-  gfx::ColorDepth mDepth = gfx::ColorDepth::COLOR_8;
-  bool mPremultiplied = false;
-};
-
 /****** Image subtypes for the different formats ******/
 
 /**
@@ -280,11 +280,10 @@ class AVIFParser {
   Maybe<Mp4parseAvifImage> mAvifImage;
 };
 
-// As well as Maybe<PlanarAlphaData>, add CICP values (either from the BMFF
-// container or the AV1 sequence header) which are used to create the
-// colorspace transform. CICP::MatrixCoefficients is only stored for the sake
-// of telemetry, since the relevant information for YUV -> RGB conversion is
-// stored in mYUVColorSpace.
+// CICP values (either from the BMFF container or the AV1 sequence header) are
+// used to create the colorspace transform. CICP::MatrixCoefficients is only
+// stored for the sake of telemetry, since the relevant information for YUV ->
+// RGB conversion is stored in mYUVColorSpace.
 //
 // There are three potential sources of color information for an AVIF:
 // 1. ICC profile via a ColourInformationBox (colr) defined in [ISOBMFF]
@@ -335,7 +334,6 @@ class AVIFParser {
 // [ITU-T H.273]: Rec. ITU-T H.273 (12/2016)
 // <https://www.itu.int/rec/T-REC-H.273-201612-I/en>
 struct AVIFDecodedData : layers::PlanarYCbCrData {
-  Maybe<layers::PlanarAlphaData> mAlpha = Nothing();
   CICP::ColourPrimaries mColourPrimaries = CICP::CP_UNSPECIFIED;
   CICP::TransferCharacteristics mTransferCharacteristics = CICP::TC_UNSPECIFIED;
   CICP::MatrixCoefficients mMatrixCoefficients = CICP::MC_UNSPECIFIED;
@@ -13,9 +13,6 @@
 [Test we can construct an odd-sized VideoFrame.]
   expected: FAIL
 
-[Test buffer constructed I420+Alpha VideoFrame]
-  expected: FAIL
-
 [Test VideoFrame constructed VideoFrame]
   expected: FAIL
 
@@ -83,9 +80,6 @@
 [Test we can construct an odd-sized VideoFrame.]
   expected: FAIL
 
-[Test buffer constructed I420+Alpha VideoFrame]
-  expected: FAIL
-
 [Test VideoFrame constructed VideoFrame]
   expected: FAIL
 
@@ -87,7 +87,7 @@ promise_test(async t => {
   const layout = await frame.copyTo(data, options);
   assert_layout_equals(layout, options.layout);
   assert_buffer_equals(data, expectedData);
-}, 'Test stride and offset work.');
+}, 'Test I420 stride and offset work.');
 
 promise_test(async t => {
   const frame = makeI420_4x2();
@@ -112,7 +112,46 @@ promise_test(async t => {
   const layout = await frame.copyTo(data, options);
   assert_layout_equals(layout, options.layout);
   assert_buffer_equals(data, expectedData);
-}, 'Test stride and offset with padding.');
+}, 'Test I420 stride and offset with padding.');
 
+promise_test(async t => {
+  const init = {
+    format: 'I420A',
+    timestamp: 0,
+    codedWidth: 4,
+    codedHeight: 2,
+  };
+  const buf = new Uint8Array([
+    1, 2, 3, 4,  // y
+    5, 6, 7, 8,
+    9, 10,  // u
+    11, 12,  // v
+    13, 14, 15, 16,  // a
+    17, 18, 19, 20,
+  ]);
+  const frame = new VideoFrame(buf, init);
+  const options = {
+    layout: [
+      {offset: 12, stride: 4},
+      {offset: 8, stride: 2},
+      {offset: 10, stride: 2},
+      {offset: 0, stride: 4},
+    ],
+  };
+  const expectedData = new Uint8Array([
+    13, 14, 15, 16,  // a
+    17, 18, 19, 20,
+    9, 10,  // u
+    11, 12,  // v
+    1, 2, 3, 4,  // y
+    5, 6, 7, 8,
+  ]);
+  assert_equals(frame.allocationSize(options), expectedData.length, 'allocationSize()');
+  const data = new Uint8Array(expectedData.length);
+  const layout = await frame.copyTo(data, options);
+  assert_layout_equals(layout, options.layout);
+  assert_buffer_equals(data, expectedData);
+}, 'Test I420A stride and offset work.');
+
 promise_test(async t => {
   const init = {
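For comparison with the custom layout above, the default copyTo() layout for the same 4x2 I420A frame would be tightly packed planes in Y, U, V, A order. A sketch in the style of the surrounding tests (assuming the WebCodecs default layout rules and the assert helpers from this test file):

promise_test(async t => {
  const frame = new VideoFrame(
      new Uint8Array(20),
      {format: 'I420A', timestamp: 0, codedWidth: 4, codedHeight: 2});
  assert_equals(frame.allocationSize(), 20, 'default allocationSize()');
  const layout = await frame.copyTo(new Uint8Array(20));
  // Tightly packed planes: Y at 0 (stride 4), U at 8 (stride 2),
  // V at 10 (stride 2), A at 12 (stride 4).
  assert_layout_equals(layout, [
    {offset: 0, stride: 4},
    {offset: 8, stride: 2},
    {offset: 10, stride: 2},
    {offset: 12, stride: 4},
  ]);
  frame.close();
}, 'Sketch: default copyTo() layout for a 4x2 I420A frame.');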