!3159 Improve UT coverage of avcodec

Merge pull request !3159 from 国庆/master
openharmony_ci 2024-08-29 11:02:32 +00:00 committed by Gitee
commit d0bd815b17
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
8 changed files with 261 additions and 13 deletions

View File

@@ -176,11 +176,34 @@ HWTEST_F(VideoDecoderAdapterUnitTest, VideoDecoderAdapter_AquireAvailableInputBu
videoDecoder->inputBufferQueue_ = std::shared_ptr<Media::AVBufferQueue>();
videoDecoder->eventReceiver_ = std::make_shared<MyEventReceiver>();
videoDecoder->inputBufferQueueConsumer_ = new TestAVBufferQueueConsumer();
- std::shared_ptr<AVBuffer> tmpBuffer = AVBuffer::CreateAVBuffer();
+ uint8_t data[100];
+ std::shared_ptr<AVBuffer> tmpBuffer = AVBuffer::CreateAVBuffer(data, sizeof(data), sizeof(data));
tmpBuffer->meta_ = std::make_shared<Meta>();
tmpBuffer->flag_ = 1;
videoDecoder->mediaCodec_ = std::make_shared<TestAVCodecVideoDecoder>();
videoDecoder->AquireAvailableInputBuffer();
ASSERT_EQ(videoDecoder->currentTime_, 0);
}
/**
* @tc.name: VideoDecoderAdapter_AquireAvailableInputBuffer_002
* @tc.desc: AquireAvailableInputBuffer
* @tc.type: FUNC
*/
HWTEST_F(VideoDecoderAdapterUnitTest, VideoDecoderAdapter_AquireAvailableInputBuffer_002, TestSize.Level1)
{
std::shared_ptr<VideoDecoderAdapter> videoDecoder = std::make_shared<VideoDecoderAdapter>();
videoDecoder->inputBufferQueue_ = std::shared_ptr<Media::AVBufferQueue>();
videoDecoder->eventReceiver_ = std::make_shared<MyEventReceiver>();
videoDecoder->inputBufferQueueConsumer_ = new TestAVBufferQueueConsumer();
uint8_t data[100];
std::shared_ptr<AVBuffer> tmpBuffer = AVBuffer::CreateAVBuffer(data, sizeof(data), sizeof(data));
tmpBuffer->meta_ = std::make_shared<Meta>();
tmpBuffer->flag_ = 0;
videoDecoder->mediaCodec_ = std::make_shared<TestAVCodecVideoDecoder>();
videoDecoder->AquireAvailableInputBuffer();
tmpBuffer = nullptr;
videoDecoder->AquireAvailableInputBuffer();
ASSERT_EQ(videoDecoder->currentTime_, 0);
}
@@ -192,12 +215,68 @@ HWTEST_F(VideoDecoderAdapterUnitTest, VideoDecoderAdapter_AquireAvailableInputBu
HWTEST_F(VideoDecoderAdapterUnitTest, VideoDecoderAdapter_OnInputBufferAvailable_001, TestSize.Level1)
{
std::shared_ptr<VideoDecoderAdapter> videoDecoder = std::make_shared<VideoDecoderAdapter>();
- std::shared_ptr<AVBuffer> buffer = AVBuffer::CreateAVBuffer();
+ uint8_t data[100];
+ std::shared_ptr<AVBuffer> buffer = AVBuffer::CreateAVBuffer(data, sizeof(data), sizeof(data));
buffer->meta_ = std::make_shared<Meta>();
uint32_t index = 1;
videoDecoder->OnInputBufferAvailable(index, buffer);
videoDecoder->inputBufferQueueConsumer_ = new TestAVBufferQueueConsumer();
videoDecoder->OnInputBufferAvailable(index, buffer);
- EXPECT_EQ(videoDecoder->inputBufferQueueConsumer_->ReleaseBuffer(buffer), Status::OK);
+ EXPECT_EQ(videoDecoder->inputBufferQueueConsumer_->ReleaseBuffer(buffer), Status::ERROR_UNKNOWN);
}
/**
* @tc.name: VideoDecoderAdapter_PrepareInputBufferQueue_001
* @tc.desc: PrepareInputBufferQueue
* @tc.type: FUNC
*/
HWTEST_F(VideoDecoderAdapterUnitTest, VideoDecoderAdapter_PrepareInputBufferQueue_001, TestSize.Level1)
{
std::shared_ptr<VideoDecoderAdapter> videoDecoder = std::make_shared<VideoDecoderAdapter>();
uint8_t data[100];
std::shared_ptr<AVBuffer> tmpBuffer = AVBuffer::CreateAVBuffer(data, sizeof(data), sizeof(data));
videoDecoder->inputBufferQueue_ = std::shared_ptr<Media::AVBufferQueue>();
videoDecoder->PrepareInputBufferQueue();
videoDecoder->inputBufferQueue_ = nullptr;
videoDecoder->PrepareInputBufferQueue();
ASSERT_EQ(videoDecoder->currentTime_, 0);
}
/**
* @tc.name: VideoDecoderAdapter_ReleaseOutputBuffer_001
* @tc.desc: ReleaseOutputBuffer
* @tc.type: FUNC
*/
HWTEST_F(VideoDecoderAdapterUnitTest, VideoDecoderAdapter_ReleaseOutputBuffer_001, TestSize.Level1)
{
std::shared_ptr<VideoDecoderAdapter> videoDecoder = std::make_shared<VideoDecoderAdapter>();
uint8_t data[100];
std::shared_ptr<AVBuffer> tmpBuffer = AVBuffer::CreateAVBuffer(data, sizeof(data), sizeof(data));
videoDecoder->mediaCodec_ = std::make_shared<TestAVCodecVideoDecoder>();
videoDecoder->currentTime_ = -1;
int32_t ret = videoDecoder->ReleaseOutputBuffer(1, true);
videoDecoder->currentTime_ = 0;
ret = videoDecoder->ReleaseOutputBuffer(1, true);
videoDecoder->eventReceiver_ = std::make_shared<MyEventReceiver>();
ret = videoDecoder->ReleaseOutputBuffer(1, true);
ASSERT_EQ(ret, 0);
}
/**
* @tc.name: VideoDecoderAdapter_GetLagInfo_001
* @tc.desc: GetLagInfo
* @tc.type: FUNC
*/
HWTEST_F(VideoDecoderAdapterUnitTest, VideoDecoderAdapter_GetLagInfo_001, TestSize.Level1)
{
std::shared_ptr<VideoDecoderAdapter> videoDecoder = std::make_shared<VideoDecoderAdapter>();
int32_t lagTimes = 0;
int32_t maxLagDuration = 0;
int32_t avgLagDuration = 0;
videoDecoder->lagTimes_ = 0;
Status ret = videoDecoder->GetLagInfo(lagTimes, maxLagDuration, avgLagDuration);
videoDecoder->lagTimes_ = 1;
ret = videoDecoder->GetLagInfo(lagTimes, maxLagDuration, avgLagDuration);
EXPECT_EQ(ret, Status::OK);
}
}

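The two AquireAvailableInputBuffer cases above raise branch coverage by feeding the adapter an EOS buffer (flag_ = 1), a plain data buffer (flag_ = 0), and finally a null buffer. A minimal, self-contained sketch of the same branch-driving idea in plain GoogleTest — FakeBuffer, FakeAdapter, and Consume are hypothetical stand-ins, not the OHOS classes:

#include <gtest/gtest.h>
#include <memory>

// Hypothetical stand-ins for the adapter and its buffer type (assumption, not the OHOS types).
struct FakeBuffer {
    bool eos = false;
};
struct FakeAdapter {
    int eosCount = 0;
    int dataCount = 0;
    // Mirrors the branch structure exercised above: null buffer, EOS buffer, data buffer.
    void Consume(const std::shared_ptr<FakeBuffer>& buffer)
    {
        if (buffer == nullptr) {
            return; // early-return branch
        }
        buffer->eos ? ++eosCount : ++dataCount;
    }
};

TEST(FakeAdapterTest, CoversNullEosAndDataBranches)
{
    FakeAdapter adapter;
    adapter.Consume(nullptr);                        // null-buffer branch
    auto eosBuffer = std::make_shared<FakeBuffer>();
    eosBuffer->eos = true;
    adapter.Consume(eosBuffer);                      // EOS branch
    adapter.Consume(std::make_shared<FakeBuffer>()); // data branch
    EXPECT_EQ(adapter.eosCount, 1);
    EXPECT_EQ(adapter.dataCount, 1);
}
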
View File

@@ -143,7 +143,7 @@ public:
virtual int32_t QueueInputBuffer(uint32_t index)
{
- return status_;
+ return ret;
}
virtual int32_t GetOutputFormat(Format &format)
@@ -185,6 +185,7 @@ public:
}
private:
int32_t status_ = 0;
int32_t ret = 1;
};
class TestAVBufferQueueConsumer : public AVBufferQueueConsumer {
@@ -208,11 +209,15 @@ public:
Status AcquireBuffer(std::shared_ptr<AVBuffer>& outBuffer)
{
- return Status::ERROR_UNKNOWN;
+ if (outBuffer == nullptr) {
+ return Status::ERROR_UNKNOWN;
+ } else {
+ return Status::OK;
+ }
}
Status ReleaseBuffer(const std::shared_ptr<AVBuffer>& inBuffer)
{
- return Status::OK;
+ return Status::ERROR_UNKNOWN;
}
Status AttachBuffer(std::shared_ptr<AVBuffer>& inBuffer, bool isFilled)

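The reworked fakes above (QueueInputBuffer now returning ret, AcquireBuffer succeeding only for a non-null buffer, ReleaseBuffer now failing) show how a stub whose return value depends on its argument can steer the caller down both its success and failure paths. A small self-contained sketch of that technique — FakeStatus and FakeConsumer are hypothetical, not the real AVBufferQueueConsumer:

#include <gtest/gtest.h>
#include <memory>

enum class FakeStatus { OK, ERROR_UNKNOWN };

// Hypothetical consumer fake: the return value depends on the argument,
// so one stub can drive both branches of the code under test.
struct FakeConsumer {
    FakeStatus AcquireBuffer(const std::shared_ptr<int>& outBuffer)
    {
        return outBuffer == nullptr ? FakeStatus::ERROR_UNKNOWN : FakeStatus::OK;
    }
};

TEST(FakeConsumerTest, ReturnValueDependsOnArgument)
{
    FakeConsumer consumer;
    std::shared_ptr<int> empty;
    EXPECT_EQ(consumer.AcquireBuffer(empty), FakeStatus::ERROR_UNKNOWN);
    EXPECT_EQ(consumer.AcquireBuffer(std::make_shared<int>(1)), FakeStatus::OK);
}
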
View File

@@ -142,6 +142,9 @@ HWTEST_F(MuxerFilterUnitTest, MuxerFilter_OnBufferFilled_0100, TestSize.Level1)
inputBuffer->pts_ = 3000000000;
muxerFilter_->OnBufferFilled(inputBuffer, trackIndex, streamType, inputBufferQueue);
EXPECT_EQ(inputBuffer->flag_, 0);
muxerFilter_->isTransCoderMode = true;
muxerFilter_->OnBufferFilled(inputBuffer, trackIndex, streamType, inputBufferQueue);
EXPECT_EQ(inputBuffer->flag_, 0);
}
/**

View File

@@ -57,6 +57,7 @@ HWTEST_F(SurfaceDecoderUnitTest, SurfaceDecoderAdapter_Init_0100, TestSize.Level
{
Status status = surfaceDecoderAdapter_->Init("");
ASSERT_EQ(status, Status::ERROR_UNKNOWN);
surfaceDecoderAdapter_->codecServer_ = std::make_shared<MyAVCodecVideoDecoder>();
status = surfaceDecoderAdapter_->Init("video/mp4");
ASSERT_EQ(status, Status::ERROR_UNKNOWN);
surfaceDecoderAdapter_->releaseBufferTask_ = nullptr;

View File

@@ -127,10 +127,10 @@ HWTEST_F(SurfaceEncoderAdapterUnitTest, SurfaceEncoderAdapter_Start_0100, TestSi
surfaceEncoderAdapter_->codecServer_ = nullptr;
Status ret = surfaceEncoderAdapter_->Start();
EXPECT_EQ(ret, Status::ERROR_UNKNOWN);
- surfaceEncoderAdapter_->codecServer_ = std::make_shared<MyAVCodecVideoEncoder>();
- surfaceEncoderAdapter_->releaseBufferTask_ = nullptr;
- ret = surfaceEncoderAdapter_->Start();
+ surfaceEncoderAdapter_->releaseBufferTask_ = std::make_shared<Task>("test");
+ surfaceEncoderAdapter_->codecServer_ = std::make_shared<MyAVCodecVideoEncoder>();
+ ret = surfaceEncoderAdapter_->Start();
EXPECT_EQ(ret, Status::OK);
}
@@ -287,6 +287,22 @@ HWTEST_F(SurfaceEncoderAdapterUnitTest, SurfaceEncoderAdapter_ConfigureAboutRGBA
EXPECT_NE(meta->Find(Tag::VIDEO_PIXEL_FORMAT), meta->end());
}
/**
* @tc.name: SurfaceEncoderAdapter_ConfigureAboutEnableTemporalScale_0100
* @tc.desc: ConfigureAboutEnableTemporalScale
* @tc.type: FUNC
*/
HWTEST_F(SurfaceEncoderAdapterUnitTest, ConfigureAboutEnableTemporalScale_0100, TestSize.Level1)
{
std::shared_ptr<Meta> meta = std::make_shared<Meta>();
MediaAVCodec::Format format;
meta->SetData(Tag::VIDEO_ENCODER_ENABLE_TEMPORAL_SCALABILITY, 0);
surfaceEncoderAdapter_->ConfigureAboutEnableTemporalScale(format, meta);
meta->SetData(Tag::VIDEO_ENCODER_ENABLE_TEMPORAL_SCALABILITY, 2);
surfaceEncoderAdapter_->ConfigureAboutEnableTemporalScale(format, meta);
EXPECT_EQ(surfaceEncoderAdapter_->totalPauseTime_, 0);
}
/**
* @tc.name: OnInputParameterWithAttrAvailablee_100
* @tc.desc: OnInputParameterWithAttrAvailable
@@ -391,6 +407,28 @@ HWTEST_F(SurfaceEncoderAdapterUnitTest, SurfaceEncoderAdapter_TransCoder_100, Te
EXPECT_EQ(surfaceEncoderAdapter_->isResume_, buffer->pts_);
}
/**
* @tc.name: SurfaceEncoderAdapter_TransCoder_200
* @tc.desc: TransCoderOnOutputBufferAvailable
* @tc.type: FUNC
*/
HWTEST_F(SurfaceEncoderAdapterUnitTest, SurfaceEncoderAdapter_TransCoder_200, TestSize.Level1)
{
uint8_t data[100];
std::shared_ptr<AVBuffer> buffer = AVBuffer::CreateAVBuffer(data, sizeof(data), sizeof(data));
surfaceEncoderAdapter_->outputBufferQueueProducer_ =
new OHOS::Media::Pipeline::MyAVBufferQueueProducer();
uint32_t index = 1;
surfaceEncoderAdapter_->TransCoderOnOutputBufferAvailable(index, buffer);
surfaceEncoderAdapter_->isResume_ = true;
surfaceEncoderAdapter_->TransCoderOnOutputBufferAvailable(index, buffer);
surfaceEncoderAdapter_->isTransCoderMode = true;
surfaceEncoderAdapter_->startBufferTime_ = -1;
buffer->pts_ = 1;
surfaceEncoderAdapter_->TransCoderOnOutputBufferAvailable(index, buffer);
EXPECT_EQ(surfaceEncoderAdapter_->startBufferTime_, buffer->pts_);
}
/**
* @tc.name: SurfaceEncoderAdapter_OnOutputBufferAvailable_0100
* @tc.desc: OnOutputBufferAvailable

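SurfaceEncoderAdapter_TransCoder_200 above resets startBufferTime_ to -1 and then checks that the first buffer's pts is latched into it. A minimal sketch of that latch-on-first-timestamp pattern — PtsLatch is a hypothetical stand-in, not the adapter itself:

#include <gtest/gtest.h>
#include <cstdint>

// Hypothetical latch: remembers the first timestamp it sees, standing in for
// startBufferTime_ being initialised from the first buffer's pts.
struct PtsLatch {
    int64_t startPts = -1;
    void OnBuffer(int64_t pts)
    {
        if (startPts == -1) {
            startPts = pts; // latch only once
        }
    }
};

TEST(PtsLatchTest, FirstPtsIsLatched)
{
    PtsLatch latch;
    latch.OnBuffer(1);
    latch.OnBuffer(5);
    EXPECT_EQ(latch.startPts, 1); // later buffers do not overwrite it
}
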
View File

@@ -165,11 +165,7 @@ public:
Status RequestBuffer(std::shared_ptr<AVBuffer>& outBuffer,
const AVBufferConfig& config, int32_t timeoutMs)
{
- if (outBuffer == nullptr) {
- return Status::ERROR_NULL_POINTER;
- } else {
- return Status::OK;
- }
+ return Status::ERROR_UNKNOWN;
}
Status PushBuffer(const std::shared_ptr<AVBuffer>& inBuffer, bool available)
{

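Making the stub producer's RequestBuffer above always return Status::ERROR_UNKNOWN forces every caller that requests a buffer onto its error path. A self-contained sketch of that always-failing-fake technique — FailingProducer and FillOne are hypothetical names:

#include <gtest/gtest.h>
#include <memory>

enum class FakeStatus { OK, ERROR_UNKNOWN };

// Hypothetical always-failing producer, used purely to exercise error handling.
struct FailingProducer {
    FakeStatus RequestBuffer(std::shared_ptr<int>& outBuffer)
    {
        outBuffer = nullptr;
        return FakeStatus::ERROR_UNKNOWN;
    }
};

// Hypothetical caller under test: reports false when no buffer is available.
bool FillOne(FailingProducer& producer)
{
    std::shared_ptr<int> buffer;
    return producer.RequestBuffer(buffer) == FakeStatus::OK && buffer != nullptr;
}

TEST(FailingProducerTest, CallerHandlesRequestFailure)
{
    FailingProducer producer;
    EXPECT_FALSE(FillOne(producer));
}
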
View File

@@ -115,6 +115,9 @@ HWTEST_F(VideoResizeFilterUnitTest, VideoResizeFilter_SetOutputSurface_001, Test
sptr<Surface> surface = nullptr;
Status ret = videoResize_->SetOutputSurface(surface, 0, 0);
EXPECT_EQ(ret, Status::ERROR_NULL_POINTER);
videoResize_->videoEnhancer_ = nullptr;
ret = videoResize_->SetOutputSurface(surface, 0, 0);
EXPECT_EQ(ret, Status::ERROR_NULL_POINTER);
videoResize_->videoEnhancer_ = DetailEnhancerVideo::Create();
videoResize_->eventReceiver_ = nullptr;
surface = videoResize_->videoEnhancer_->GetInputSurface();
@@ -167,7 +170,7 @@ HWTEST_F(VideoResizeFilterUnitTest, VideoResizeFilter_DoStop_001, TestSize.Level
*/
HWTEST_F(VideoResizeFilterUnitTest, VideoResizeFilter_SetParameter_001, TestSize.Level1)
{
- std::shared_ptr<Meta> parameter= std::make_shared<Meta>();
+ std::shared_ptr<Meta> parameter = std::make_shared<Meta>();
videoResize_->videoEnhancer_ = nullptr;
videoResize_->SetParameter(parameter);
videoResize_->videoEnhancer_ = DetailEnhancerVideo::Create();
@@ -177,6 +180,22 @@ HWTEST_F(VideoResizeFilterUnitTest, VideoResizeFilter_SetParameter_001, TestSize
EXPECT_EQ(videoResize_->appPid_, 0);
}
/**
* @tc.name: VideoResizeFilter_SetParameter_002
* @tc.desc: SetParameter
* @tc.type: FUNC
*/
HWTEST_F(VideoResizeFilterUnitTest, VideoResizeFilter_SetParameter_002, TestSize.Level1)
{
std::shared_ptr<Meta> parameter = std::make_shared<Meta>();
parameter->SetData(Tag::MEDIA_END_OF_STREAM, 2);
videoResize_->videoEnhancer_ = nullptr;
videoResize_->SetParameter(parameter);
videoResize_->videoEnhancer_ = DetailEnhancerVideo::Create();
videoResize_->SetParameter(parameter);
EXPECT_EQ(videoResize_->appPid_, 0);
}
/**
* @tc.name: VideoResizeFilter_LinkNext_001
* @tc.desc: LinkNext
@@ -194,6 +213,22 @@ HWTEST_F(VideoResizeFilterUnitTest, VideoResizeFilter_LinkNext_001, TestSize.Lev
EXPECT_NE(ret, Status::OK);
}
/**
* @tc.name: VideoResizeFilter_OnLinkedResult_001
* @tc.desc: OnLinkedResult
* @tc.type: FUNC
*/
HWTEST_F(VideoResizeFilterUnitTest, VideoResizeFilter_OnLinkedResult_001, TestSize.Level1)
{
sptr<AVBufferQueueProducer> outputBufferQueue = new OHOS::Media::Pipeline::MyAVBufferQueueProducer();
std::shared_ptr<Meta> meta = std::make_shared<Meta>();
videoResize_->onLinkedResultCallback_ = std::make_shared<TestFilterLinkCallback>();
videoResize_->OnLinkedResult(outputBufferQueue, meta);
videoResize_->onLinkedResultCallback_ = nullptr;
videoResize_->OnLinkedResult(outputBufferQueue, meta);
EXPECT_EQ(videoResize_->instanceId_, 0);
}
/**
* @tc.name: VideoResizeFilter_ReleaseBuffer_001
* @tc.desc: ReleaseBuffer

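VideoResizeFilter_OnLinkedResult_001 above calls the filter once with onLinkedResultCallback_ installed and once with it reset to nullptr, covering the null-callback guard. A minimal sketch of that guard pattern — LinkCallback and Filter here are hypothetical, not the pipeline types:

#include <gtest/gtest.h>
#include <memory>

// Hypothetical callback the filter may or may not have installed.
struct LinkCallback {
    int calls = 0;
    void OnLinked() { ++calls; }
};

// Hypothetical filter: notification must be safe with and without a callback.
struct Filter {
    std::shared_ptr<LinkCallback> callback;
    void NotifyLinked()
    {
        if (callback != nullptr) {
            callback->OnLinked();
        }
    }
};

TEST(FilterTest, NotifyLinkedToleratesMissingCallback)
{
    Filter filter;
    filter.callback = std::make_shared<LinkCallback>();
    filter.NotifyLinked();                // callback installed
    EXPECT_EQ(filter.callback->calls, 1);
    filter.callback = nullptr;
    filter.NotifyLinked();                // must not crash when absent
}
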
View File

@@ -101,6 +101,97 @@ public:
}
protected:
};
class MyAVBufferQueueProducer : public IRemoteStub<AVBufferQueueProducer> {
public:
uint32_t GetQueueSize()
{
return 0;
}
Status SetQueueSize(uint32_t size)
{
return Status::OK;
}
Status RequestBuffer(std::shared_ptr<AVBuffer>& outBuffer,
const AVBufferConfig& config, int32_t timeoutMs)
{
if (outBuffer == nullptr) {
return Status::ERROR_NULL_POINTER;
} else {
return Status::OK;
}
}
Status PushBuffer(const std::shared_ptr<AVBuffer>& inBuffer, bool available)
{
return Status::OK;
}
Status ReturnBuffer(const std::shared_ptr<AVBuffer>& inBuffer, bool available)
{
return Status::OK;
}
Status AttachBuffer(std::shared_ptr<AVBuffer>& inBuffer, bool isFilled)
{
return Status::OK;
}
Status DetachBuffer(const std::shared_ptr<AVBuffer>& outBuffer)
{
return Status::OK;
}
Status SetBufferFilledListener(sptr<IBrokerListener>& listener)
{
return Status::OK;
}
Status RemoveBufferFilledListener(sptr<IBrokerListener>& listener)
{
return Status::OK;
}
Status SetBufferAvailableListener(sptr<IProducerListener>& listener)
{
return Status::OK;
}
Status Clear()
{
return Status::OK;
}
DECLARE_INTERFACE_DESCRIPTOR(u"Media.MyAVBufferQueueProducer");
protected:
enum: uint32_t {
PRODUCER_GET_QUEUE_SIZE = 0,
PRODUCER_SET_QUEUE_SIZE = 1,
PRODUCER_REQUEST_BUFFER = 2,
PRODUCER_PUSH_BUFFER = 3,
PRODUCER_RETURN_BUFFER = 4,
PRODUCER_ATTACH_BUFFER = 5,
PRODUCER_DETACH_BUFFER = 6,
PRODUCER_SET_FILLED_LISTENER = 7,
PRODUCER_REMOVE_FILLED_LISTENER = 8,
PRODUCER_SET_AVAILABLE_LISTENER = 9
};
};
class TestFilterLinkCallback : public Pipeline::FilterLinkCallback {
public:
explicit TestFilterLinkCallback()
{
std::cout << "filter back constructor" << std::endl;
}
void OnLinkedResult(const sptr<AVBufferQueueProducer>& queue, std::shared_ptr<Meta>& meta)
{
std::cout << "call OnLinkedResult" << std::endl;
}
void OnUnlinkedResult(std::shared_ptr<Meta>& meta)
{
std::cout << "call OnUnlinkedResult" << std::endl;
}
void OnUpdatedResult(std::shared_ptr<Meta>& meta)
{
std::cout << "call OnUpdatedResult" << std::endl;
}
};
} // namespace Pipeline
} // namespace Media
} // namespace OHOS