mirror of https://gitee.com/openharmony/third_party_ffmpeg
Merge remote-tracking branch 'qatar/master'

* qatar/master: (21 commits)
  ipmovie: do not read audio packets before the codec is known
  truemotion2: check size before GetBitContext initialisation
  avio: Only do implicit network initialization for network protocols
  avio: Add an URLProtocol flag for indicating that a protocol uses network
  adpcm: ADPCM Electronic Arts has always two channels
  matroskadec: Fix a bug where a pointer was cached to an array that might later move due to a realloc()
  fate: Add missing reference file from 9b4767e4.
  mov: Support MOV_CH_LAYOUT_USE_DESCRIPTIONS for labeled descriptions.
  4xm: Prevent buffer overreads.
  mjpegdec: parse RSTn to prevent skipping other data in mjpeg_decode_scan
  vp3: add fate test for non-zero last coefficient
  vp3: fix streams with non-zero last coefficient
  swscale: remove unused U/V arguments from yuv2rgb_write().
  timer: K&R formatting cosmetics
  lavf: cosmetics, reformat av_read_frame().
  lavf: refactor av_read_frame() to make it easier to understand.
  Report an error if pitch_lag is zero in AMR-NB decoder.
  Revert "4xm: Prevent buffer overreads."
  4xm: Prevent buffer overreads.
  4xm: pass the correct remaining buffer size to decode_i2_frame().
  ...

Conflicts:
	libavcodec/4xm.c
	libavcodec/mjpegdec.c
	libavcodec/truemotion2.c
	libavformat/ipmovie.c
	libavformat/mov_chan.c

Merged-by: Michael Niedermayer <michaelni@gmx.at>

commit 3edff185ab
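For context on the two avio commits above: the new URL_PROTOCOL_FLAG_NETWORK bit lets generic code bring the network layer up only for protocols that actually need it. A minimal sketch of the pattern, assuming a hypothetical protocol (the .flags field, the flag value and the ff_network_init() check are taken from the hunks below; everything named example_* is invented for illustration):

    /* A protocol opts in by setting the flag, exactly as the tcp.c, udp.c,
     * http.c etc. hunks below do for the real protocols. */
    URLProtocol ff_example_protocol = {
        .name      = "example",                  /* hypothetical protocol */
        .url_open  = example_open,               /* hypothetical callbacks */
        .url_close = example_close,
        .flags     = URL_PROTOCOL_FLAG_NETWORK,  /* this protocol uses the network */
    };

    /* Generic open path (see the avio hunk below): the network layer is only
     * initialized when the selected protocol carries the flag. */
    #if CONFIG_NETWORK
        if (up->flags & URL_PROTOCOL_FLAG_NETWORK && !ff_network_init())
            return AVERROR(EIO);
    #endif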
@@ -643,9 +643,17 @@ static int decode_i2_frame(FourXContext *f, const uint8_t *buf, int length){
int x, y, x2, y2;
const int width= f->avctx->width;
const int height= f->avctx->height;
const int mbs = (FFALIGN(width, 16) >> 4) * (FFALIGN(height, 16) >> 4);
uint16_t *dst= (uint16_t*)f->current_picture.data[0];
const int stride= f->current_picture.linesize[0]>>1;
const uint8_t *buf_end = buf + length;
GetByteContext g3;

if(length < mbs * 8) {
av_log(f->avctx, AV_LOG_ERROR, "packet size too small\n");
return AVERROR_INVALIDDATA;
}
bytestream2_init(&g3, buf, length);

for(y=0; y<height; y+=16){
for(x=0; x<width; x+=16){
@@ -654,8 +662,8 @@ static int decode_i2_frame(FourXContext *f, const uint8_t *buf, int length){
return -1;
memset(color, 0, sizeof(color));
//warning following is purely guessed ...
color[0]= bytestream_get_le16(&buf);
color[1]= bytestream_get_le16(&buf);
color[0]= bytestream2_get_le16u(&g3);
color[1]= bytestream2_get_le16u(&g3);

if(color[0]&0x8000) av_log(NULL, AV_LOG_ERROR, "unk bit 1\n");
if(color[1]&0x8000) av_log(NULL, AV_LOG_ERROR, "unk bit 2\n");
@@ -663,7 +671,7 @@ static int decode_i2_frame(FourXContext *f, const uint8_t *buf, int length){
color[2]= mix(color[0], color[1]);
color[3]= mix(color[1], color[0]);

bits= bytestream_get_le32(&buf);
bits= bytestream2_get_le32u(&g3);
for(y2=0; y2<16; y2++){
for(x2=0; x2<16; x2++){
int index= 2*(x2>>2) + 8*(y2>>2);
@@ -672,7 +680,7 @@ static int decode_i2_frame(FourXContext *f, const uint8_t *buf, int length){
}
dst+=16;
}
dst += 16*stride - width;
dst += 16 * stride - x;
}

return 0;
@@ -823,7 +831,7 @@ static int decode_frame(AVCodecContext *avctx,

if(frame_4cc == AV_RL32("ifr2")){
p->pict_type= AV_PICTURE_TYPE_I;
if(decode_i2_frame(f, buf-4, frame_size+4) < 0){
if(decode_i2_frame(f, buf-4, frame_size + 4) < 0) {
av_log(f->avctx, AV_LOG_ERROR, "decode i2 frame failed\n");
return -1;
}
@@ -91,9 +91,13 @@ typedef struct ADPCMDecodeContext {
static av_cold int adpcm_decode_init(AVCodecContext * avctx)
{
ADPCMDecodeContext *c = avctx->priv_data;
unsigned int min_channels = 1;
unsigned int max_channels = 2;

switch(avctx->codec->id) {
case CODEC_ID_ADPCM_EA:
min_channels = 2;
break;
case CODEC_ID_ADPCM_EA_R1:
case CODEC_ID_ADPCM_EA_R2:
case CODEC_ID_ADPCM_EA_R3:
@@ -101,7 +105,7 @@ static av_cold int adpcm_decode_init(AVCodecContext * avctx)
max_channels = 6;
break;
}
if (avctx->channels <= 0 || avctx->channels > max_channels) {
if (avctx->channels < min_channels || avctx->channels > max_channels) {
av_log(avctx, AV_LOG_ERROR, "Invalid number of channels\n");
return AVERROR(EINVAL);
}
@@ -978,6 +978,10 @@ static int amrnb_decode_frame(AVCodecContext *avctx, void *data,

pitch_sharpening(p, subframe, p->cur_frame_mode, &fixed_sparse);

if (fixed_sparse.pitch_lag == 0) {
av_log(avctx, AV_LOG_ERROR, "The file is corrupted, pitch_lag = 0 is not allowed\n");
return AVERROR_INVALIDDATA;
}
ff_set_fixed_vector(p->fixed_vector, &fixed_sparse, 1.0,
AMR_SUBFRAME_SIZE);

@@ -985,18 +985,21 @@ static int mjpeg_decode_scan(MJpegDecodeContext *s, int nb_components, int Ah, i
}
}

if (s->restart_interval) --s->restart_count;
i= 8+((-get_bits_count(&s->gb))&7);
if (s->restart_interval && show_bits(&s->gb, i) == (1<<i)-1){ /* skip RSTn */
int pos= get_bits_count(&s->gb);
align_get_bits(&s->gb);
while(get_bits_count(&s->gb) < s->gb.size_in_bits && show_bits(&s->gb, 8) == 0xFF)
skip_bits(&s->gb, 8);
if(get_bits_count(&s->gb) < s->gb.size_in_bits && (get_bits(&s->gb, 8)&0xF8) == 0xD0){
for (i=0; i<nb_components; i++) /* reset dc */
s->last_dc[i] = 1024;
}else{
skip_bits_long(&s->gb, pos - get_bits_count(&s->gb));
if (s->restart_interval) {
s->restart_count--;
i = 8 + ((-get_bits_count(&s->gb)) & 7);
/* skip RSTn */
if (show_bits(&s->gb, i) == (1 << i) - 1) {
int pos = get_bits_count(&s->gb);
align_get_bits(&s->gb);
while (get_bits_left(&s->gb) >= 8 && show_bits(&s->gb, 8) == 0xFF)
skip_bits(&s->gb, 8);
if (get_bits_left(&s->gb) >= 8 && (get_bits(&s->gb, 8) & 0xF8) == 0xD0) {
for (i = 0; i < nb_components; i++) /* reset dc */
s->last_dc[i] = 1024;
} else {
skip_bits_long(&s->gb, pos - get_bits_count(&s->gb));
}
}
}
}
@@ -272,6 +272,8 @@ static int tm2_read_stream(TM2Context *ctx, const uint8_t *buf, int stream_id, i
len = AV_RB32(buf); buf += 4; cur += 4;
}
if(len > 0) {
if (skip <= cur)
return -1;
init_get_bits(&ctx->gb, buf, (skip - cur) * 8);
if(tm2_read_deltas(ctx, stream_id) == -1)
return -1;
@@ -286,7 +288,7 @@ static int tm2_read_stream(TM2Context *ctx, const uint8_t *buf, int stream_id, i
buf += 4; cur += 4;
buf += 4; cur += 4; /* unused by decoder */

if(skip < cur)
if (skip <= cur)
return -1;
init_get_bits(&ctx->gb, buf, (skip - cur) * 8);
if(tm2_build_huff_table(ctx, &codes) == -1)
@@ -305,6 +307,8 @@ static int tm2_read_stream(TM2Context *ctx, const uint8_t *buf, int stream_id, i
ctx->tok_lens[stream_id] = toks;
len = AV_RB32(buf); buf += 4; cur += 4;
if(len > 0) {
if (skip <= cur)
return -1;
init_get_bits(&ctx->gb, buf, (skip - cur) * 8);
for(i = 0; i < toks; i++) {
if (get_bits_left(&ctx->gb) <= 0) {
@@ -1378,6 +1378,8 @@ static inline int vp3_dequant(Vp3DecodeContext *s, Vp3Fragment *frag,
return i;
}
} while (i < 64);
// return value is expected to be a valid level
i--;
end:
// the actual DC+prediction is in the fragment structure
block[0] = frag->dc * s->qmat[0][inter][plane][0];
@@ -127,7 +127,7 @@ static int url_alloc_for_protocol (URLContext **puc, struct URLProtocol *up,
int err;

#if CONFIG_NETWORK
if (!ff_network_init())
if (up->flags & URL_PROTOCOL_FLAG_NETWORK && !ff_network_init())
return AVERROR(EIO);
#endif
uc = av_mallocz(sizeof(URLContext) + strlen(filename) + 1);
@@ -181,7 +181,8 @@ static int url_alloc_for_protocol (URLContext **puc, struct URLProtocol *up,
fail:
*puc = NULL;
#if CONFIG_NETWORK
ff_network_close();
if (up->flags & URL_PROTOCOL_FLAG_NETWORK)
ff_network_close();
#endif
return err;
}
@@ -409,7 +410,8 @@ int ffurl_close(URLContext *h)
if (h->is_connected && h->prot->url_close)
ret = h->prot->url_close(h);
#if CONFIG_NETWORK
ff_network_close();
if (h->prot->flags & URL_PROTOCOL_FLAG_NETWORK)
ff_network_close();
#endif
if (h->prot->priv_data_size) {
if (h->prot->priv_data_class)
@@ -152,6 +152,7 @@
} URLContext;

#define URL_PROTOCOL_FLAG_NESTED_SCHEME 1 /*< The protocol name can be the first part of a nested protocol scheme */
#define URL_PROTOCOL_FLAG_NETWORK 2 /*< The protocol uses network */

/**
* @deprecated This struct is to be made private. Use the higher-level
@@ -121,4 +121,5 @@ URLProtocol ff_gopher_protocol = {
.url_write = gopher_write,
.url_close = gopher_close,
.priv_data_size = sizeof(GopherContext),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};
@@ -569,6 +569,7 @@ URLProtocol ff_http_protocol = {
.url_get_file_handle = http_get_file_handle,
.priv_data_size = sizeof(HTTPContext),
.priv_data_class = &http_context_class,
.flags = URL_PROTOCOL_FLAG_NETWORK,
};
#endif
#if CONFIG_HTTPS_PROTOCOL
@@ -582,6 +583,7 @@ URLProtocol ff_https_protocol = {
.url_get_file_handle = http_get_file_handle,
.priv_data_size = sizeof(HTTPContext),
.priv_data_class = &https_context_class,
.flags = URL_PROTOCOL_FLAG_NETWORK,
};
#endif

@@ -697,5 +699,6 @@ URLProtocol ff_httpproxy_protocol = {
.url_close = http_proxy_close,
.url_get_file_handle = http_get_file_handle,
.priv_data_size = sizeof(HTTPContext),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};
#endif
@@ -116,6 +116,11 @@ static int load_ipmovie_packet(IPMVEContext *s, AVIOContext *pb,
int chunk_type;

if (s->audio_chunk_offset && s->audio_channels && s->audio_bits) {
if (s->audio_type == CODEC_ID_NONE) {
av_log(NULL, AV_LOG_ERROR, "Can not read audio packet before"
"audio codec is known\n");
return CHUNK_BAD;
}

/* adjust for PCM audio by skipping chunk header */
if (s->audio_type != CODEC_ID_INTERPLAY_DPCM) {
@@ -162,6 +162,7 @@ URLProtocol ff_rtmp_protocol = {
.url_read_seek = rtmp_read_seek,
.url_get_file_handle = rtmp_get_file_handle,
.priv_data_size = sizeof(RTMP),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};

URLProtocol ff_rtmpt_protocol = {
@@ -174,6 +175,7 @@ URLProtocol ff_rtmpt_protocol = {
.url_read_seek = rtmp_read_seek,
.url_get_file_handle = rtmp_get_file_handle,
.priv_data_size = sizeof(RTMP),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};

URLProtocol ff_rtmpe_protocol = {
@@ -186,6 +188,7 @@ URLProtocol ff_rtmpe_protocol = {
.url_read_seek = rtmp_read_seek,
.url_get_file_handle = rtmp_get_file_handle,
.priv_data_size = sizeof(RTMP),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};

URLProtocol ff_rtmpte_protocol = {
@@ -198,6 +201,7 @@ URLProtocol ff_rtmpte_protocol = {
.url_read_seek = rtmp_read_seek,
.url_get_file_handle = rtmp_get_file_handle,
.priv_data_size = sizeof(RTMP),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};

URLProtocol ff_rtmps_protocol = {
@@ -210,4 +214,5 @@ URLProtocol ff_rtmps_protocol = {
.url_read_seek = rtmp_read_seek,
.url_get_file_handle = rtmp_get_file_handle,
.priv_data_size = sizeof(RTMP),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};
@@ -406,4 +406,5 @@ URLProtocol ff_mmsh_protocol = {
.url_close = mmsh_close,
.url_read_seek = mmsh_read_seek,
.priv_data_size = sizeof(MMSHContext),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};
@@ -625,4 +625,5 @@ URLProtocol ff_mmst_protocol = {
.url_read = mms_read,
.url_close = mms_close,
.priv_data_size = sizeof(MMSTContext),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};
@@ -576,7 +576,8 @@ static int mov_read_chan(MOVContext *c, AVIOContext *pb, MOVAtom atom)
{
AVStream *st;
uint8_t version;
uint32_t flags, layout_tag, bitmap, num_descr;
uint32_t flags, layout_tag, bitmap, num_descr, label_mask;
int i;

if (c->fc->nb_streams < 1)
return 0;
@@ -598,9 +599,7 @@ static int mov_read_chan(MOVContext *c, AVIOContext *pb, MOVAtom atom)
av_dlog(c->fc, "chan: size=%" PRId64 " version=%u flags=%u layout=%u bitmap=%u num_descr=%u\n",
atom.size, version, flags, layout_tag, bitmap, num_descr);

#if 0
/* TODO: use the channel descriptions if the layout tag is 0 */
int i;
label_mask = 0;
for (i = 0; i < num_descr; i++) {
uint32_t label, cflags;
float coords[3];
@@ -609,10 +608,19 @@ static int mov_read_chan(MOVContext *c, AVIOContext *pb, MOVAtom atom)
AV_WN32(&coords[0], avio_rl32(pb)); // mCoordinates[0]
AV_WN32(&coords[1], avio_rl32(pb)); // mCoordinates[1]
AV_WN32(&coords[2], avio_rl32(pb)); // mCoordinates[2]
if (layout_tag == 0) {
uint32_t mask_incr = ff_mov_get_channel_label(label);
if (mask_incr == 0) {
label_mask = 0;
break;
}
label_mask |= mask_incr;
}
}
#endif

st->codec->channel_layout = ff_mov_get_channel_layout(layout_tag, bitmap);
if (layout_tag == 0)
st->codec->channel_layout = label_mask;
else
st->codec->channel_layout = ff_mov_get_channel_layout(layout_tag, bitmap);

return 0;
}
@@ -428,8 +428,7 @@ uint64_t ff_mov_get_channel_layout(uint32_t tag, uint32_t bitmap)
int i, channels;
const struct MovChannelLayoutMap *layout_map;

/* handle the use of the channel descriptions */
/* TODO: map MOV channel labels to FFmpeg channels */
/* use ff_mov_get_channel_label() to build a layout instead */
if (tag == MOV_CH_LAYOUT_USE_DESCRIPTIONS)
return 0;

@@ -451,6 +450,19 @@ uint64_t ff_mov_get_channel_layout(uint32_t tag, uint32_t bitmap)
return layout_map[i].layout;
}

uint32_t ff_mov_get_channel_label(uint32_t label)
{
if (label == 0)
return 0;
if (label <= 18)
return 1U << (label - 1);
if (label == 38)
return AV_CH_STEREO_LEFT;
if (label == 39)
return AV_CH_STEREO_RIGHT;
return 0;
}

uint32_t ff_mov_get_channel_layout_tag(enum CodecID codec_id,
uint64_t channel_layout,
uint32_t *bitmap)
@@ -39,6 +39,14 @@
*/
uint64_t ff_mov_get_channel_layout(uint32_t tag, uint32_t bitmap);

/**
* Get the channel layout for the specified channel layout tag.
*
* @param[in] tag channel label
* @return channel layout mask fragment
*/
uint32_t ff_mov_get_channel_label(uint32_t label);

/**
* Get the channel layout tag for the specified codec id and channel layout.
* If the layout tag was not found, use a channel bitmap if possible.
@@ -1000,4 +1000,5 @@ URLProtocol ff_rtmp_protocol = {
.url_write = rtmp_write,
.url_close = rtmp_close,
.priv_data_size = sizeof(RTMPContext),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};
@@ -331,4 +331,5 @@ URLProtocol ff_rtp_protocol = {
.url_close = rtp_close,
.url_get_file_handle = rtp_get_file_handle,
.priv_data_size = sizeof(RTPContext),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};
@@ -204,4 +204,5 @@ URLProtocol ff_tcp_protocol = {
.url_close = tcp_close,
.url_get_file_handle = tcp_get_file_handle,
.priv_data_size = sizeof(TCPContext),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};
@@ -248,4 +248,5 @@ URLProtocol ff_tls_protocol = {
.url_write = tls_write,
.url_close = tls_close,
.priv_data_size = sizeof(TLSContext),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};
@@ -637,4 +637,5 @@ URLProtocol ff_udp_protocol = {
.url_close = udp_close,
.url_get_file_handle = udp_get_file_handle,
.priv_data_size = sizeof(UDPContext),
.flags = URL_PROTOCOL_FLAG_NETWORK,
};
@@ -33,6 +33,7 @@

#if !FF_API_OLD_AVIO
#define URL_PROTOCOL_FLAG_NESTED_SCHEME 1 /*< The protocol name can be the first part of a nested protocol scheme */
#define URL_PROTOCOL_FLAG_NETWORK 2 /*< The protocol uses network */

extern int (*url_interrupt_cb)(void);

@@ -33,6 +33,7 @@
#include "libavutil/pixdesc.h"
#include "metadata.h"
#include "id3v2.h"
#include "libavutil/avassert.h"
#include "libavutil/avstring.h"
#include "libavutil/mathematics.h"
#include "libavutil/parseutils.h"
@@ -1318,57 +1319,63 @@ static int read_frame_internal(AVFormatContext *s, AVPacket *pkt)
return 0;
}

static int read_from_packet_buffer(AVFormatContext *s, AVPacket *pkt)
{
AVPacketList *pktl = s->packet_buffer;
av_assert0(pktl);
*pkt = pktl->pkt;
s->packet_buffer = pktl->next;
av_freep(&pktl);
return 0;
}

int av_read_frame(AVFormatContext *s, AVPacket *pkt)
{
AVPacketList *pktl;
int eof=0;
const int genpts= s->flags & AVFMT_FLAG_GENPTS;
const int genpts = s->flags & AVFMT_FLAG_GENPTS;
int eof = 0;

if (!genpts)
return s->packet_buffer ? read_from_packet_buffer(s, pkt) :
read_frame_internal(s, pkt);

for (;;) {
int ret;
AVPacketList *pktl = s->packet_buffer;

for(;;){
pktl = s->packet_buffer;
if (pktl) {
AVPacket *next_pkt= &pktl->pkt;
AVPacket *next_pkt = &pktl->pkt;

if(genpts && next_pkt->dts != AV_NOPTS_VALUE){
if (next_pkt->dts != AV_NOPTS_VALUE) {
int wrap_bits = s->streams[next_pkt->stream_index]->pts_wrap_bits;
while(pktl && next_pkt->pts == AV_NOPTS_VALUE){
if( pktl->pkt.stream_index == next_pkt->stream_index
&& (0 > av_compare_mod(next_pkt->dts, pktl->pkt.dts, 2LL << (wrap_bits - 1)))
&& av_compare_mod(pktl->pkt.pts, pktl->pkt.dts, 2LL << (wrap_bits - 1))) { //not b frame
next_pkt->pts= pktl->pkt.dts;
while (pktl && next_pkt->pts == AV_NOPTS_VALUE) {
if (pktl->pkt.stream_index == next_pkt->stream_index &&
(av_compare_mod(next_pkt->dts, pktl->pkt.dts, 2LL << (wrap_bits - 1)) < 0) &&
av_compare_mod(pktl->pkt.pts, pktl->pkt.dts, 2LL << (wrap_bits - 1))) { //not b frame
next_pkt->pts = pktl->pkt.dts;
}
pktl= pktl->next;
pktl = pktl->next;
}
pktl = s->packet_buffer;
}

if( next_pkt->pts != AV_NOPTS_VALUE
|| next_pkt->dts == AV_NOPTS_VALUE
|| !genpts || eof){
/* read packet from packet buffer, if there is data */
*pkt = *next_pkt;
s->packet_buffer = pktl->next;
av_free(pktl);
return 0;
}
/* read packet from packet buffer, if there is data */
if (!(next_pkt->pts == AV_NOPTS_VALUE &&
next_pkt->dts != AV_NOPTS_VALUE && !eof))
return read_from_packet_buffer(s, pkt);
}
if(genpts){
int ret= read_frame_internal(s, pkt);
if(ret<0){
if(pktl && ret != AVERROR(EAGAIN)){
eof=1;
continue;
}else
return ret;
}

if(av_dup_packet(add_to_pktbuf(&s->packet_buffer, pkt,
&s->packet_buffer_end)) < 0)
return AVERROR(ENOMEM);
}else{
assert(!s->packet_buffer);
return read_frame_internal(s, pkt);
ret = read_frame_internal(s, pkt);
if (ret < 0) {
if (pktl && ret != AVERROR(EAGAIN)) {
eof = 1;
continue;
} else
return ret;
}

if (av_dup_packet(add_to_pktbuf(&s->packet_buffer, pkt,
&s->packet_buffer_end)) < 0)
return AVERROR(ENOMEM);
}
}

@@ -28,6 +28,7 @@

#include <stdlib.h>
#include <stdint.h>

#include "config.h"

#if ARCH_ARM
@@ -45,29 +46,32 @@
#endif

#ifdef AV_READ_TIME
#define START_TIMER \
uint64_t tend;\
uint64_t tstart= AV_READ_TIME();\
#define START_TIMER \
uint64_t tend; \
uint64_t tstart = AV_READ_TIME(); \

#define STOP_TIMER(id) \
tend= AV_READ_TIME();\
{\
static uint64_t tsum=0;\
static int tcount=0;\
static int tskip_count=0;\
if(tcount<2 || tend - tstart < 8*tsum/tcount || tend - tstart < 2000){\
tsum+= tend - tstart;\
tcount++;\
}else\
tskip_count++;\
if(((tcount+tskip_count)&(tcount+tskip_count-1))==0){\
av_log(NULL, AV_LOG_ERROR, "%"PRIu64" decicycles in %s, %d runs, %d skips\n",\
tsum*10/tcount, id, tcount, tskip_count);\
}\
}
#define STOP_TIMER(id) \
tend = AV_READ_TIME(); \
{ \
static uint64_t tsum = 0; \
static int tcount = 0; \
static int tskip_count = 0; \
if (tcount < 2 || \
tend - tstart < 8 * tsum / tcount || \
tend - tstart < 2000) { \
tsum+= tend - tstart; \
tcount++; \
} else \
tskip_count++; \
if (((tcount + tskip_count) & (tcount + tskip_count - 1)) == 0) { \
av_log(NULL, AV_LOG_ERROR, \
"%"PRIu64" decicycles in %s, %d runs, %d skips\n", \
tsum * 10 / tcount, id, tcount, tskip_count); \
} \
}
#else
#define START_TIMER
#define STOP_TIMER(id) {}
#define STOP_TIMER(id) { }
#endif

#endif /* AVUTIL_TIMER_H */
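As a usage note on the reformatted timer macros above: START_TIMER declares the time-stamp variables, so it has to open the block being measured, and STOP_TIMER(id) prints the accumulated statistics through av_log(). A minimal sketch (decode_block() is a hypothetical function used only for illustration):

    #include "libavutil/timer.h"

    static void profile_example(void)
    {
        START_TIMER                    /* declares tstart/tend and reads AV_READ_TIME() */
        decode_block();                /* hypothetical code under measurement */
        STOP_TIMER("decode_block")     /* logs decicycles, run count and skip count */
    }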
@@ -981,9 +981,17 @@ YUV2PACKED16WRAPPER(yuv2, rgb48, rgb48le, PIX_FMT_RGB48LE)
YUV2PACKED16WRAPPER(yuv2, rgb48, bgr48be, PIX_FMT_BGR48BE)
YUV2PACKED16WRAPPER(yuv2, rgb48, bgr48le, PIX_FMT_BGR48LE)

/*
* Write out 2 RGB pixels in the target pixel format. This function takes a
* R/G/B LUT as generated by ff_yuv2rgb_c_init_tables(), which takes care of
* things like endianness conversion and shifting. The caller takes care of
* setting the correct offset in these tables from the chroma (U/V) values.
* This function then uses the luminance (Y1/Y2) values to write out the
* correct RGB values into the destination buffer.
*/
static av_always_inline void
yuv2rgb_write(uint8_t *_dest, int i, unsigned Y1, unsigned Y2,
unsigned U, unsigned V, unsigned A1, unsigned A2,
unsigned A1, unsigned A2,
const void *_r, const void *_g, const void *_b, int y,
enum PixelFormat target, int hasAlpha)
{
@@ -1151,7 +1159,7 @@ yuv2rgb_X_c_template(SwsContext *c, const int16_t *lumFilter,
g = (c->table_gU[U] + c->table_gV[V]);
b = c->table_bU[U];

yuv2rgb_write(dest, i, Y1, Y2, U, V, hasAlpha ? A1 : 0, hasAlpha ? A2 : 0,
yuv2rgb_write(dest, i, Y1, Y2, hasAlpha ? A1 : 0, hasAlpha ? A2 : 0,
r, g, b, y, target, hasAlpha);
}
}
@@ -1187,7 +1195,7 @@ yuv2rgb_2_c_template(SwsContext *c, const int16_t *buf[2],
A2 = (abuf0[i * 2 + 1] * yalpha1 + abuf1[i * 2 + 1] * yalpha) >> 19;
}

yuv2rgb_write(dest, i, Y1, Y2, U, V, hasAlpha ? A1 : 0, hasAlpha ? A2 : 0,
yuv2rgb_write(dest, i, Y1, Y2, hasAlpha ? A1 : 0, hasAlpha ? A2 : 0,
r, g, b, y, target, hasAlpha);
}
}
@@ -1219,7 +1227,7 @@ yuv2rgb_1_c_template(SwsContext *c, const int16_t *buf0,
A2 = abuf0[i * 2 + 1] >> 7;
}

yuv2rgb_write(dest, i, Y1, Y2, U, V, hasAlpha ? A1 : 0, hasAlpha ? A2 : 0,
yuv2rgb_write(dest, i, Y1, Y2, hasAlpha ? A1 : 0, hasAlpha ? A2 : 0,
r, g, b, y, target, hasAlpha);
}
} else {
@@ -1238,7 +1246,7 @@ yuv2rgb_1_c_template(SwsContext *c, const int16_t *buf0,
A2 = abuf0[i * 2 + 1] >> 7;
}

yuv2rgb_write(dest, i, Y1, Y2, U, V, hasAlpha ? A1 : 0, hasAlpha ? A2 : 0,
yuv2rgb_write(dest, i, Y1, Y2, hasAlpha ? A1 : 0, hasAlpha ? A2 : 0,
r, g, b, y, target, hasAlpha);
}
}
@@ -7,6 +7,9 @@ fate-ea-vp61: CMD = framecrc -i $(SAMPLES)/ea-vp6/MovieSkirmishGondor.vp6 -t 4
FATE_TESTS += fate-vp3
fate-vp3: CMD = framecrc -i $(SAMPLES)/vp3/vp31.avi

FATE_TESTS += fate-vp3-coeff-level64
fate-vp3-coeff-level64: CMD = framecrc -i $(SAMPLES)/vp3/coeff_level64.mkv

FATE_TESTS += fate-vp5
fate-vp5: CMD = framecrc -i $(SAMPLES)/vp5/potter512-400-partial.avi -an

tests/ref/fate/vp3-coeff-level64 (new file, 8 lines)
@@ -0,0 +1,8 @@
0, 0, 4617600, 0x4ba6df50
0, 6000, 4617600, 0x419fdeaf
0, 12000, 4617600, 0xeb2edced
0, 18000, 4617600, 0xa2bb3a1a
0, 24000, 4617600, 0x411cfb36
0, 30000, 4617600, 0xb2dc22ed
0, 36000, 4617600, 0x236d23b5
0, 42000, 4617600, 0x7fef275e