/*
 * GXF muxer.
 * Copyright (c) 2006 SmartJog S.A., Baptiste Coudurier <baptiste dot coudurier at smartjog dot com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/avassert.h"
#include "libavutil/intfloat.h"
#include "libavutil/opt.h"
#include "libavutil/mathematics.h"
#include "libavutil/timecode.h"
#include "avformat.h"
#include "internal.h"
#include "gxf.h"
#include "audiointerleave.h"

#define GXF_AUDIO_PACKET_SIZE 65536

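/* Packed timecode word as written by this muxer: bit 30 = color frame flag,
 * bit 29 = drop frame flag, bits 24-28 = hours, bits 16-23 = minutes,
 * bits 8-15 = seconds, bits 0-7 = frame number. */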
#define GXF_TIMECODE(c, d, h, m, s, f) \
    ((c) << 30 | (d) << 29 | (h) << 24 | (m) << 16 | (s) << 8 | (f))

typedef struct GXFTimecode{
    int hh;
    int mm;
    int ss;
    int ff;
    int color;
    int drop;
} GXFTimecode;

typedef struct GXFStreamContext {
    AudioInterleaveContext aic;
    uint32_t track_type;
    uint32_t sample_size;
    uint32_t sample_rate;
    uint16_t media_type;
    uint16_t media_info;
    int frame_rate_index;
    int lines_index;
    int fields;
    int iframes;
    int pframes;
    int bframes;
    int p_per_gop;
    int b_per_i_or_p; ///< number of B-frames per I-frame or P-frame
    int first_gop_closed;
    unsigned order;   ///< interleaving order
} GXFStreamContext;

typedef struct GXFContext {
    AVClass *av_class;
    uint32_t nb_fields;
    uint16_t audio_tracks;
    uint16_t mpeg_tracks;
    int64_t creation_time;
    uint32_t umf_start_offset;
    uint32_t umf_track_offset;
    uint32_t umf_media_offset;
    uint32_t umf_length;
    uint16_t umf_track_size;
    uint16_t umf_media_size;
    AVRational time_base;
    int flags;
    GXFStreamContext timecode_track;
    unsigned *flt_entries;    ///< offsets of packets /1024, starts after 2nd video field
    unsigned flt_entries_nb;
    uint64_t *map_offsets;    ///< offset of map packets
    unsigned map_offsets_nb;
    unsigned packet_count;
    GXFTimecode tc;
} GXFContext;

static const struct {
    int height, index;
} gxf_lines_tab[] = {
    { 480,  1 }, /* NTSC */
    { 512,  1 }, /* NTSC + VBI */
    { 576,  2 }, /* PAL */
    { 608,  2 }, /* PAL + VBI */
    { 1080, 4 },
    { 720,  6 },
};

static const AVCodecTag gxf_media_types[] = {
    { AV_CODEC_ID_MJPEG     ,  3 }, /* NTSC */
    { AV_CODEC_ID_MJPEG     ,  4 }, /* PAL */
    { AV_CODEC_ID_PCM_S24LE ,  9 },
    { AV_CODEC_ID_PCM_S16LE , 10 },
    { AV_CODEC_ID_MPEG2VIDEO, 11 }, /* NTSC */
    { AV_CODEC_ID_MPEG2VIDEO, 12 }, /* PAL */
    { AV_CODEC_ID_DVVIDEO   , 13 }, /* NTSC */
    { AV_CODEC_ID_DVVIDEO   , 14 }, /* PAL */
    { AV_CODEC_ID_DVVIDEO   , 15 }, /* 50M NTSC */
    { AV_CODEC_ID_DVVIDEO   , 16 }, /* 50M PAL */
    { AV_CODEC_ID_AC3       , 17 },
    //{ AV_CODEC_ID_NONE,     ,  18 }, /* Non compressed 24 bit audio */
    { AV_CODEC_ID_MPEG2VIDEO, 20 }, /* MPEG HD */
    { AV_CODEC_ID_MPEG1VIDEO, 22 }, /* NTSC */
    { AV_CODEC_ID_MPEG1VIDEO, 23 }, /* PAL */
    { AV_CODEC_ID_NONE,        0 },
};

#define SERVER_PATH "EXT:/PDR/default/"
#define ES_NAME_PATTERN "EXT:/PDR/default/ES."

static int gxf_find_lines_index(AVStream *st)
{
    GXFStreamContext *sc = st->priv_data;
    int i;

    for (i = 0; i < 6; ++i) {
        if (st->codecpar->height == gxf_lines_tab[i].height) {
            sc->lines_index = gxf_lines_tab[i].index;
            return 0;
        }
    }
    return -1;
}

static void gxf_write_padding(AVIOContext *pb, int64_t to_pad)
{
    for (; to_pad > 0; to_pad--) {
        avio_w8(pb, 0);
    }
}

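/* The two helpers below backpatch length fields once a section has been
 * written: updatePacketSize() pads the packet to a 32-bit boundary and
 * rewrites the 32-bit big-endian size at pos + 6 (the size field of the
 * packet header), updateSize() rewrites a 16-bit section size at pos.
 * Both restore the previous write position before returning. */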
static int64_t updatePacketSize(AVIOContext *pb, int64_t pos)
{
    int64_t curpos;
    int size;

    size = avio_tell(pb) - pos;
    if (size % 4) {
        gxf_write_padding(pb, 4 - size % 4);
        size = avio_tell(pb) - pos;
    }
    curpos = avio_tell(pb);
    avio_seek(pb, pos + 6, SEEK_SET);
    avio_wb32(pb, size);
    avio_seek(pb, curpos, SEEK_SET);
    return curpos - pos;
}

static int64_t updateSize(AVIOContext *pb, int64_t pos)
{
    int64_t curpos;

    curpos = avio_tell(pb);
    avio_seek(pb, pos, SEEK_SET);
    avio_wb16(pb, curpos - pos - 2);
    avio_seek(pb, curpos, SEEK_SET);
    return curpos - pos;
}

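/* Every GXF packet starts with the same 16-byte header: a 32-bit zero leader,
 * the byte 0x01, the packet type, a 32-bit size (written as 0 here and
 * patched later by updatePacketSize()), a 32-bit reserved field, and the
 * trailer bytes 0xE1 0xE2. */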
static void gxf_write_packet_header(AVIOContext *pb, GXFPktType type)
{
    avio_wb32(pb, 0);  /* packet leader for synchro */
    avio_w8(pb, 1);
    avio_w8(pb, type); /* map packet */
    avio_wb32(pb, 0);  /* size */
    avio_wb32(pb, 0);  /* reserved */
    avio_w8(pb, 0xE1); /* trailer 1 */
    avio_w8(pb, 0xE2); /* trailer 2 */
}

static int gxf_write_mpeg_auxiliary(AVIOContext *pb, AVStream *st)
{
    GXFStreamContext *sc = st->priv_data;
    char buffer[1024];
    int size, starting_line;

    if (sc->iframes) {
        sc->p_per_gop = sc->pframes / sc->iframes;
        if (sc->pframes % sc->iframes)
            sc->p_per_gop++;
        if (sc->pframes) {
            sc->b_per_i_or_p = sc->bframes / sc->pframes;
            if (sc->bframes % sc->pframes)
                sc->b_per_i_or_p++;
        }
        if (sc->p_per_gop > 9)
            sc->p_per_gop = 9;    /* ensure value won't take more than one char */
        if (sc->b_per_i_or_p > 9)
            sc->b_per_i_or_p = 9; /* ensure value won't take more than one char */
    }
    if (st->codecpar->height == 512 || st->codecpar->height == 608)
        starting_line = 7; // VBI
    else if (st->codecpar->height == 480)
        starting_line = 20;
    else
        starting_line = 23; // default PAL

    size = snprintf(buffer, sizeof(buffer), "Ver 1\nBr %.6f\nIpg 1\nPpi %d\nBpiop %d\n"
                    "Pix 0\nCf %d\nCg %d\nSl %d\nnl16 %d\nVi 1\nf1 1\n",
                    (float)st->codecpar->bit_rate, sc->p_per_gop, sc->b_per_i_or_p,
                    st->codecpar->format == AV_PIX_FMT_YUV422P ? 2 : 1, sc->first_gop_closed == 1,
                    starting_line, (st->codecpar->height + 15) / 16);
    av_assert0(size < sizeof(buffer));
    avio_w8(pb, TRACK_MPG_AUX);
    avio_w8(pb, size + 1);
    avio_write(pb, (uint8_t *)buffer, size + 1);
    return size + 3;
}

static int gxf_write_dv_auxiliary(AVIOContext *pb, AVStream *st)
{
    int64_t track_aux_data = 0;

    avio_w8(pb, TRACK_AUX);
    avio_w8(pb, 8);
    if (st->codecpar->format == AV_PIX_FMT_YUV420P)
        track_aux_data |= 0x01;   /* marks stream as DVCAM instead of DVPRO */
    track_aux_data |= 0x40000000; /* aux data is valid */
    avio_wl64(pb, track_aux_data);
    return 8;
}

static int gxf_write_timecode_auxiliary(AVIOContext *pb, GXFContext *gxf)
{
    uint32_t timecode = GXF_TIMECODE(gxf->tc.color, gxf->tc.drop,
                                     gxf->tc.hh, gxf->tc.mm,
                                     gxf->tc.ss, gxf->tc.ff);

    avio_w8(pb, TRACK_AUX);
    avio_w8(pb, 8);
    avio_wl32(pb, timecode);
    /* reserved */
    avio_wl32(pb, 0);
    return 8;
}

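/* A track description is a sequence of (tag, length, value) entries:
 * TRACK_NAME, a codec-specific auxiliary entry (timecode, MPEG or DV),
 * then TRACK_VER, TRACK_FPS, TRACK_LINES and TRACK_FPF. The first two bytes
 * encode the media type (+0x80) and the track id (+0xC0); the 16-bit size
 * written right after them is backpatched by updateSize(). */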
static int gxf_write_track_description(AVFormatContext *s, GXFStreamContext *sc, int index)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos;

    /* track description section */
    avio_w8(pb, sc->media_type + 0x80);
    avio_w8(pb, index + 0xC0);

    pos = avio_tell(pb);
    avio_wb16(pb, 0); /* size */

    /* media file name */
    avio_w8(pb, TRACK_NAME);
    avio_w8(pb, strlen(ES_NAME_PATTERN) + 3);
    avio_write(pb, ES_NAME_PATTERN, sizeof(ES_NAME_PATTERN) - 1);
    avio_wb16(pb, sc->media_info);
    avio_w8(pb, 0);

    switch (sc->track_type) {
        case 3:     /* timecode */
            gxf_write_timecode_auxiliary(pb, gxf);
            break;
        case 4:     /* MPEG2 */
        case 9:     /* MPEG1 */
            gxf_write_mpeg_auxiliary(pb, s->streams[index]);
            break;
        case 5:     /* DV25 */
        case 6:     /* DV50 */
            gxf_write_dv_auxiliary(pb, s->streams[index]);
            break;
        default:
            avio_w8(pb, TRACK_AUX);
            avio_w8(pb, 8);
            avio_wl64(pb, 0);
    }

    /* file system version */
    avio_w8(pb, TRACK_VER);
    avio_w8(pb, 4);
    avio_wb32(pb, 0);

    /* frame rate */
    avio_w8(pb, TRACK_FPS);
    avio_w8(pb, 4);
    avio_wb32(pb, sc->frame_rate_index);

    /* lines per frame */
    avio_w8(pb, TRACK_LINES);
    avio_w8(pb, 4);
    avio_wb32(pb, sc->lines_index);

    /* fields per frame */
    avio_w8(pb, TRACK_FPF);
    avio_w8(pb, 4);
    avio_wb32(pb, sc->fields);

    return updateSize(pb, pos);
}

static int gxf_write_material_data_section(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos;
    int len;
    const char *filename = strrchr(s->url, '/');

    pos = avio_tell(pb);
    avio_wb16(pb, 0); /* size */

    /* name */
    if (filename)
        filename++;
    else
        filename = s->url;
    len = strlen(filename);

    avio_w8(pb, MAT_NAME);
    avio_w8(pb, strlen(SERVER_PATH) + len + 1);
    avio_write(pb, SERVER_PATH, sizeof(SERVER_PATH) - 1);
    avio_write(pb, filename, len);
    avio_w8(pb, 0);

    /* first field */
    avio_w8(pb, MAT_FIRST_FIELD);
    avio_w8(pb, 4);
    avio_wb32(pb, 0);

    /* last field */
    avio_w8(pb, MAT_LAST_FIELD);
    avio_w8(pb, 4);
    avio_wb32(pb, gxf->nb_fields);

    /* reserved */
    avio_w8(pb, MAT_MARK_IN);
    avio_w8(pb, 4);
    avio_wb32(pb, 0);

    avio_w8(pb, MAT_MARK_OUT);
    avio_w8(pb, 4);
    avio_wb32(pb, gxf->nb_fields);

    /* estimated size */
    avio_w8(pb, MAT_SIZE);
    avio_w8(pb, 4);
    avio_wb32(pb, avio_size(pb) / 1024);

    return updateSize(pb, pos);
}

static int gxf_write_track_description_section(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos;
    int i;

    pos = avio_tell(pb);
    avio_wb16(pb, 0); /* size */
    for (i = 0; i < s->nb_streams; ++i)
        gxf_write_track_description(s, s->streams[i]->priv_data, i);

    gxf_write_track_description(s, &gxf->timecode_track, s->nb_streams);

    return updateSize(pb, pos);
}

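/* The MAP packet bundles the material data section and the track description
 * section. Unless it is being rewritten, its file offset is appended to
 * gxf->map_offsets (the array is grown in blocks of 30 entries) so the muxer
 * can refer back to the recorded offsets later. */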
static int gxf_write_map_packet(AVFormatContext *s, int rewrite)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos = avio_tell(pb);

    if (!rewrite) {
        if (!(gxf->map_offsets_nb % 30)) {
            int err;
            if ((err = av_reallocp_array(&gxf->map_offsets,
                                         gxf->map_offsets_nb + 30,
                                         sizeof(*gxf->map_offsets))) < 0) {
                gxf->map_offsets_nb = 0;
                av_log(s, AV_LOG_ERROR, "could not realloc map offsets\n");
                return err;
            }
        }
        gxf->map_offsets[gxf->map_offsets_nb++] = pos; // do not increment here
    }

    gxf_write_packet_header(pb, PKT_MAP);

    /* preamble */
    avio_w8(pb, 0xE0); /* version */
    avio_w8(pb, 0xFF); /* reserved */

    gxf_write_material_data_section(s);
    gxf_write_track_description_section(s);

    return updatePacketSize(pb, pos);
}

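/* The FLT packet is a field locator table with room for 1000 32-bit entries.
 * fields_per_flt spreads the total field count over those slots; active
 * entries are taken from gxf->flt_entries (packet offsets divided by 1024,
 * see GXFContext) and the remaining slots are zero-filled. */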
static int gxf_write_flt_packet(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos = avio_tell(pb);
    int fields_per_flt = (gxf->nb_fields+1) / 1000 + 1;
    int flt_entries = gxf->nb_fields / fields_per_flt;
    int i = 0;

    gxf_write_packet_header(pb, PKT_FLT);

    avio_wl32(pb, fields_per_flt); /* number of fields */
    avio_wl32(pb, flt_entries); /* number of active flt entries */

    if (gxf->flt_entries) {
        for (i = 0; i < flt_entries; i++)
            avio_wl32(pb, gxf->flt_entries[(i*fields_per_flt)>>1]);
    }

    for (; i < 1000; i++)
        avio_wl32(pb, 0);

    return updatePacketSize(pb, pos);
}

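/* The UMF material description stores the clip bounds twice: as field counts
 * (mark in 0, mark out nb_fields) and as packed timecodes. The mark-out
 * timecode is derived by adding nb_fields to the start timecode using
 * timecode_base units per second (60 when gxf->time_base.den is 60000,
 * 50 otherwise). */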
static int gxf_write_umf_material_description(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int timecode_base = gxf->time_base.den == 60000 ? 60 : 50;
    int64_t timestamp = 0;
    uint64_t nb_fields;
    uint32_t timecode_in; // timecode at mark in
    uint32_t timecode_out; // timecode at mark out

    ff_parse_creation_time_metadata(s, &timestamp, 1);

    timecode_in = GXF_TIMECODE(gxf->tc.color, gxf->tc.drop,
                               gxf->tc.hh, gxf->tc.mm,
                               gxf->tc.ss, gxf->tc.ff);

    nb_fields = gxf->nb_fields +
                gxf->tc.hh * (timecode_base * 3600) +
                gxf->tc.mm * (timecode_base * 60)   +
                gxf->tc.ss * timecode_base          +
                gxf->tc.ff;

    timecode_out = GXF_TIMECODE(gxf->tc.color, gxf->tc.drop,
                                nb_fields / (timecode_base * 3600) % 24,
                                nb_fields / (timecode_base * 60)   % 60,
                                nb_fields /  timecode_base % 60,
                                nb_fields %  timecode_base);

    avio_wl32(pb, gxf->flags);
    avio_wl32(pb, gxf->nb_fields); /* length of the longest track */
    avio_wl32(pb, gxf->nb_fields); /* length of the shortest track */
    avio_wl32(pb, 0); /* mark in */
    avio_wl32(pb, gxf->nb_fields); /* mark out */
    avio_wl32(pb, timecode_in); /* timecode mark in */
    avio_wl32(pb, timecode_out); /* timecode mark out */
    avio_wl64(pb, timestamp); /* modification time */
    avio_wl64(pb, timestamp); /* creation time */
    avio_wl16(pb, 0); /* reserved */
    avio_wl16(pb, 0); /* reserved */
    avio_wl16(pb, gxf->audio_tracks);
    avio_wl16(pb, 1); /* timecode track count */
    avio_wl16(pb, 0); /* reserved */
    avio_wl16(pb, gxf->mpeg_tracks);
    return 48;
}

static int gxf_write_umf_payload(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;

    avio_wl32(pb, gxf->umf_length); /* total length of the umf data */
    avio_wl32(pb, 3); /* version */
    avio_wl32(pb, s->nb_streams+1);
    avio_wl32(pb, gxf->umf_track_offset); /* umf track section offset */
    avio_wl32(pb, gxf->umf_track_size);
    avio_wl32(pb, s->nb_streams+1);
    avio_wl32(pb, gxf->umf_media_offset);
    avio_wl32(pb, gxf->umf_media_size);
    avio_wl32(pb, gxf->umf_length); /* user data offset */
    avio_wl32(pb, 0); /* user data size */
    avio_wl32(pb, 0); /* reserved */
    avio_wl32(pb, 0); /* reserved */
    return 48;
}

static int gxf_write_umf_track_description(AVFormatContext *s)
{
    AVIOContext *pb = s->pb;
    GXFContext *gxf = s->priv_data;
    int64_t pos = avio_tell(pb);
    int i;

    gxf->umf_track_offset = pos - gxf->umf_start_offset;
    for (i = 0; i < s->nb_streams; ++i) {
        GXFStreamContext *sc = s->streams[i]->priv_data;
        avio_wl16(pb, sc->media_info);
        avio_wl16(pb, 1);
    }

    avio_wl16(pb, gxf->timecode_track.media_info);
    avio_wl16(pb, 1);

    return avio_tell(pb) - pos;
}

static int gxf_write_umf_media_mpeg(AVIOContext *pb, AVStream *st)
{
    GXFStreamContext *sc = st->priv_data;

    if (st->codecpar->format == AV_PIX_FMT_YUV422P)
        avio_wl32(pb, 2);
    else
        avio_wl32(pb, 1); /* default to 420 */
    avio_wl32(pb, sc->first_gop_closed == 1); /* closed = 1, open = 0, unknown = 255 */
    avio_wl32(pb, 3); /* top = 1, bottom = 2, frame = 3, unknown = 0 */
    avio_wl32(pb, 1); /* I picture per GOP */
    avio_wl32(pb, sc->p_per_gop);
    avio_wl32(pb, sc->b_per_i_or_p);
    if (st->codecpar->codec_id == AV_CODEC_ID_MPEG2VIDEO)
        avio_wl32(pb, 2);
    else if (st->codecpar->codec_id == AV_CODEC_ID_MPEG1VIDEO)
        avio_wl32(pb, 1);
    else
        avio_wl32(pb, 0);
    avio_wl32(pb, 0); /* reserved */
    return 32;
}

static int gxf_write_umf_media_timecode(AVIOContext *pb, int drop)
{
    avio_wl32(pb, drop); /* drop frame */
    avio_wl32(pb, 0); /* reserved */
    avio_wl32(pb, 0); /* reserved */
    avio_wl32(pb, 0); /* reserved */
    avio_wl32(pb, 0); /* reserved */
    avio_wl32(pb, 0); /* reserved */
    avio_wl32(pb, 0); /* reserved */
    avio_wl32(pb, 0); /* reserved */
    return 32;
}

static int gxf_write_umf_media_dv(AVIOContext *pb, GXFStreamContext *sc, AVStream *st)
{
    int dv_umf_data = 0;

    if (st->codecpar->format == AV_PIX_FMT_YUV420P)
        dv_umf_data |= 0x20; /* marks as DVCAM instead of DVPRO */
    avio_wl32(pb, dv_umf_data);
    avio_wl32(pb, 0);
    avio_wl32(pb, 0);
    avio_wl32(pb, 0);
    avio_wl32(pb, 0);
    avio_wl32(pb, 0);
    avio_wl32(pb, 0);
    avio_wl32(pb, 0);
    return 32;
}

static int gxf_write_umf_media_audio(AVIOContext *pb, GXFStreamContext *sc)
{
    avio_wl64(pb, av_double2int(1)); /* sound level to begin to */
    avio_wl64(pb, av_double2int(1)); /* sound level to begin to */
    avio_wl32(pb, 0); /* number of fields over which to ramp up sound level */
    avio_wl32(pb, 0); /* number of fields over which to ramp down sound level */
    avio_wl32(pb, 0); /* reserved */
    avio_wl32(pb, 0); /* reserved */
    return 32;
}

static int gxf_write_umf_media_description(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos;
    int i, j;

    pos = avio_tell(pb);
    gxf->umf_media_offset = pos - gxf->umf_start_offset;
    for (i = 0; i <= s->nb_streams; ++i) {
        GXFStreamContext *sc;
        int64_t startpos, curpos;

        if (i == s->nb_streams)
            sc = &gxf->timecode_track;
        else
            sc = s->streams[i]->priv_data;

        startpos = avio_tell(pb);
        avio_wl16(pb, 0); /* length */
        avio_wl16(pb, sc->media_info);
        avio_wl16(pb, 0); /* reserved */
        avio_wl16(pb, 0); /* reserved */
        avio_wl32(pb, gxf->nb_fields);
        avio_wl32(pb, 0); /* attributes rw, ro */
        avio_wl32(pb, 0); /* mark in */
        avio_wl32(pb, gxf->nb_fields); /* mark out */
        avio_write(pb, ES_NAME_PATTERN, strlen(ES_NAME_PATTERN));
        avio_wb16(pb, sc->media_info);
        for (j = strlen(ES_NAME_PATTERN)+2; j < 88; j++)
            avio_w8(pb, 0);
        avio_wl32(pb, sc->track_type);
        avio_wl32(pb, sc->sample_rate);
        avio_wl32(pb, sc->sample_size);
        avio_wl32(pb, 0); /* reserved */

        if (sc == &gxf->timecode_track)
            gxf_write_umf_media_timecode(pb, gxf->tc.drop);
        else {
            AVStream *st = s->streams[i];
            switch (st->codecpar->codec_id) {
            case AV_CODEC_ID_MPEG1VIDEO:
            case AV_CODEC_ID_MPEG2VIDEO:
                gxf_write_umf_media_mpeg(pb, st);
                break;
            case AV_CODEC_ID_PCM_S16LE:
                gxf_write_umf_media_audio(pb, sc);
                break;
            case AV_CODEC_ID_DVVIDEO:
                gxf_write_umf_media_dv(pb, sc, st);
                break;
            }
        }

        curpos = avio_tell(pb);
        avio_seek(pb, startpos, SEEK_SET);
        avio_wl16(pb, curpos - startpos);
        avio_seek(pb, curpos, SEEK_SET);
    }
    return avio_tell(pb) - pos;
}

static int gxf_write_umf_packet(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos = avio_tell(pb);

    gxf_write_packet_header(pb, PKT_UMF);

    /* preamble */
    avio_w8(pb, 3); /* first and last (only) packet */
    avio_wb32(pb, gxf->umf_length); /* data length */

    gxf->umf_start_offset = avio_tell(pb);
    gxf_write_umf_payload(s);
    gxf_write_umf_material_description(s);
    gxf->umf_track_size = gxf_write_umf_track_description(s);
    gxf->umf_media_size = gxf_write_umf_media_description(s);
    gxf->umf_length = avio_tell(pb) - gxf->umf_start_offset;
    return updatePacketSize(pb, pos);
}

static const int GXF_samples_per_frame[] = { 32768, 0 };

static void gxf_init_timecode_track(GXFStreamContext *sc, GXFStreamContext *vsc)
{
    if (!vsc)
        return;

    sc->media_type = vsc->sample_rate == 60 ? 7 : 8;
    sc->sample_rate = vsc->sample_rate;
    sc->media_info = ('T'<<8) | '0';
    sc->track_type = 3;
    sc->frame_rate_index = vsc->frame_rate_index;
    sc->lines_index = vsc->lines_index;
    sc->sample_size = 16;
    sc->fields = vsc->fields;
}

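/* Parse a "hh:mm:ss[:;.]ff" timecode string from metadata. A separator other
 * than ':' before the frame count marks drop-frame; with 2 fields per frame
 * the frame count is doubled so it is expressed in fields. */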
static int gxf_init_timecode(AVFormatContext *s, GXFTimecode *tc, const char *tcstr, int fields)
{
    char c;

    if (sscanf(tcstr, "%d:%d:%d%c%d", &tc->hh, &tc->mm, &tc->ss, &c, &tc->ff) != 5) {
        av_log(s, AV_LOG_ERROR, "unable to parse timecode, "
                                "syntax: hh:mm:ss[:;.]ff\n");
        return -1;
    }

    tc->color = 0;
    tc->drop = c != ':';

    if (fields == 2)
        tc->ff = tc->ff * 2;

    return 0;
}

|
|
|
|
|
2006-07-19 09:38:22 +00:00
|
|
|
static int gxf_write_header(AVFormatContext *s)
|
|
|
|
{
|
2011-02-20 10:04:12 +00:00
|
|
|
AVIOContext *pb = s->pb;
|
2006-07-19 09:38:22 +00:00
|
|
|
GXFContext *gxf = s->priv_data;
|
2009-03-09 00:14:55 +00:00
|
|
|
GXFStreamContext *vsc = NULL;
|
2009-03-08 22:23:44 +00:00
|
|
|
uint8_t tracks[255] = {0};
|
|
|
|
int i, media_info = 0;
|
2013-09-11 11:14:12 +00:00
|
|
|
int ret;
|
2012-05-30 08:26:53 +00:00
|
|
|
AVDictionaryEntry *tcr = av_dict_get(s->metadata, "timecode", NULL, 0);
|
2006-07-19 09:38:22 +00:00
|
|
|
|
2016-09-27 14:26:37 +00:00
|
|
|
if (!(pb->seekable & AVIO_SEEKABLE_NORMAL)) {
|
2012-09-01 10:35:14 +00:00
|
|
|
av_log(s, AV_LOG_ERROR, "gxf muxer does not support streamed output, patch welcome\n");
|
2009-03-09 02:47:32 +00:00
|
|
|
return -1;
|
|
|
|
}
|
|
|
|
|
2006-07-19 09:38:22 +00:00
|
|
|
gxf->flags |= 0x00080000; /* material is simple clip */
|
|
|
|
for (i = 0; i < s->nb_streams; ++i) {
|
|
|
|
AVStream *st = s->streams[i];
|
2009-01-31 11:06:20 +00:00
|
|
|
GXFStreamContext *sc = av_mallocz(sizeof(*sc));
|
|
|
|
if (!sc)
|
|
|
|
return AVERROR(ENOMEM);
|
|
|
|
st->priv_data = sc;
|
2006-07-19 09:38:22 +00:00
|
|
|
|
lavf: replace AVStream.codec with AVStream.codecpar
Currently, AVStream contains an embedded AVCodecContext instance, which
is used by demuxers to export stream parameters to the caller and by
muxers to receive stream parameters from the caller. It is also used
internally as the codec context that is passed to parsers.
In addition, it is also widely used by the callers as the decoding (when
demuxer) or encoding (when muxing) context, though this has been
officially discouraged since Libav 11.
There are multiple important problems with this approach:
- the fields in AVCodecContext are in general one of
* stream parameters
* codec options
* codec state
However, it's not clear which ones are which. It is consequently
unclear which fields are a demuxer allowed to set or a muxer allowed to
read. This leads to erratic behaviour depending on whether decoding or
encoding is being performed or not (and whether it uses the AVStream
embedded codec context).
- various synchronization issues arising from the fact that the same
context is used by several different APIs (muxers/demuxers,
parsers, bitstream filters and encoders/decoders) simultaneously, with
there being no clear rules for who can modify what and the different
processes being typically delayed with respect to each other.
- avformat_find_stream_info() making it necessary to support opening
and closing a single codec context multiple times, thus
complicating the semantics of freeing various allocated objects in the
codec context.
Those problems are resolved by replacing the AVStream embedded codec
context with a newly added AVCodecParameters instance, which stores only
the stream parameters exported by the demuxers or read by the muxers.
2014-06-18 18:42:52 +00:00
|
|
|
sc->media_type = ff_codec_get_tag(gxf_media_types, st->codecpar->codec_id);
|
|
|
|
if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
|
|
|
|
if (st->codecpar->codec_id != AV_CODEC_ID_PCM_S16LE) {
|
2006-07-19 09:38:22 +00:00
|
|
|
av_log(s, AV_LOG_ERROR, "only 16 BIT PCM LE allowed for now\n");
|
|
|
|
return -1;
|
|
|
|
}
|
lavf: replace AVStream.codec with AVStream.codecpar
Currently, AVStream contains an embedded AVCodecContext instance, which
is used by demuxers to export stream parameters to the caller and by
muxers to receive stream parameters from the caller. It is also used
internally as the codec context that is passed to parsers.
In addition, it is also widely used by the callers as the decoding (when
demuxer) or encoding (when muxing) context, though this has been
officially discouraged since Libav 11.
There are multiple important problems with this approach:
- the fields in AVCodecContext are in general one of
* stream parameters
* codec options
* codec state
However, it's not clear which ones are which. It is consequently
unclear which fields are a demuxer allowed to set or a muxer allowed to
read. This leads to erratic behaviour depending on whether decoding or
encoding is being performed or not (and whether it uses the AVStream
embedded codec context).
- various synchronization issues arising from the fact that the same
context is used by several different APIs (muxers/demuxers,
parsers, bitstream filters and encoders/decoders) simultaneously, with
there being no clear rules for who can modify what and the different
processes being typically delayed with respect to each other.
- avformat_find_stream_info() making it necessary to support opening
and closing a single codec context multiple times, thus
complicating the semantics of freeing various allocated objects in the
codec context.
Those problems are resolved by replacing the AVStream embedded codec
context with a newly added AVCodecParameters instance, which stores only
the stream parameters exported by the demuxers or read by the muxers.
2014-06-18 18:42:52 +00:00
|
|
|
if (st->codecpar->sample_rate != 48000) {
|
2006-07-19 09:38:22 +00:00
|
|
|
av_log(s, AV_LOG_ERROR, "only 48000hz sampling rate is allowed\n");
|
|
|
|
return -1;
|
|
|
|
}
|
lavf: replace AVStream.codec with AVStream.codecpar
Currently, AVStream contains an embedded AVCodecContext instance, which
is used by demuxers to export stream parameters to the caller and by
muxers to receive stream parameters from the caller. It is also used
internally as the codec context that is passed to parsers.
In addition, it is also widely used by the callers as the decoding (when
demuxer) or encoding (when muxing) context, though this has been
officially discouraged since Libav 11.
There are multiple important problems with this approach:
- the fields in AVCodecContext are in general one of
* stream parameters
* codec options
* codec state
However, it's not clear which ones are which. It is consequently
unclear which fields are a demuxer allowed to set or a muxer allowed to
read. This leads to erratic behaviour depending on whether decoding or
encoding is being performed or not (and whether it uses the AVStream
embedded codec context).
- various synchronization issues arising from the fact that the same
context is used by several different APIs (muxers/demuxers,
parsers, bitstream filters and encoders/decoders) simultaneously, with
there being no clear rules for who can modify what and the different
processes being typically delayed with respect to each other.
- avformat_find_stream_info() making it necessary to support opening
and closing a single codec context multiple times, thus
complicating the semantics of freeing various allocated objects in the
codec context.
Those problems are resolved by replacing the AVStream embedded codec
context with a newly added AVCodecParameters instance, which stores only
the stream parameters exported by the demuxers or read by the muxers.
2014-06-18 18:42:52 +00:00
|
|
|
if (st->codecpar->channels != 1) {
|
2006-07-19 09:38:22 +00:00
|
|
|
av_log(s, AV_LOG_ERROR, "only mono tracks are allowed\n");
|
|
|
|
return -1;
|
|
|
|
}
|
|
|
|
sc->track_type = 2;
|
lavf: replace AVStream.codec with AVStream.codecpar
Currently, AVStream contains an embedded AVCodecContext instance, which
is used by demuxers to export stream parameters to the caller and by
muxers to receive stream parameters from the caller. It is also used
internally as the codec context that is passed to parsers.
In addition, it is also widely used by the callers as the decoding (when
demuxer) or encoding (when muxing) context, though this has been
officially discouraged since Libav 11.
There are multiple important problems with this approach:
- the fields in AVCodecContext are in general one of
* stream parameters
* codec options
* codec state
However, it's not clear which ones are which. It is consequently
unclear which fields are a demuxer allowed to set or a muxer allowed to
read. This leads to erratic behaviour depending on whether decoding or
encoding is being performed or not (and whether it uses the AVStream
embedded codec context).
- various synchronization issues arising from the fact that the same
context is used by several different APIs (muxers/demuxers,
parsers, bitstream filters and encoders/decoders) simultaneously, with
there being no clear rules for who can modify what and the different
processes being typically delayed with respect to each other.
- avformat_find_stream_info() making it necessary to support opening
and closing a single codec context multiple times, thus
complicating the semantics of freeing various allocated objects in the
codec context.
Those problems are resolved by replacing the AVStream embedded codec
context with a newly added AVCodecParameters instance, which stores only
the stream parameters exported by the demuxers or read by the muxers.
2014-06-18 18:42:52 +00:00
|
|
|
sc->sample_rate = st->codecpar->sample_rate;
|
2011-11-29 18:28:15 +00:00
|
|
|
avpriv_set_pts_info(st, 64, 1, sc->sample_rate);
|
2006-07-19 09:38:22 +00:00
|
|
|
sc->sample_size = 16;
|
|
|
|
sc->frame_rate_index = -2;
|
|
|
|
sc->lines_index = -2;
|
|
|
|
sc->fields = -2;
|
|
|
|
gxf->audio_tracks++;
|
|
|
|
gxf->flags |= 0x04000000; /* audio is 16 bit pcm */
|
2009-03-08 22:23:44 +00:00
|
|
|
media_info = 'A';
|
2014-06-18 18:42:52 +00:00
|
|
|
} else if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
|
2009-03-09 01:10:28 +00:00
|
|
|
if (i != 0) {
|
|
|
|
av_log(s, AV_LOG_ERROR, "video stream must be the first track\n");
|
|
|
|
return -1;
|
|
|
|
}
|
2006-07-19 09:38:22 +00:00
|
|
|
/* FIXME check from time_base ? */
|
2014-06-18 18:42:52 +00:00
|
|
|
if (st->codecpar->height == 480 || st->codecpar->height == 512) { /* NTSC or NTSC+VBI */
|
2006-07-19 09:38:22 +00:00
|
|
|
sc->frame_rate_index = 5;
|
|
|
|
sc->sample_rate = 60;
|
|
|
|
gxf->flags |= 0x00000080;
|
2009-03-09 01:10:28 +00:00
|
|
|
gxf->time_base = (AVRational){ 1001, 60000 };
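/* NTSC: 60000/1001 (~59.94) fields per second; packet timestamps count fields */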
|
2014-06-18 18:42:52 +00:00
|
|
|
} else if (st->codecpar->height == 576 || st->codecpar->height == 608) { /* PAL or PAL+VBI */
|
2006-07-19 09:38:22 +00:00
|
|
|
sc->frame_rate_index = 6;
|
|
|
|
sc->media_type++;
|
|
|
|
sc->sample_rate = 50;
|
|
|
|
gxf->flags |= 0x00000040;
|
2009-03-09 01:10:28 +00:00
|
|
|
gxf->time_base = (AVRational){ 1, 50 };
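/* PAL: 50 fields per second; packet timestamps count fields */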
|
2010-09-01 00:04:47 +00:00
|
|
|
} else {
|
|
|
|
av_log(s, AV_LOG_ERROR, "unsupported video resolution, "
|
|
|
|
"gxf muxer only accepts PAL or NTSC resolutions currently\n");
|
|
|
|
return -1;
|
2006-07-19 09:38:22 +00:00
|
|
|
}
|
2012-05-30 08:26:53 +00:00
|
|
|
if (!tcr)
|
|
|
|
tcr = av_dict_get(st->metadata, "timecode", NULL, 0);
|
2011-11-29 18:28:15 +00:00
|
|
|
avpriv_set_pts_info(st, 64, gxf->time_base.num, gxf->time_base.den);
|
2009-03-08 21:55:08 +00:00
|
|
|
if (gxf_find_lines_index(st) < 0)
|
2006-07-19 09:38:22 +00:00
|
|
|
sc->lines_index = -1;
|
2014-06-18 18:42:52 +00:00
|
|
|
sc->sample_size = st->codecpar->bit_rate;
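/* for video tracks the sample_size field is reused to carry the stream bit rate */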
|
2006-07-19 09:38:22 +00:00
|
|
|
sc->fields = 2; /* interlaced */
|
2009-03-09 00:14:55 +00:00
|
|
|
|
|
|
|
vsc = sc;
|
|
|
|
|
2014-06-18 18:42:52 +00:00
|
|
|
switch (st->codecpar->codec_id) {
|
2012-08-05 09:11:04 +00:00
|
|
|
case AV_CODEC_ID_MJPEG:
|
2009-03-08 22:23:44 +00:00
|
|
|
sc->track_type = 1;
|
|
|
|
gxf->flags |= 0x00004000;
|
|
|
|
media_info = 'J';
|
|
|
|
break;
|
2012-08-05 09:11:04 +00:00
|
|
|
case AV_CODEC_ID_MPEG1VIDEO:
|
2009-03-08 22:23:44 +00:00
|
|
|
sc->track_type = 9;
|
|
|
|
gxf->mpeg_tracks++;
|
|
|
|
media_info = 'L';
|
|
|
|
break;
|
2012-08-05 09:11:04 +00:00
|
|
|
case AV_CODEC_ID_MPEG2VIDEO:
|
2006-08-21 15:49:08 +00:00
|
|
|
sc->first_gop_closed = -1;
|
2006-07-19 09:38:22 +00:00
|
|
|
sc->track_type = 4;
|
|
|
|
gxf->mpeg_tracks++;
|
|
|
|
gxf->flags |= 0x00008000;
|
2009-03-08 22:23:44 +00:00
|
|
|
media_info = 'M';
|
2006-07-19 09:38:22 +00:00
|
|
|
break;
|
2012-08-05 09:11:04 +00:00
|
|
|
case AV_CODEC_ID_DVVIDEO:
|
2014-06-18 18:42:52 +00:00
|
|
|
if (st->codecpar->format == AV_PIX_FMT_YUV422P) {
|
2006-07-19 09:38:22 +00:00
|
|
|
sc->media_type += 2;
|
|
|
|
sc->track_type = 6;
|
|
|
|
gxf->flags |= 0x00002000;
|
2009-03-08 22:23:44 +00:00
|
|
|
media_info = 'E';
|
2006-07-19 09:38:22 +00:00
|
|
|
} else {
|
|
|
|
sc->track_type = 5;
|
|
|
|
gxf->flags |= 0x00001000;
|
2009-03-08 22:23:44 +00:00
|
|
|
media_info = 'D';
|
2006-07-19 09:38:22 +00:00
|
|
|
}
|
|
|
|
break;
|
|
|
|
default:
|
2006-10-18 15:10:14 +00:00
|
|
|
av_log(s, AV_LOG_ERROR, "video codec not supported\n");
|
2006-07-19 09:38:22 +00:00
|
|
|
return -1;
|
|
|
|
}
|
|
|
|
}
|
2009-03-08 22:23:44 +00:00
|
|
|
/* FIXME first 10 audio tracks are 0 to 9 next 22 are A to V */
|
|
|
|
sc->media_info = media_info<<8 | ('0'+tracks[media_info]++);
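/* two-character track name: the codec letter chosen above plus a running index per letter */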
|
2009-03-09 01:10:28 +00:00
|
|
|
sc->order = s->nb_streams - st->index;
|
2006-07-19 09:38:22 +00:00
|
|
|
}
|
2009-02-08 04:33:53 +00:00
|
|
|
|
|
|
|
if (ff_audio_interleave_init(s, GXF_samples_per_frame, (AVRational){ 1, 48000 }) < 0)
|
|
|
|
return -1;
|
|
|
|
|
2012-10-19 21:21:41 +00:00
|
|
|
if (tcr && vsc)
|
2012-05-30 08:26:53 +00:00
|
|
|
gxf_init_timecode(s, &gxf->tc, tcr->value, vsc->fields);
|
2011-12-06 20:56:01 +00:00
|
|
|
|
2009-03-09 00:14:55 +00:00
|
|
|
gxf_init_timecode_track(&gxf->timecode_track, vsc);
|
|
|
|
gxf->flags |= 0x200000; // time code track is non-drop frame
|
|
|
|
|
2013-09-11 11:14:12 +00:00
|
|
|
if ((ret = gxf_write_map_packet(s, 0)) < 0)
|
|
|
|
return ret;
|
2009-03-09 01:35:00 +00:00
|
|
|
gxf_write_flt_packet(s);
|
2009-03-08 21:55:08 +00:00
|
|
|
gxf_write_umf_packet(s);
|
2009-03-09 02:47:32 +00:00
|
|
|
|
|
|
|
gxf->packet_count = 3;
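/* counts the MAP, FLT and UMF packets just written; gxf_write_packet() emits a new
 * MAP packet and resets this counter every 100 media packets */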
|
|
|
|
|
2006-07-19 09:38:22 +00:00
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2011-02-20 10:04:12 +00:00
|
|
|
static int gxf_write_eos_packet(AVIOContext *pb)
|
2006-07-19 09:38:22 +00:00
|
|
|
{
|
2011-03-03 19:11:45 +00:00
|
|
|
int64_t pos = avio_tell(pb);
|
2006-07-19 09:38:22 +00:00
|
|
|
|
|
|
|
gxf_write_packet_header(pb, PKT_EOS);
|
|
|
|
return updatePacketSize(pb, pos);
|
|
|
|
}
|
|
|
|
|
|
|
|
static int gxf_write_trailer(AVFormatContext *s)
|
|
|
|
{
|
2009-03-09 01:35:00 +00:00
|
|
|
GXFContext *gxf = s->priv_data;
|
2011-02-20 10:04:12 +00:00
|
|
|
AVIOContext *pb = s->pb;
|
2008-10-03 10:16:29 +00:00
|
|
|
int64_t end;
|
2009-03-09 02:47:32 +00:00
|
|
|
int i;
|
2013-09-11 11:14:12 +00:00
|
|
|
int ret;
|
2006-07-19 09:38:22 +00:00
|
|
|
|
2009-03-08 21:55:08 +00:00
|
|
|
gxf_write_eos_packet(pb);
|
2011-03-03 19:11:45 +00:00
|
|
|
end = avio_tell(pb);
|
2011-02-28 13:57:54 +00:00
|
|
|
avio_seek(pb, 0, SEEK_SET);
|
2009-03-09 01:35:00 +00:00
|
|
|
/* overwrite map, flt and umf packets with new values */
|
2013-09-11 11:14:12 +00:00
|
|
|
if ((ret = gxf_write_map_packet(s, 1)) < 0)
|
|
|
|
return ret;
|
2009-03-09 01:35:00 +00:00
|
|
|
gxf_write_flt_packet(s);
|
2009-03-08 21:55:08 +00:00
|
|
|
gxf_write_umf_packet(s);
|
2009-03-09 02:47:32 +00:00
|
|
|
/* update duration in all map packets */
|
|
|
|
for (i = 1; i < gxf->map_offsets_nb; i++) {
|
2011-02-28 13:57:54 +00:00
|
|
|
avio_seek(pb, gxf->map_offsets[i], SEEK_SET);
|
2013-09-11 11:14:12 +00:00
|
|
|
if ((ret = gxf_write_map_packet(s, 1)) < 0)
|
|
|
|
return ret;
|
2009-03-09 02:47:32 +00:00
|
|
|
}
|
|
|
|
|
2011-02-28 13:57:54 +00:00
|
|
|
avio_seek(pb, end, SEEK_SET);
|
2009-03-09 01:35:00 +00:00
|
|
|
|
2020-01-26 10:27:39 +00:00
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
static void gxf_deinit(AVFormatContext *s)
|
|
|
|
{
|
|
|
|
GXFContext *gxf = s->priv_data;
|
|
|
|
|
|
|
|
ff_audio_interleave_close(s);
|
|
|
|
|
2009-03-09 01:35:00 +00:00
|
|
|
av_freep(&gxf->flt_entries);
|
2009-03-09 02:47:32 +00:00
|
|
|
av_freep(&gxf->map_offsets);
|
2006-07-19 09:38:22 +00:00
|
|
|
}
|
|
|
|
|
2006-08-09 10:48:32 +00:00
|
|
|
static int gxf_parse_mpeg_frame(GXFStreamContext *sc, const uint8_t *buf, int size)
|
|
|
|
{
|
|
|
|
uint32_t c=-1;
|
|
|
|
int i;
|
|
|
|
for(i=0; i<size-4 && c!=0x100; i++){
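/* scan forward until the picture start code 0x00000100 is found */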
|
|
|
|
c = (c<<8) + buf[i];
|
2006-08-21 15:49:08 +00:00
|
|
|
if(c == 0x1B8 && sc->first_gop_closed == -1) /* GOP start code */
|
2006-08-21 15:14:36 +00:00
|
|
|
sc->first_gop_closed= (buf[i+4]>>6)&1;
|
2006-08-09 10:48:32 +00:00
|
|
|
}
|
|
|
|
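/* picture_coding_type from the picture header: 1 = I, 2 = P, 3 = B
 * (the same values as AV_PICTURE_TYPE_I/P/B) */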
return (buf[i+1]>>3)&7;
|
|
|
|
}
|
|
|
|
|
2009-03-08 21:55:08 +00:00
|
|
|
static int gxf_write_media_preamble(AVFormatContext *s, AVPacket *pkt, int size)
|
2006-07-19 09:38:22 +00:00
|
|
|
{
|
2009-03-08 21:55:08 +00:00
|
|
|
GXFContext *gxf = s->priv_data;
|
2011-02-20 10:04:12 +00:00
|
|
|
AVIOContext *pb = s->pb;
|
2009-03-08 21:55:08 +00:00
|
|
|
AVStream *st = s->streams[pkt->stream_index];
|
|
|
|
GXFStreamContext *sc = st->priv_data;
|
2009-02-13 21:36:36 +00:00
|
|
|
unsigned field_nb;
|
|
|
|
/* If the video is frame-encoded, the frame numbers shall be represented by
|
|
|
|
* even field numbers.
|
|
|
|
* see SMPTE360M-2004 6.4.2.1.3 Media field number */
|
2014-06-18 18:42:52 +00:00
|
|
|
if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
|
2009-03-08 21:55:08 +00:00
|
|
|
field_nb = gxf->nb_fields;
|
2009-02-13 21:36:36 +00:00
|
|
|
} else {
|
2009-03-09 01:10:28 +00:00
|
|
|
field_nb = av_rescale_rnd(pkt->dts, gxf->time_base.den,
|
|
|
|
(int64_t)48000*gxf->time_base.num, AV_ROUND_UP);
|
2009-02-13 21:36:36 +00:00
|
|
|
}
|
2006-07-19 09:38:22 +00:00
|
|
|
|
2011-02-21 18:28:17 +00:00
|
|
|
avio_w8(pb, sc->media_type);
|
|
|
|
avio_w8(pb, st->index);
|
|
|
|
avio_wb32(pb, field_nb);
|
2014-06-18 18:42:52 +00:00
|
|
|
if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
|
2011-02-21 18:28:17 +00:00
|
|
|
avio_wb16(pb, 0);
|
|
|
|
avio_wb16(pb, size / 2);
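/* audio payload length expressed in 16-bit PCM samples */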
|
2014-06-18 18:42:52 +00:00
|
|
|
} else if (st->codecpar->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
|
2006-08-09 10:48:32 +00:00
|
|
|
int frame_type = gxf_parse_mpeg_frame(sc, pkt->data, pkt->size);
|
2011-04-27 23:40:44 +00:00
|
|
|
if (frame_type == AV_PICTURE_TYPE_I) {
|
2011-02-21 18:28:17 +00:00
|
|
|
avio_w8(pb, 0x0d);
|
2006-07-19 09:38:22 +00:00
|
|
|
sc->iframes++;
|
2011-04-27 23:40:44 +00:00
|
|
|
} else if (frame_type == AV_PICTURE_TYPE_B) {
|
2011-02-21 18:28:17 +00:00
|
|
|
avio_w8(pb, 0x0f);
|
2006-07-19 09:38:22 +00:00
|
|
|
sc->bframes++;
|
|
|
|
} else {
|
2011-02-21 18:28:17 +00:00
|
|
|
avio_w8(pb, 0x0e);
|
2006-07-19 09:38:22 +00:00
|
|
|
sc->pframes++;
|
|
|
|
}
|
2011-02-21 18:28:17 +00:00
|
|
|
avio_wb24(pb, size);
|
2014-06-18 18:42:52 +00:00
|
|
|
} else if (st->codecpar->codec_id == AV_CODEC_ID_DVVIDEO) {
|
2011-02-21 18:28:17 +00:00
|
|
|
avio_w8(pb, size / 4096);
|
|
|
|
avio_wb24(pb, 0);
|
2006-07-19 09:38:22 +00:00
|
|
|
} else
|
2011-02-21 18:28:17 +00:00
|
|
|
avio_wb32(pb, size);
|
|
|
|
avio_wb32(pb, field_nb);
|
|
|
|
avio_w8(pb, 1); /* flags */
|
|
|
|
avio_w8(pb, 0); /* reserved */
|
2006-07-19 09:38:22 +00:00
|
|
|
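/* preamble layout: type(1) + track(1) + field(4) + size(4) + field(4) + flags(1) + reserved(1) */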
return 16;
|
|
|
|
}
|
|
|
|
|
2009-03-08 22:14:26 +00:00
|
|
|
static int gxf_write_packet(AVFormatContext *s, AVPacket *pkt)
|
2006-07-19 09:38:22 +00:00
|
|
|
{
|
2009-03-08 21:55:08 +00:00
|
|
|
GXFContext *gxf = s->priv_data;
|
2011-02-20 10:04:12 +00:00
|
|
|
AVIOContext *pb = s->pb;
|
2009-03-08 21:55:08 +00:00
|
|
|
AVStream *st = s->streams[pkt->stream_index];
|
2011-03-03 19:11:45 +00:00
|
|
|
int64_t pos = avio_tell(pb);
|
2006-07-19 09:38:22 +00:00
|
|
|
int padding = 0;
|
2013-12-21 12:48:15 +00:00
|
|
|
unsigned packet_start_offset = avio_tell(pb) / 1024;
|
2013-09-11 11:14:12 +00:00
|
|
|
int ret;
|
2006-07-19 09:38:22 +00:00
|
|
|
|
|
|
|
gxf_write_packet_header(pb, PKT_MEDIA);
|
2014-06-18 18:42:52 +00:00
|
|
|
if (st->codecpar->codec_id == AV_CODEC_ID_MPEG2VIDEO && pkt->size % 4) /* MPEG-2 frames must be padded */
|
2006-07-19 09:38:22 +00:00
|
|
|
padding = 4 - pkt->size % 4;
|
2014-06-18 18:42:52 +00:00
|
|
|
else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
|
2006-08-01 09:56:27 +00:00
|
|
|
padding = GXF_AUDIO_PACKET_SIZE - pkt->size;
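/* audio packets are padded up to the fixed GXF_AUDIO_PACKET_SIZE (64 KiB) payload */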
|
2009-03-08 21:55:08 +00:00
|
|
|
gxf_write_media_preamble(s, pkt, pkt->size + padding);
|
2011-02-21 18:28:17 +00:00
|
|
|
avio_write(pb, pkt->data, pkt->size);
|
2006-07-19 09:38:22 +00:00
|
|
|
gxf_write_padding(pb, padding);
|
2009-01-20 09:43:58 +00:00
|
|
|
|
2014-06-18 18:42:52 +00:00
|
|
|
if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
|
2009-03-09 01:35:00 +00:00
|
|
|
if (!(gxf->flt_entries_nb % 500)) {
|
2013-09-10 09:57:35 +00:00
|
|
|
int err;
|
|
|
|
if ((err = av_reallocp_array(&gxf->flt_entries,
|
|
|
|
gxf->flt_entries_nb + 500,
|
|
|
|
sizeof(*gxf->flt_entries))) < 0) {
|
|
|
|
gxf->flt_entries_nb = 0;
|
2013-09-11 11:16:03 +00:00
|
|
|
gxf->nb_fields = 0;
|
2009-03-09 01:35:00 +00:00
|
|
|
av_log(s, AV_LOG_ERROR, "could not reallocate flt entries\n");
|
2013-09-10 09:57:35 +00:00
|
|
|
return err;
|
2009-03-09 01:35:00 +00:00
|
|
|
}
|
|
|
|
}
|
2010-10-07 19:15:35 +00:00
|
|
|
gxf->flt_entries[gxf->flt_entries_nb++] = packet_start_offset;
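/* record this media packet's start offset (in 1024-byte units) for the flt packet;
 * one entry per video packet */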
|
2009-03-08 21:55:08 +00:00
|
|
|
gxf->nb_fields += 2; // count fields
|
2009-03-09 01:35:00 +00:00
|
|
|
}
|
2009-01-20 09:43:58 +00:00
|
|
|
|
2009-03-09 02:47:32 +00:00
|
|
|
updatePacketSize(pb, pos);
|
|
|
|
|
|
|
|
gxf->packet_count++;
|
|
|
|
if (gxf->packet_count == 100) {
|
2013-09-11 11:14:12 +00:00
|
|
|
if ((ret = gxf_write_map_packet(s, 0)) < 0)
|
|
|
|
return ret;
|
2009-03-09 02:47:32 +00:00
|
|
|
gxf->packet_count = 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
return 0;
|
2006-07-19 09:38:22 +00:00
|
|
|
}
|
|
|
|
|
2019-08-13 02:47:16 +00:00
|
|
|
static int gxf_compare_field_nb(AVFormatContext *s, const AVPacket *next,
|
|
|
|
const AVPacket *cur)
|
2009-03-09 01:10:28 +00:00
|
|
|
{
|
|
|
|
GXFContext *gxf = s->priv_data;
|
2019-08-13 02:47:16 +00:00
|
|
|
const AVPacket *pkt[2] = { cur, next };
|
2009-03-09 01:10:28 +00:00
|
|
|
int i, field_nb[2];
|
|
|
|
GXFStreamContext *sc[2];
|
|
|
|
|
|
|
|
for (i = 0; i < 2; i++) {
|
|
|
|
AVStream *st = s->streams[pkt[i]->stream_index];
|
|
|
|
sc[i] = st->priv_data;
|
2014-06-18 18:42:52 +00:00
|
|
|
if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
|
2009-03-09 01:10:28 +00:00
|
|
|
field_nb[i] = av_rescale_rnd(pkt[i]->dts, gxf->time_base.den,
|
|
|
|
(int64_t)48000*gxf->time_base.num, AV_ROUND_UP);
|
|
|
|
field_nb[i] &= ~1; // compare against even field number because audio must be before video
|
|
|
|
} else
|
|
|
|
field_nb[i] = pkt[i]->dts; // dts are field based
|
|
|
|
}
|
|
|
|
|
|
|
|
return field_nb[1] > field_nb[0] ||
|
|
|
|
(field_nb[1] == field_nb[0] && sc[1]->order > sc[0]->order);
|
|
|
|
}
|
|
|
|
|
2006-07-19 09:38:22 +00:00
|
|
|
static int gxf_interleave_packet(AVFormatContext *s, AVPacket *out, AVPacket *pkt, int flush)
|
|
|
|
{
|
2014-06-18 18:42:52 +00:00
|
|
|
if (pkt && s->streams[pkt->stream_index]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
|
2009-03-09 01:10:28 +00:00
|
|
|
pkt->duration = 2; // enforce 2 fields
|
2009-02-09 21:34:20 +00:00
|
|
|
return ff_audio_rechunk_interleave(s, out, pkt, flush,
|
2012-03-15 12:37:43 +00:00
|
|
|
ff_interleave_packet_per_dts, gxf_compare_field_nb);
|
2006-07-19 09:38:22 +00:00
|
|
|
}
|
|
|
|
|
2011-01-25 22:03:28 +00:00
|
|
|
AVOutputFormat ff_gxf_muxer = {
|
2011-07-16 20:18:12 +00:00
|
|
|
.name = "gxf",
|
2012-07-24 01:23:48 +00:00
|
|
|
.long_name = NULL_IF_CONFIG_SMALL("GXF (General eXchange Format)"),
|
2011-07-16 20:18:12 +00:00
|
|
|
.extensions = "gxf",
|
|
|
|
.priv_data_size = sizeof(GXFContext),
|
2012-08-05 09:11:04 +00:00
|
|
|
.audio_codec = AV_CODEC_ID_PCM_S16LE,
|
|
|
|
.video_codec = AV_CODEC_ID_MPEG2VIDEO,
|
2011-07-16 20:18:12 +00:00
|
|
|
.write_header = gxf_write_header,
|
|
|
|
.write_packet = gxf_write_packet,
|
|
|
|
.write_trailer = gxf_write_trailer,
|
2020-01-26 10:27:39 +00:00
|
|
|
.deinit = gxf_deinit,
|
2011-07-16 20:18:12 +00:00
|
|
|
.interleave_packet = gxf_interleave_packet,
|
2006-07-19 09:38:22 +00:00
|
|
|
};
|
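For reference, a minimal caller-side sketch of driving this muxer through the public
libavformat API. It is not part of gxfenc.c: the output name "out.gxf", the 720x576 PAL
geometry and the helper name write_gxf_sketch are illustrative assumptions, and error
handling is reduced to bare checks.

#include <libavformat/avformat.h>

/* hypothetical helper: mux one MPEG-2 video track and one mono PCM track to GXF */
static int write_gxf_sketch(void)
{
    AVFormatContext *oc = NULL;
    AVStream *video, *audio;
    int ret = avformat_alloc_output_context2(&oc, NULL, "gxf", "out.gxf");
    if (ret < 0)
        return ret;

    /* the video stream must come first (checked in the write_header code above) */
    video = avformat_new_stream(oc, NULL);
    audio = avformat_new_stream(oc, NULL);
    if (!video || !audio) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    video->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
    video->codecpar->codec_id   = AV_CODEC_ID_MPEG2VIDEO;
    video->codecpar->width      = 720;
    video->codecpar->height     = 576;           /* 576 or 608 selects PAL above */

    audio->codecpar->codec_type  = AVMEDIA_TYPE_AUDIO;
    audio->codecpar->codec_id    = AV_CODEC_ID_PCM_S16LE;
    audio->codecpar->channels    = 1;             /* non-mono tracks are rejected above */
    audio->codecpar->sample_rate = 48000;         /* matches the 1/48000 audio time base above */

    if ((ret = avio_open(&oc->pb, "out.gxf", AVIO_FLAG_WRITE)) < 0)
        goto fail;
    if ((ret = avformat_write_header(oc, NULL)) < 0)    /* gxf_write_header() */
        goto fail;

    /* ... feed encoded packets with av_interleaved_write_frame(oc, pkt) ... */

    ret = av_write_trailer(oc);                         /* gxf_write_trailer() */
fail:
    avio_closep(&oc->pb);
    avformat_free_context(oc);    /* the gxf_deinit() callback runs during teardown */
    return ret;
}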