added MPEG2TS support in RTP, SDP and RTSP - replaced fake RTP demux by a specific API

Originally committed as revision 2448 to svn://svn.ffmpeg.org/ffmpeg/trunk
Fabrice Bellard 2003-10-29 14:25:27 +00:00
parent da24c5e330
commit 8b1ab7bf21
3 changed files with 354 additions and 278 deletions
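For orientation before the diffs: a minimal sketch of how a caller is expected to drive the demux API this change introduces (rtp_parse_open / rtp_parse_packet / rtp_parse_close, declared in rtp.h below). The read_rtp_data() and consume_packet() helpers are hypothetical stand-ins for the caller's network input and packet output; the return-value protocol follows the doc comment added to rtp.c: 0 means one packet was produced, 1 means a packet was produced and more are still buffered (fetch them by calling again with buf == NULL), -1 means nothing usable (for example the datagram was RTCP).

/* hypothetical helpers supplied by the caller, not part of this commit */
static int read_rtp_data(uint8_t *buf, int buf_size);   /* one RTP datagram */
static void consume_packet(AVPacket *pkt);              /* hand off and free */

static int demux_rtp_session(AVFormatContext *ic, AVStream *st, int payload_type)
{
    uint8_t buf[RTP_MAX_PACKET_LENGTH];
    AVPacket pkt;
    RTPDemuxContext *rtp;
    int len, ret;

    /* 'st' may be NULL for RTP_PT_MPEG2TS: the embedded transport stream
       is then demuxed inside the RTP layer via mpegts_parse_packet() */
    rtp = rtp_parse_open(ic, st, payload_type);
    if (!rtp)
        return -1;
    while ((len = read_rtp_data(buf, sizeof(buf))) > 0) {
        ret = rtp_parse_packet(rtp, &pkt, buf, len);
        while (ret >= 0) {
            consume_packet(&pkt);
            if (ret == 0)
                break;
            /* ret == 1: drain the packets still buffered in the context */
            ret = rtp_parse_packet(rtp, &pkt, NULL, 0);
        }
    }
    rtp_parse_close(rtp);
    return 0;
}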

View File

@@ -17,6 +17,7 @@
  * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
  */
 #include "avformat.h"
+#include "mpegts.h"
 #include <unistd.h>
 #include <sys/types.h>
@@ -72,23 +73,9 @@ typedef enum {
     RTCP_SDES_SOURCE = 11
 } rtcp_sdes_type_t;
 
-enum RTPPayloadType {
-    RTP_PT_ULAW = 0,
-    RTP_PT_GSM = 3,
-    RTP_PT_G723 = 4,
-    RTP_PT_ALAW = 8,
-    RTP_PT_S16BE_STEREO = 10,
-    RTP_PT_S16BE_MONO = 11,
-    RTP_PT_MPEGAUDIO = 14,
-    RTP_PT_JPEG = 26,
-    RTP_PT_H261 = 31,
-    RTP_PT_MPEGVIDEO = 32,
-    RTP_PT_MPEG2TS = 33,
-    RTP_PT_H263 = 34, /* old H263 encapsulation */
-    RTP_PT_PRIVATE = 96,
-};
-
-typedef struct RTPContext {
+struct RTPDemuxContext {
+    AVFormatContext *ic;
+    AVStream *st;
     int payload_type;
     uint32_t ssrc;
     uint16_t seq;
@@ -96,6 +83,10 @@ typedef struct RTPContext {
     uint32_t base_timestamp;
     uint32_t cur_timestamp;
     int max_payload_size;
+    MpegTSContext *ts; /* only used for RTP_PT_MPEG2TS payloads */
+    int read_buf_index;
+    int read_buf_size;
     /* rtcp sender statistics receive */
     int64_t last_rtcp_ntp_time;
     int64_t first_rtcp_ntp_time;
@@ -108,40 +99,51 @@ typedef struct RTPContext {
     /* buffer for output */
     uint8_t buf[RTP_MAX_PACKET_LENGTH];
     uint8_t *buf_ptr;
-} RTPContext;
+};
 
 int rtp_get_codec_info(AVCodecContext *codec, int payload_type)
 {
     switch(payload_type) {
     case RTP_PT_ULAW:
+        codec->codec_type = CODEC_TYPE_AUDIO;
         codec->codec_id = CODEC_ID_PCM_MULAW;
         codec->channels = 1;
         codec->sample_rate = 8000;
         break;
     case RTP_PT_ALAW:
+        codec->codec_type = CODEC_TYPE_AUDIO;
        codec->codec_id = CODEC_ID_PCM_ALAW;
         codec->channels = 1;
         codec->sample_rate = 8000;
         break;
     case RTP_PT_S16BE_STEREO:
+        codec->codec_type = CODEC_TYPE_AUDIO;
         codec->codec_id = CODEC_ID_PCM_S16BE;
         codec->channels = 2;
         codec->sample_rate = 44100;
         break;
     case RTP_PT_S16BE_MONO:
+        codec->codec_type = CODEC_TYPE_AUDIO;
         codec->codec_id = CODEC_ID_PCM_S16BE;
         codec->channels = 1;
         codec->sample_rate = 44100;
         break;
     case RTP_PT_MPEGAUDIO:
+        codec->codec_type = CODEC_TYPE_AUDIO;
         codec->codec_id = CODEC_ID_MP2;
         break;
     case RTP_PT_JPEG:
+        codec->codec_type = CODEC_TYPE_VIDEO;
         codec->codec_id = CODEC_ID_MJPEG;
         break;
    case RTP_PT_MPEGVIDEO:
+        codec->codec_type = CODEC_TYPE_VIDEO;
         codec->codec_id = CODEC_ID_MPEG1VIDEO;
         break;
+    case RTP_PT_MPEG2TS:
+        codec->codec_type = CODEC_TYPE_DATA;
+        codec->codec_id = CODEC_ID_MPEG2TS;
+        break;
     default:
         return -1;
     }
@@ -179,6 +181,9 @@ int rtp_get_payload_type(AVCodecContext *codec)
     case CODEC_ID_MPEG1VIDEO:
         payload_type = RTP_PT_MPEGVIDEO;
         break;
+    case CODEC_ID_MPEG2TS:
+        payload_type = RTP_PT_MPEG2TS;
+        break;
     default:
         break;
     }
@@ -195,10 +200,8 @@ static inline uint64_t decode_be64(const uint8_t *p)
     return ((uint64_t)decode_be32(p) << 32) | decode_be32(p + 4);
 }
 
-static int rtcp_parse_packet(AVFormatContext *s1, const unsigned char *buf, int len)
+static int rtcp_parse_packet(RTPDemuxContext *s, const unsigned char *buf, int len)
 {
-    RTPContext *s = s1->priv_data;
-
     if (buf[1] != 200)
         return -1;
     s->last_rtcp_ntp_time = decode_be64(buf + 8);
@@ -209,30 +212,71 @@ static int rtcp_parse_packet(AVFormatContext *s1, const unsigned char *buf, int
 }
 
 /**
- * Parse an RTP packet directly sent as raw data. Can only be used if
- * 'raw' is given as input file
- * @param s1 media file context
- * @param pkt returned packet
- * @param buf input buffer
- * @param len buffer len
- * @return zero if no error.
+ * open a new RTP parse context for stream 'st'. 'st' can be NULL for
+ * MPEG2TS streams to indicate that they should be demuxed inside the
+ * rtp demux (otherwise CODEC_ID_MPEG2TS packets are returned)
  */
-int rtp_parse_packet(AVFormatContext *s1, AVPacket *pkt,
-                     const unsigned char *buf, int len)
+RTPDemuxContext *rtp_parse_open(AVFormatContext *s1, AVStream *st, int payload_type)
+{
+    RTPDemuxContext *s;
+
+    s = av_mallocz(sizeof(RTPDemuxContext));
+    if (!s)
+        return NULL;
+    s->payload_type = payload_type;
+    s->last_rtcp_ntp_time = AV_NOPTS_VALUE;
+    s->first_rtcp_ntp_time = AV_NOPTS_VALUE;
+    s->ic = s1;
+    s->st = st;
+    if (payload_type == RTP_PT_MPEG2TS) {
+        s->ts = mpegts_parse_open(s->ic);
+        if (s->ts == NULL) {
+            av_free(s);
+            return NULL;
+        }
+    }
+    return s;
+}
+
+/**
+ * Parse an RTP or RTCP packet directly sent as a buffer.
+ * @param s RTP parse context.
+ * @param pkt returned packet
+ * @param buf input buffer or NULL to read the next packets
+ * @param len buffer len
+ * @return 0 if a packet is returned, 1 if a packet is returned and more can follow
+ * (use buf as NULL to read the next). -1 if no packet (error or no more packet).
+ */
+int rtp_parse_packet(RTPDemuxContext *s, AVPacket *pkt,
+                     const uint8_t *buf, int len)
 {
-    RTPContext *s = s1->priv_data;
     unsigned int ssrc, h;
-    int payload_type, seq, delta_timestamp;
+    int payload_type, seq, delta_timestamp, ret;
     AVStream *st;
     uint32_t timestamp;
 
+    if (!buf) {
+        /* return the next packets, if any */
+        if (s->read_buf_index >= s->read_buf_size)
+            return -1;
+        ret = mpegts_parse_packet(s->ts, pkt, s->buf + s->read_buf_index,
+                                  s->read_buf_size - s->read_buf_index);
+        if (ret < 0)
+            return -1;
+        s->read_buf_index += ret;
+        if (s->read_buf_index < s->read_buf_size)
+            return 1;
+        else
+            return 0;
+    }
+
     if (len < 12)
         return -1;
     if ((buf[0] & 0xc0) != (RTP_VERSION << 6))
         return -1;
     if (buf[1] >= 200 && buf[1] <= 204) {
-        rtcp_parse_packet(s1, buf, len);
+        rtcp_parse_packet(s, buf, len);
         return -1;
     }
     payload_type = buf[1] & 0x7f;
@@ -240,20 +284,6 @@ int rtp_parse_packet(AVFormatContext *s1, AVPacket *pkt,
     timestamp = decode_be32(buf + 4);
     ssrc = decode_be32(buf + 8);
 
-    if (s->payload_type < 0) {
-        s->payload_type = payload_type;
-        if (payload_type == RTP_PT_MPEG2TS) {
-            /* XXX: special case : not a single codec but a whole stream */
-            return -1;
-        } else {
-            st = av_new_stream(s1, 0);
-            if (!st)
-                return -1;
-            rtp_get_codec_info(&st->codec, payload_type);
-        }
-    }
-
     /* NOTE: we can handle only one payload type */
     if (s->payload_type != payload_type)
         return -1;
@@ -266,7 +296,20 @@ int rtp_parse_packet(AVFormatContext *s1, AVPacket *pkt,
 #endif
     len -= 12;
     buf += 12;
-    st = s1->streams[0];
+    st = s->st;
+    if (!st) {
+        /* specific MPEG2TS demux support */
+        ret = mpegts_parse_packet(s->ts, pkt, buf, len);
+        if (ret < 0)
+            return -1;
+        if (ret < len) {
+            s->read_buf_size = len - ret;
+            memcpy(s->buf, buf + ret, s->read_buf_size);
+            s->read_buf_index = 0;
+            return 1;
+        }
+    } else {
     switch(st->codec.codec_id) {
     case CODEC_ID_MP2:
         /* better than nothing: skip mpeg audio RTP header */
@@ -319,54 +362,25 @@ int rtp_parse_packet(AVFormatContext *s1, AVPacket *pkt,
         /* no timestamp info yet */
         break;
     }
-    return 0;
-}
-
-static int rtp_read_header(AVFormatContext *s1,
-                           AVFormatParameters *ap)
-{
-    RTPContext *s = s1->priv_data;
-
-    s->payload_type = -1;
-    s->last_rtcp_ntp_time = AV_NOPTS_VALUE;
-    s->first_rtcp_ntp_time = AV_NOPTS_VALUE;
-    return 0;
-}
-
-static int rtp_read_packet(AVFormatContext *s1, AVPacket *pkt)
-{
-    char buf[RTP_MAX_PACKET_LENGTH];
-    int ret;
-
-    /* XXX: needs a better API for packet handling ? */
-    for(;;) {
-        ret = url_read(url_fileno(&s1->pb), buf, sizeof(buf));
-        if (ret < 0)
-            return AVERROR_IO;
-        if (rtp_parse_packet(s1, pkt, buf, ret) == 0)
-            break;
+        pkt->stream_index = s->st->index;
     }
     return 0;
 }
 
-static int rtp_read_close(AVFormatContext *s1)
+void rtp_parse_close(RTPDemuxContext *s)
 {
-    //    RTPContext *s = s1->priv_data;
-    return 0;
-}
-
-static int rtp_probe(AVProbeData *p)
-{
-    if (strstart(p->filename, "rtp://", NULL))
-        return AVPROBE_SCORE_MAX;
-    return 0;
+    if (s->payload_type == RTP_PT_MPEG2TS) {
+        mpegts_parse_close(s->ts);
+    }
+    av_free(s);
 }
 
 /* rtp output */
 
 static int rtp_write_header(AVFormatContext *s1)
 {
-    RTPContext *s = s1->priv_data;
-    int payload_type, max_packet_size;
+    RTPDemuxContext *s = s1->priv_data;
+    int payload_type, max_packet_size, n;
     AVStream *st;
 
     if (s1->nb_streams != 1)
@@ -397,6 +411,13 @@ static int rtp_write_header(AVFormatContext *s1)
     case CODEC_ID_MPEG1VIDEO:
         s->cur_timestamp = 0;
         break;
+    case CODEC_ID_MPEG2TS:
+        n = s->max_payload_size / TS_PACKET_SIZE;
+        if (n < 1)
+            n = 1;
+        s->max_payload_size = n * TS_PACKET_SIZE;
+        s->buf_ptr = s->buf;
+        break;
     default:
         s->buf_ptr = s->buf;
         break;
@@ -408,7 +429,7 @@ static int rtp_write_header(AVFormatContext *s1)
 /* send an rtcp sender report packet */
 static void rtcp_send_sr(AVFormatContext *s1, int64_t ntp_time)
 {
-    RTPContext *s = s1->priv_data;
+    RTPDemuxContext *s = s1->priv_data;
 #if defined(DEBUG)
     printf("RTCP: %02x %Lx %x\n", s->payload_type, ntp_time, s->timestamp);
 #endif
@@ -427,7 +448,7 @@ static void rtcp_send_sr(AVFormatContext *s1, int64_t ntp_time)
    must update the timestamp itself */
 static void rtp_send_data(AVFormatContext *s1, const uint8_t *buf1, int len)
 {
-    RTPContext *s = s1->priv_data;
+    RTPDemuxContext *s = s1->priv_data;
 
 #ifdef DEBUG
     printf("rtp_send_data size=%d\n", len);
@@ -453,7 +474,7 @@ static void rtp_send_data(AVFormatContext *s1, const uint8_t *buf1, int len)
 static void rtp_send_samples(AVFormatContext *s1,
                              const uint8_t *buf1, int size, int sample_size)
 {
-    RTPContext *s = s1->priv_data;
+    RTPDemuxContext *s = s1->priv_data;
     int len, max_packet_size, n;
 
     max_packet_size = (s->max_payload_size / sample_size) * sample_size;
@@ -486,7 +507,7 @@ static void rtp_send_samples(AVFormatContext *s1,
 static void rtp_send_mpegaudio(AVFormatContext *s1,
                                const uint8_t *buf1, int size)
 {
-    RTPContext *s = s1->priv_data;
+    RTPDemuxContext *s = s1->priv_data;
     AVStream *st = s1->streams[0];
     int len, count, max_packet_size;
 
@@ -542,7 +563,7 @@ static void rtp_send_mpegaudio(AVFormatContext *s1,
 static void rtp_send_mpegvideo(AVFormatContext *s1,
                                const uint8_t *buf1, int size)
 {
-    RTPContext *s = s1->priv_data;
+    RTPDemuxContext *s = s1->priv_data;
     AVStream *st = s1->streams[0];
     int len, h, max_packet_size;
     uint8_t *q;
@@ -589,7 +610,7 @@ static void rtp_send_mpegvideo(AVFormatContext *s1,
 static void rtp_send_raw(AVFormatContext *s1,
                          const uint8_t *buf1, int size)
 {
-    RTPContext *s = s1->priv_data;
+    RTPDemuxContext *s = s1->priv_data;
     AVStream *st = s1->streams[0];
     int len, max_packet_size;
 
@@ -611,11 +632,35 @@ static void rtp_send_raw(AVFormatContext *s1,
     s->cur_timestamp++;
 }
 
+/* NOTE: size is assumed to be an integer multiple of TS_PACKET_SIZE */
+static void rtp_send_mpegts_raw(AVFormatContext *s1,
+                                const uint8_t *buf1, int size)
+{
+    RTPDemuxContext *s = s1->priv_data;
+    int len, out_len;
+
+    while (size >= TS_PACKET_SIZE) {
+        len = s->max_payload_size - (s->buf_ptr - s->buf);
+        if (len > size)
+            len = size;
+        memcpy(s->buf_ptr, buf1, len);
+        buf1 += len;
+        size -= len;
+        s->buf_ptr += len;
+
+        out_len = s->buf_ptr - s->buf;
+        if (out_len >= s->max_payload_size) {
+            rtp_send_data(s1, s->buf, out_len);
+            s->buf_ptr = s->buf;
+        }
+    }
+}
+
 /* write an RTP packet. 'buf1' must contain a single specific frame. */
 static int rtp_write_packet(AVFormatContext *s1, int stream_index,
                             const uint8_t *buf1, int size, int64_t pts)
 {
-    RTPContext *s = s1->priv_data;
+    RTPDemuxContext *s = s1->priv_data;
     AVStream *st = s1->streams[0];
     int rtcp_bytes;
     int64_t ntp_time;
@@ -656,6 +701,9 @@ static int rtp_write_packet(AVFormatContext *s1, int stream_index,
     case CODEC_ID_MPEG1VIDEO:
         rtp_send_mpegvideo(s1, buf1, size);
         break;
+    case CODEC_ID_MPEG2TS:
+        rtp_send_mpegts_raw(s1, buf1, size);
+        break;
     default:
         /* better than nothing : send the codec raw data */
         rtp_send_raw(s1, buf1, size);
@@ -666,27 +714,16 @@ static int rtp_write_packet(AVFormatContext *s1, int stream_index,
 
 static int rtp_write_trailer(AVFormatContext *s1)
 {
-    //    RTPContext *s = s1->priv_data;
+    //    RTPDemuxContext *s = s1->priv_data;
     return 0;
 }
 
-AVInputFormat rtp_demux = {
-    "rtp",
-    "RTP input format",
-    sizeof(RTPContext),
-    rtp_probe,
-    rtp_read_header,
-    rtp_read_packet,
-    rtp_read_close,
-    .flags = AVFMT_NOHEADER,
-};
-
 AVOutputFormat rtp_mux = {
     "rtp",
     "RTP output format",
     NULL,
     NULL,
-    sizeof(RTPContext),
+    sizeof(RTPDemuxContext),
     CODEC_ID_PCM_MULAW,
     CODEC_ID_NONE,
     rtp_write_header,
@@ -697,6 +734,5 @@ AVOutputFormat rtp_mux = {
 int rtp_init(void)
 {
     av_register_output_format(&rtp_mux);
-    av_register_input_format(&rtp_demux);
     return 0;
 }
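A note on the MPEG2TS output path added above: the MP2T payload format (RFC 2250) carries an integral number of 188-byte transport stream packets per RTP packet, so rtp_write_header rounds the payload size down to a multiple of TS_PACKET_SIZE (defined in mpegts.h) and rtp_send_mpegts_raw accumulates TS packets until a full payload is ready. Worked through with an illustrative 1500-byte limit (not necessarily the muxer's actual default):

int max_payload_size = 1500;                 /* illustrative figure only */
int n = max_payload_size / TS_PACKET_SIZE;   /* 1500 / 188 = 7 */
if (n < 1)
    n = 1;
max_payload_size = n * TS_PACKET_SIZE;       /* 7 * 188 = 1316 bytes per RTP packet */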

View File

@@ -19,14 +19,35 @@
 #ifndef RTP_H
 #define RTP_H
 
+enum RTPPayloadType {
+    RTP_PT_ULAW = 0,
+    RTP_PT_GSM = 3,
+    RTP_PT_G723 = 4,
+    RTP_PT_ALAW = 8,
+    RTP_PT_S16BE_STEREO = 10,
+    RTP_PT_S16BE_MONO = 11,
+    RTP_PT_MPEGAUDIO = 14,
+    RTP_PT_JPEG = 26,
+    RTP_PT_H261 = 31,
+    RTP_PT_MPEGVIDEO = 32,
+    RTP_PT_MPEG2TS = 33,
+    RTP_PT_H263 = 34, /* old H263 encapsulation */
+    RTP_PT_PRIVATE = 96,
+};
+
 #define RTP_MIN_PACKET_LENGTH 12
 #define RTP_MAX_PACKET_LENGTH 1500 /* XXX: suppress this define */
 
 int rtp_init(void);
 int rtp_get_codec_info(AVCodecContext *codec, int payload_type);
 int rtp_get_payload_type(AVCodecContext *codec);
-int rtp_parse_packet(AVFormatContext *s1, AVPacket *pkt,
-                     const unsigned char *buf, int len);
+
+typedef struct RTPDemuxContext RTPDemuxContext;
+RTPDemuxContext *rtp_parse_open(AVFormatContext *s1, AVStream *st, int payload_type);
+int rtp_parse_packet(RTPDemuxContext *s, AVPacket *pkt,
+                     const uint8_t *buf, int len);
+void rtp_parse_close(RTPDemuxContext *s);
 
 extern AVOutputFormat rtp_mux;
 extern AVInputFormat rtp_demux;
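The payload-type enum now lives in rtp.h so that rtsp.c's SDP parser can recognise RTP_PT_MPEG2TS directly. The values below RTP_PT_PRIVATE (96) are the static assignments of the RTP audio/video profile, which is why sdp_parse_line below can resolve a codec immediately when sdp_payload_type < 96, while 96 and above are dynamic and would need an a=rtpmap attribute. A sketch of that decision using only what rtp.h exports (the helper name is made up):

/* illustrative only: map an SDP "m=" format number to codec parameters */
static int codec_from_payload_type(AVCodecContext *codec, int pt)
{
    if (pt == RTP_PT_MPEG2TS)
        return 0;                  /* whole transport stream: demux it, no single codec */
    if (pt < RTP_PT_PRIVATE)
        return rtp_get_codec_info(codec, pt);   /* static A/V profile assignment */
    return -1;                     /* dynamic payload type: needs an rtpmap line */
}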

View File

@@ -33,16 +33,23 @@
 typedef struct RTSPState {
     URLContext *rtsp_hd; /* RTSP TCP connexion handle */
+    int nb_rtsp_streams;
+    struct RTSPStream **rtsp_streams;
     /* XXX: currently we use unbuffered input */
     //    ByteIOContext rtsp_gb;
     int seq; /* RTSP command sequence number */
     char session_id[512];
     enum RTSPProtocol protocol;
     char last_reply[2048]; /* XXX: allocate ? */
+    RTPDemuxContext *cur_rtp;
 } RTSPState;
 
 typedef struct RTSPStream {
-    AVFormatContext *ic;
+    URLContext *rtp_handle; /* RTP stream handle */
+    RTPDemuxContext *rtp_ctx; /* RTP parse context */
+    int stream_index; /* corresponding stream index, if any. -1 if none (MPEG2TS case) */
     int interleaved_min, interleaved_max; /* interleave ids, if TCP transport */
     char control_url[1024]; /* url for this stream (from SDP) */
@@ -218,6 +225,7 @@ typedef struct SDPParseState {
 static void sdp_parse_line(AVFormatContext *s, SDPParseState *s1,
                            int letter, const char *buf)
 {
+    RTSPState *rt = s->priv_data;
     char buf1[64], st_type[64];
     const char *p;
     int codec_type, payload_type, i;
@@ -280,16 +288,12 @@ static void sdp_parse_line(AVFormatContext *s, SDPParseState *s1,
         rtsp_st = av_mallocz(sizeof(RTSPStream));
         if (!rtsp_st)
             return;
-        st = av_new_stream(s, s->nb_streams);
-        if (!st)
-            return;
-        st->priv_data = rtsp_st;
+        rtsp_st->stream_index = -1;
+        dynarray_add(&rt->rtsp_streams, &rt->nb_rtsp_streams, rtsp_st);
 
         rtsp_st->sdp_ip = s1->default_ip;
         rtsp_st->sdp_ttl = s1->default_ttl;
 
-        st->codec.codec_type = codec_type;
-
         get_word(buf1, sizeof(buf1), &p); /* port */
         rtsp_st->sdp_port = atoi(buf1);
@@ -298,11 +302,21 @@ static void sdp_parse_line(AVFormatContext *s, SDPParseState *s1,
         /* XXX: handle list of formats */
         get_word(buf1, sizeof(buf1), &p); /* format list */
         rtsp_st->sdp_payload_type = atoi(buf1);
-        if (rtsp_st->sdp_payload_type < 96) {
-            /* if standard payload type, we can find the codec right now */
-            rtp_get_codec_info(&st->codec, rtsp_st->sdp_payload_type);
+        if (rtsp_st->sdp_payload_type == RTP_PT_MPEG2TS) {
+            /* no corresponding stream */
+        } else {
+            st = av_new_stream(s, 0);
+            if (!st)
+                return;
+            st->priv_data = rtsp_st;
+            rtsp_st->stream_index = st->index;
+            st->codec.codec_type = codec_type;
+            if (rtsp_st->sdp_payload_type < 96) {
+                /* if standard payload type, we can find the codec right now */
+                rtp_get_codec_info(&st->codec, rtsp_st->sdp_payload_type);
+            }
         }
         /* put a default control url */
         pstrcpy(rtsp_st->control_url, sizeof(rtsp_st->control_url), s->filename);
         break;
@@ -629,6 +643,25 @@ void rtsp_set_callback(FFRTSPCallback *rtsp_cb)
 }
 
+/* close and free RTSP streams */
+static void rtsp_close_streams(RTSPState *rt)
+{
+    int i;
+    RTSPStream *rtsp_st;
+
+    for(i=0;i<rt->nb_rtsp_streams;i++) {
+        rtsp_st = rt->rtsp_streams[i];
+        if (rtsp_st) {
+            if (rtsp_st->rtp_ctx)
+                rtp_parse_close(rtsp_st->rtp_ctx);
+            if (rtsp_st->rtp_handle)
+                url_close(rtsp_st->rtp_handle);
+        }
+        av_free(rtsp_st);
+    }
+    av_free(rt->rtsp_streams);
+}
+
 static int rtsp_read_header(AVFormatContext *s,
                             AVFormatParameters *ap)
 {
@@ -638,9 +671,9 @@ static int rtsp_read_header(AVFormatContext *s,
     int port, i, ret, err;
     RTSPHeader reply1, *reply = &reply1;
     unsigned char *content = NULL;
-    AVStream *st;
     RTSPStream *rtsp_st;
     int protocol_mask;
+    AVStream *st;
 
     /* extract hostname and port */
     url_split(NULL, 0,
@@ -683,12 +716,10 @@ static int rtsp_read_header(AVFormatContext *s,
     /* for each stream, make the setup request */
     /* XXX: we assume the same server is used for the control of each
        RTSP stream */
-    for(i=0;i<s->nb_streams;i++) {
+    for(i=0;i<rt->nb_rtsp_streams;i++) {
         char transport[2048];
-        AVInputFormat *fmt;
 
-        st = s->streams[i];
-        rtsp_st = st->priv_data;
+        rtsp_st = rt->rtsp_streams[i];
 
         /* compute available transports */
         transport[0] = '\0';
@@ -702,21 +733,19 @@ static int rtsp_read_header(AVFormatContext *s,
             if (rtsp_rtp_port_min != 0) {
                 for(j=rtsp_rtp_port_min;j<=rtsp_rtp_port_max;j++) {
                     snprintf(buf, sizeof(buf), "rtp://?localport=%d", j);
-                    if (!av_open_input_file(&rtsp_st->ic, buf,
-                                            &rtp_demux, 0, NULL))
+                    if (url_open(&rtsp_st->rtp_handle, buf, URL_RDONLY) == 0)
                         goto rtp_opened;
                 }
             }
             /* then try on any port */
-            if (av_open_input_file(&rtsp_st->ic, "rtp://",
-                                   &rtp_demux, 0, NULL) < 0) {
+            if (url_open(&rtsp_st->rtp_handle, "rtp://", URL_RDONLY) < 0) {
                 err = AVERROR_INVALIDDATA;
                 goto fail;
            }
         rtp_opened:
-            port = rtp_get_local_port(url_fileno(&rtsp_st->ic->pb));
+            port = rtp_get_local_port(rtsp_st->rtp_handle);
             if (transport[0] != '\0')
                 pstrcat(transport, sizeof(transport), ",");
             snprintf(transport + strlen(transport), sizeof(transport) - strlen(transport) - 1,
@@ -763,17 +792,12 @@ static int rtsp_read_header(AVFormatContext *s,
         /* close RTP connection if not choosen */
         if (reply->transports[0].protocol != RTSP_PROTOCOL_RTP_UDP &&
             (protocol_mask & (1 << RTSP_PROTOCOL_RTP_UDP))) {
-            av_close_input_file(rtsp_st->ic);
-            rtsp_st->ic = NULL;
+            url_close(rtsp_st->rtp_handle);
+            rtsp_st->rtp_handle = NULL;
         }
 
         switch(reply->transports[0].protocol) {
         case RTSP_PROTOCOL_RTP_TCP:
-            fmt = &rtp_demux;
-            if (av_open_input_file(&rtsp_st->ic, "null", fmt, 0, NULL) < 0) {
-                err = AVERROR_INVALIDDATA;
-                goto fail;
-            }
             rtsp_st->interleaved_min = reply->transports[0].interleaved_min;
             rtsp_st->interleaved_max = reply->transports[0].interleaved_max;
             break;
@@ -785,7 +809,7 @@ static int rtsp_read_header(AVFormatContext *s,
                 /* XXX: also use address if specified */
                 snprintf(url, sizeof(url), "rtp://%s:%d",
                          host, reply->transports[0].server_port_min);
-                if (rtp_set_remote_url(url_fileno(&rtsp_st->ic->pb), url) < 0) {
+                if (rtp_set_remote_url(rtsp_st->rtp_handle, url) < 0) {
                     err = AVERROR_INVALIDDATA;
                     goto fail;
                 }
@@ -796,7 +820,6 @@ static int rtsp_read_header(AVFormatContext *s,
                 char url[1024];
                 int ttl;
 
-                fmt = &rtp_demux;
                 ttl = reply->transports[0].ttl;
                 if (!ttl)
                     ttl = 16;
@@ -804,13 +827,24 @@ static int rtsp_read_header(AVFormatContext *s,
                          host,
                          reply->transports[0].server_port_min,
                          ttl);
-                if (av_open_input_file(&rtsp_st->ic, url, fmt, 0, NULL) < 0) {
+                if (url_open(&rtsp_st->rtp_handle, url, URL_RDONLY) < 0) {
                     err = AVERROR_INVALIDDATA;
                     goto fail;
                 }
             }
             break;
         }
+
+        /* open the RTP context */
+        st = NULL;
+        if (rtsp_st->stream_index >= 0)
+            st = s->streams[rtsp_st->stream_index];
+        if (!st)
+            s->ctx_flags |= AVFMTCTX_NOHEADER;
+        rtsp_st->rtp_ctx = rtp_parse_open(s, st, rtsp_st->sdp_payload_type);
+        if (!rtsp_st->rtp_ctx) {
+            err = AVERROR_NOMEM;
+            goto fail;
+        }
     }
 
     /* use callback if available to extend setup */
@@ -845,28 +879,18 @@ static int rtsp_read_header(AVFormatContext *s,
     return 0;
  fail:
-    for(i=0;i<s->nb_streams;i++) {
-        st = s->streams[i];
-        rtsp_st = st->priv_data;
-        if (rtsp_st) {
-            if (rtsp_st->ic)
-                av_close_input_file(rtsp_st->ic);
-        }
-        av_free(rtsp_st);
-    }
+    rtsp_close_streams(rt);
     av_freep(&content);
     url_close(rt->rtsp_hd);
     return err;
 }
 
-static int tcp_read_packet(AVFormatContext *s,
-                           AVPacket *pkt)
+static int tcp_read_packet(AVFormatContext *s, RTSPStream **prtsp_st,
+                           uint8_t *buf, int buf_size)
 {
     RTSPState *rt = s->priv_data;
     int id, len, i, ret;
-    AVStream *st;
     RTSPStream *rtsp_st;
-    uint8_t buf[RTP_MAX_PACKET_LENGTH];
 
 #ifdef DEBUG_RTP_TCP
     printf("tcp_read_packet:\n");
@@ -878,84 +902,71 @@ static int tcp_read_packet(AVFormatContext *s,
         printf("ret=%d c=%02x [%c]\n", ret, buf[0], buf[0]);
 #endif
         if (ret != 1)
-            return AVERROR_IO;
+            return -1;
        if (buf[0] == '$')
             break;
     }
     ret = url_read(rt->rtsp_hd, buf, 3);
     if (ret != 3)
-        return AVERROR_IO;
+        return -1;
     id = buf[0];
     len = (buf[1] << 8) | buf[2];
 #ifdef DEBUG_RTP_TCP
     printf("id=%d len=%d\n", id, len);
 #endif
-    if (len > RTP_MAX_PACKET_LENGTH || len < 12)
+    if (len > buf_size || len < 12)
         goto redo;
     /* get the data */
     ret = url_read(rt->rtsp_hd, buf, len);
     if (ret != len)
-        return AVERROR_IO;
+        return -1;
     /* find the matching stream */
-    for(i = 0; i < s->nb_streams; i++) {
-        st = s->streams[i];
-        rtsp_st = st->priv_data;
+    for(i = 0; i < rt->nb_rtsp_streams; i++) {
+        rtsp_st = rt->rtsp_streams[i];
         if (id >= rtsp_st->interleaved_min &&
             id <= rtsp_st->interleaved_max)
             goto found;
     }
     goto redo;
 found:
-    ret = rtp_parse_packet(rtsp_st->ic, pkt, buf, len);
-    if (ret < 0)
-        goto redo;
-    pkt->stream_index = i;
-    return ret;
+    *prtsp_st = rtsp_st;
+    return len;
 }
 
-/* NOTE: output one packet at a time. May need to add a small fifo */
-static int udp_read_packet(AVFormatContext *s,
-                           AVPacket *pkt)
+static int udp_read_packet(AVFormatContext *s, RTSPStream **prtsp_st,
+                           uint8_t *buf, int buf_size)
 {
-    AVFormatContext *ic;
-    AVStream *st;
+    RTSPState *rt = s->priv_data;
     RTSPStream *rtsp_st;
     fd_set rfds;
     int fd1, fd2, fd_max, n, i, ret;
-    char buf[RTP_MAX_PACKET_LENGTH];
     struct timeval tv;
 
     for(;;) {
         if (url_interrupt_cb())
-            return -EIO;
+            return -1;
         FD_ZERO(&rfds);
         fd_max = -1;
-        for(i = 0; i < s->nb_streams; i++) {
-            st = s->streams[i];
-            rtsp_st = st->priv_data;
-            ic = rtsp_st->ic;
+        for(i = 0; i < rt->nb_rtsp_streams; i++) {
+            rtsp_st = rt->rtsp_streams[i];
             /* currently, we cannot probe RTCP handle because of blocking restrictions */
-            rtp_get_file_handles(url_fileno(&ic->pb), &fd1, &fd2);
+            rtp_get_file_handles(rtsp_st->rtp_handle, &fd1, &fd2);
             if (fd1 > fd_max)
                 fd_max = fd1;
             FD_SET(fd1, &rfds);
         }
+        /* XXX: also add proper API to abort */
         tv.tv_sec = 0;
         tv.tv_usec = 100 * 1000;
         n = select(fd_max + 1, &rfds, NULL, NULL, &tv);
         if (n > 0) {
-            for(i = 0; i < s->nb_streams; i++) {
-                st = s->streams[i];
-                rtsp_st = st->priv_data;
-                ic = rtsp_st->ic;
-                rtp_get_file_handles(url_fileno(&ic->pb), &fd1, &fd2);
+            for(i = 0; i < rt->nb_rtsp_streams; i++) {
+                rtsp_st = rt->rtsp_streams[i];
+                rtp_get_file_handles(rtsp_st->rtp_handle, &fd1, &fd2);
                 if (FD_ISSET(fd1, &rfds)) {
-                    ret = url_read(url_fileno(&ic->pb), buf, sizeof(buf));
-                    if (ret >= 0 &&
-                        rtp_parse_packet(ic, pkt, buf, ret) == 0) {
-                        pkt->stream_index = i;
+                    ret = url_read(rtsp_st->rtp_handle, buf, buf_size);
+                    if (ret > 0) {
+                        *prtsp_st = rtsp_st;
                         return ret;
                     }
                 }
@@ -968,18 +979,45 @@ static int rtsp_read_packet(AVFormatContext *s,
                             AVPacket *pkt)
 {
     RTSPState *rt = s->priv_data;
-    int ret;
+    RTSPStream *rtsp_st;
+    int ret, len;
+    uint8_t buf[RTP_MAX_PACKET_LENGTH];
 
+    /* get next frames from the same RTP packet */
+    if (rt->cur_rtp) {
+        ret = rtp_parse_packet(rt->cur_rtp, pkt, NULL, 0);
+        if (ret == 0) {
+            rt->cur_rtp = NULL;
+            return 0;
+        } else if (ret == 1) {
+            return 0;
+        } else {
+            rt->cur_rtp = NULL;
+        }
+    }
+
+    /* read next RTP packet */
+ redo:
     switch(rt->protocol) {
     default:
     case RTSP_PROTOCOL_RTP_TCP:
-        ret = tcp_read_packet(s, pkt);
+        len = tcp_read_packet(s, &rtsp_st, buf, sizeof(buf));
         break;
     case RTSP_PROTOCOL_RTP_UDP:
-        ret = udp_read_packet(s, pkt);
+    case RTSP_PROTOCOL_RTP_UDP_MULTICAST:
+        len = udp_read_packet(s, &rtsp_st, buf, sizeof(buf));
         break;
     }
-    return ret;
+    if (len < 0)
+        return AVERROR_IO;
+    ret = rtp_parse_packet(rtsp_st->rtp_ctx, pkt, buf, len);
+    if (ret < 0)
+        goto redo;
+    if (ret == 1) {
+        /* more packets may follow, so we save the RTP context */
+        rt->cur_rtp = rtsp_st->rtp_ctx;
+    }
+    return 0;
 }
 
 /* pause the stream */
@@ -1031,10 +1069,7 @@ int rtsp_resume(AVFormatContext *s)
 static int rtsp_read_close(AVFormatContext *s)
 {
     RTSPState *rt = s->priv_data;
-    AVStream *st;
-    RTSPStream *rtsp_st;
     RTSPHeader reply1, *reply = &reply1;
-    int i;
     char cmd[1024];
 
 #if 0
@@ -1053,15 +1088,7 @@ static int rtsp_read_close(AVFormatContext *s)
                   NULL, 0, NULL);
     }
 
-    for(i=0;i<s->nb_streams;i++) {
-        st = s->streams[i];
-        rtsp_st = st->priv_data;
-        if (rtsp_st) {
-            if (rtsp_st->ic)
-                av_close_input_file(rtsp_st->ic);
-        }
-        av_free(rtsp_st);
-    }
+    rtsp_close_streams(rt);
     url_close(rt->rtsp_hd);
     return 0;
 }
@@ -1101,11 +1128,12 @@ static int sdp_probe(AVProbeData *p1)
 static int sdp_read_header(AVFormatContext *s,
                            AVFormatParameters *ap)
 {
-    AVStream *st;
+    RTSPState *rt = s->priv_data;
     RTSPStream *rtsp_st;
     int size, i, err;
     char *content;
     char url[1024];
+    AVStream *st;
 
     /* read the whole sdp file */
     /* XXX: better loading */
@@ -1121,54 +1149,45 @@ static int sdp_read_header(AVFormatContext *s,
     av_free(content);
 
     /* open each RTP stream */
-    for(i=0;i<s->nb_streams;i++) {
-        st = s->streams[i];
-        rtsp_st = st->priv_data;
+    for(i=0;i<rt->nb_rtsp_streams;i++) {
+        rtsp_st = rt->rtsp_streams[i];
         snprintf(url, sizeof(url), "rtp://%s:%d?multicast=1&ttl=%d",
                  inet_ntoa(rtsp_st->sdp_ip),
                  rtsp_st->sdp_port,
                  rtsp_st->sdp_ttl);
-        if (av_open_input_file(&rtsp_st->ic, url, &rtp_demux, 0, NULL) < 0) {
+        if (url_open(&rtsp_st->rtp_handle, url, URL_RDONLY) < 0) {
             err = AVERROR_INVALIDDATA;
             goto fail;
         }
+        /* open the RTP context */
+        st = NULL;
+        if (rtsp_st->stream_index >= 0)
+            st = s->streams[rtsp_st->stream_index];
+        if (!st)
+            s->ctx_flags |= AVFMTCTX_NOHEADER;
+        rtsp_st->rtp_ctx = rtp_parse_open(s, st, rtsp_st->sdp_payload_type);
+        if (!rtsp_st->rtp_ctx) {
+            err = AVERROR_NOMEM;
+            goto fail;
+        }
     }
     return 0;
  fail:
-    for(i=0;i<s->nb_streams;i++) {
-        st = s->streams[i];
-        rtsp_st = st->priv_data;
-        if (rtsp_st) {
-            if (rtsp_st->ic)
-                av_close_input_file(rtsp_st->ic);
-        }
-        av_free(rtsp_st);
-    }
+    rtsp_close_streams(rt);
    return err;
 }
 
 static int sdp_read_packet(AVFormatContext *s,
                            AVPacket *pkt)
 {
-    return udp_read_packet(s, pkt);
+    return rtsp_read_packet(s, pkt);
 }
 
 static int sdp_read_close(AVFormatContext *s)
 {
-    AVStream *st;
-    RTSPStream *rtsp_st;
-    int i;
-
-    for(i=0;i<s->nb_streams;i++) {
-        st = s->streams[i];
-        rtsp_st = st->priv_data;
-        if (rtsp_st) {
-            if (rtsp_st->ic)
-                av_close_input_file(rtsp_st->ic);
-        }
-        av_free(rtsp_st);
-    }
+    RTSPState *rt = s->priv_data;
+
+    rtsp_close_streams(rt);
     return 0;
 }