changed audio and video grab interface (simpler now)

Originally committed as revision 148 to svn://svn.ffmpeg.org/ffmpeg/trunk
Fabrice Bellard 2001-09-24 23:27:06 +00:00
parent 46a3d0685d
commit 4972b26f24
3 changed files with 291 additions and 205 deletions
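This commit replaces the URLProtocol-based 'audio:' and 'video:' grab handlers with AVFormat device formats (audio_device_format, video_grab_device_format): grab parameters are no longer parsed out of a URL string but handed to the read_header callbacks through AVFormatParameters. A minimal caller-side sketch of that difference, using only struct and field names that appear in the diff below; the concrete 352x288 / 25 fps values are illustrative, and FRAME_RATE_BASE is the frame-rate time base from the libav headers of this era:

#include <string.h>

/* Sketch only: how grab parameters reach the devices after this commit.
 * Previously they were encoded in URLs such as "audio:44100,2" and
 * "video:352,288,25" and parsed by audio_open()/video_open(). */
static void fill_grab_parameters(AVFormatParameters *ap)
{
    memset(ap, 0, sizeof(*ap));

    /* audio grab device */
    ap->sample_rate = 44100;
    ap->channels    = 2;

    /* video grab device; frame_rate is expressed in FRAME_RATE_BASE units */
    ap->width       = 352;
    ap->height      = 288;
    ap->frame_rate  = 25 * FRAME_RATE_BASE;
}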

View File

@@ -30,87 +30,28 @@
const char *audio_device = "/dev/dsp";
typedef struct {
int fd;
int rate;
int channels;
} AudioData;
#define AUDIO_BLOCK_SIZE 4096
/* audio read support */
typedef struct {
int fd;
int sample_rate;
int channels;
int frame_size; /* in bytes ! */
int codec_id;
UINT8 buffer[AUDIO_BLOCK_SIZE];
int buffer_ptr;
} AudioData;
static int audio_read(URLContext *h, UINT8 *buf, int size)
static int audio_open(AudioData *s, int is_output)
{
AudioData *s = h->priv_data;
int ret;
ret = read(s->fd, buf, size);
if (ret < 0)
return -errno;
else
return ret;
}
static int audio_write(URLContext *h, UINT8 *buf, int size)
{
AudioData *s = h->priv_data;
int ret;
ret = write(s->fd, buf, size);
if (ret < 0)
return -errno;
else
return ret;
}
static int audio_get_format(URLContext *h, URLFormat *f)
{
AudioData *s = h->priv_data;
strcpy(f->format_name, "pcm");
f->sample_rate = s->rate;
f->channels = s->channels;
return 0;
}
/* URI syntax: 'audio:[rate[,channels]]'
default: rate=44100, channels=2
*/
static int audio_open(URLContext *h, const char *uri, int flags)
{
AudioData *s;
const char *p;
int freq, channels, audio_fd;
int audio_fd;
int tmp, err;
h->is_streamed = 1;
h->packet_size = AUDIO_BLOCK_SIZE;
s = malloc(sizeof(AudioData));
if (!s)
return -ENOMEM;
h->priv_data = s;
/* extract parameters */
p = uri;
strstart(p, "audio:", &p);
freq = strtol(p, (char **)&p, 0);
if (freq <= 0)
freq = 44100;
if (*p == ',')
p++;
channels = strtol(p, (char **)&p, 0);
if (channels <= 0)
channels = 2;
s->rate = freq;
s->channels = channels;
/* open linux audio device */
if (flags & URL_WRONLY)
audio_fd = open(audio_device,O_WRONLY);
if (is_output)
audio_fd = open(audio_device, O_WRONLY);
else
audio_fd = open(audio_device,O_RDONLY);
audio_fd = open(audio_device, O_RDONLY);
if (audio_fd < 0) {
perror(audio_device);
return -EIO;
@@ -119,60 +60,233 @@ static int audio_open(URLContext *h, const char *uri, int flags)
/* non blocking mode */
fcntl(audio_fd, F_SETFL, O_NONBLOCK);
s->frame_size = AUDIO_BLOCK_SIZE;
#if 0
tmp=(NB_FRAGMENTS << 16) | FRAGMENT_BITS;
err=ioctl(audio_fd, SNDCTL_DSP_SETFRAGMENT, &tmp);
tmp = (NB_FRAGMENTS << 16) | FRAGMENT_BITS;
err = ioctl(audio_fd, SNDCTL_DSP_SETFRAGMENT, &tmp);
if (err < 0) {
perror("SNDCTL_DSP_SETFRAGMENT");
}
#endif
tmp=AFMT_S16_LE;
err=ioctl(audio_fd,SNDCTL_DSP_SETFMT,&tmp);
/* select format : favour native format */
err = ioctl(audio_fd, SNDCTL_DSP_GETFMTS, &tmp);
#ifdef WORDS_BIGENDIAN
if (tmp & AFMT_S16_BE) {
tmp = AFMT_S16_BE;
} else if (tmp & AFMT_S16_LE) {
tmp = AFMT_S16_LE;
} else {
tmp = 0;
}
#else
if (tmp & AFMT_S16_LE) {
tmp = AFMT_S16_LE;
} else if (tmp & AFMT_S16_BE) {
tmp = AFMT_S16_BE;
} else {
tmp = 0;
}
#endif
switch(tmp) {
case AFMT_S16_LE:
s->codec_id = CODEC_ID_PCM_S16LE;
break;
case AFMT_S16_BE:
s->codec_id = CODEC_ID_PCM_S16BE;
break;
default:
fprintf(stderr, "Soundcard does not support 16 bit sample format\n");
close(audio_fd);
return -EIO;
}
err=ioctl(audio_fd, SNDCTL_DSP_SETFMT, &tmp);
if (err < 0) {
perror("SNDCTL_DSP_SETFMT");
goto fail;
}
tmp= (channels == 2);
err=ioctl(audio_fd,SNDCTL_DSP_STEREO,&tmp);
tmp = (s->channels == 2);
err = ioctl(audio_fd, SNDCTL_DSP_STEREO, &tmp);
if (err < 0) {
perror("SNDCTL_DSP_STEREO");
goto fail;
}
tmp = freq;
err=ioctl(audio_fd, SNDCTL_DSP_SPEED, &tmp);
tmp = s->sample_rate;
err = ioctl(audio_fd, SNDCTL_DSP_SPEED, &tmp);
if (err < 0) {
perror("SNDCTL_DSP_SPEED");
goto fail;
}
s->rate = tmp;
s->sample_rate = tmp; /* store real sample rate */
s->fd = audio_fd;
return 0;
fail:
close(audio_fd);
free(s);
return -EIO;
}
static int audio_close(URLContext *h)
static int audio_close(AudioData *s)
{
AudioData *s = h->priv_data;
close(s->fd);
return 0;
}
/* sound output support */
static int audio_write_header(AVFormatContext *s1)
{
AudioData *s;
AVStream *st;
int ret;
s = av_mallocz(sizeof(AudioData));
if (!s)
return -ENOMEM;
s1->priv_data = s;
st = s1->streams[0];
s->sample_rate = st->codec.sample_rate;
s->channels = st->codec.channels;
ret = audio_open(s, 1);
if (ret < 0) {
free(s);
return -EIO;
} else {
return 0;
}
}
static int audio_write_packet(AVFormatContext *s1, int stream_index,
UINT8 *buf, int size)
{
AudioData *s = s1->priv_data;
int len, ret;
while (size > 0) {
len = AUDIO_BLOCK_SIZE - s->buffer_ptr;
if (len > size)
len = size;
memcpy(s->buffer + s->buffer_ptr, buf, len);
s->buffer_ptr += len;
if (s->buffer_ptr >= AUDIO_BLOCK_SIZE) {
for(;;) {
ret = write(s->fd, s->buffer, AUDIO_BLOCK_SIZE);
if (ret != 0)
break;
if (ret < 0 && (errno != EAGAIN && errno != EINTR))
return -EIO;
}
s->buffer_ptr = 0;
}
buf += len;
size -= len;
}
return 0;
}
static int audio_write_trailer(AVFormatContext *s1)
{
AudioData *s = s1->priv_data;
audio_close(s);
free(s);
return 0;
}
URLProtocol audio_protocol = {
"audio",
audio_open,
audio_read,
audio_write,
NULL, /* seek */
audio_close,
audio_get_format,
/* grab support */
static int audio_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
AudioData *s;
AVStream *st;
int ret;
if (!ap || ap->sample_rate <= 0 || ap->channels <= 0)
return -1;
s = av_mallocz(sizeof(AudioData));
if (!s)
return -ENOMEM;
st = av_mallocz(sizeof(AVStream));
if (!st) {
free(s);
return -ENOMEM;
}
s1->priv_data = s;
s1->nb_streams = 1;
s1->streams[0] = st;
s->sample_rate = ap->sample_rate;
s->channels = ap->channels;
ret = audio_open(s, 0);
if (ret < 0) {
free(st);
free(s);
return -EIO;
} else {
/* take real parameters */
st->codec.codec_type = CODEC_TYPE_AUDIO;
st->codec.codec_id = s->codec_id;
st->codec.sample_rate = s->sample_rate;
st->codec.channels = s->channels;
return 0;
}
}
static int audio_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
AudioData *s = s1->priv_data;
int ret;
if (av_new_packet(pkt, s->frame_size) < 0)
return -EIO;
for(;;) {
ret = read(s->fd, pkt->data, pkt->size);
if (ret > 0)
break;
if (!(ret == 0 || (ret == -1 && (errno == EAGAIN || errno == EINTR)))) {
av_free_packet(pkt);
return -EIO;
}
}
pkt->size = ret;
return 0;
}
static int audio_read_close(AVFormatContext *s1)
{
AudioData *s = s1->priv_data;
audio_close(s);
free(s);
return 0;
}
AVFormat audio_device_format = {
"audio_device",
"audio grab and output",
"",
"",
/* XXX: we make the assumption that the soundcard accepts this format */
/* XXX: find better solution with "preinit" method, needed also in
other formats */
#ifdef WORDS_BIGENDIAN
CODEC_ID_PCM_S16BE,
#else
CODEC_ID_PCM_S16LE,
#endif
CODEC_ID_NONE,
audio_write_header,
audio_write_packet,
audio_write_trailer,
audio_read_header,
audio_read_packet,
audio_read_close,
NULL,
AVFMT_NOFILE,
};
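
For reference, a hedged sketch of how the new read side fits together: audio_read_header() needs a valid sample_rate and channel count in AVFormatParameters, audio_read_packet() returns raw native-endian 16-bit PCM in blocks of up to AUDIO_BLOCK_SIZE bytes, and audio_read_close() releases the device. In the tree these functions are static and are reached through the audio_device_format table above, so calling them directly and stack-allocating the AVFormatContext here is purely illustrative:

#include <errno.h>
#include <string.h>

/* Illustrative only: drives the demuxer half of audio_device_format by
 * hand; the generic libav layer would normally go through the AVFormat
 * function pointers instead. */
static int grab_one_second_of_audio(void)
{
    AVFormatContext ctx;
    AVFormatParameters ap;
    AVPacket pkt;
    int i;

    memset(&ctx, 0, sizeof(ctx));
    memset(&ap, 0, sizeof(ap));
    ap.sample_rate = 44100;
    ap.channels = 2;

    if (audio_read_header(&ctx, &ap) < 0)
        return -EIO;

    /* 44100 Hz * 2 channels * 2 bytes = 176400 bytes/s, so ~43 blocks of
       4096 bytes is roughly one second of audio */
    for (i = 0; i < 43; i++) {
        if (audio_read_packet(&ctx, &pkt) < 0)
            break;
        /* ... consume pkt.data / pkt.size here ... */
        av_free_packet(&pkt);
    }
    return audio_read_close(&ctx);
}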

View File

@@ -141,11 +141,3 @@ extern URLProtocol udp_protocol;
/* http.c */
extern URLProtocol http_protocol;
/* audio.c */
extern const char *audio_device;
extern URLProtocol audio_protocol;
/* grab.c */
extern const char *v4l_device;
extern URLProtocol video_protocol;

View File

@@ -30,8 +30,9 @@ typedef struct {
int frame_format; /* see VIDEO_PALETTE_xxx */
int use_mmap;
int width, height;
float rate;
int frame_rate;
INT64 time_frame;
int frame_size;
} VideoData;
const char *v4l_device = "/dev/video";
@@ -45,20 +46,41 @@ static struct video_mmap gb_buf;
static struct video_audio audio, audio_saved;
static int gb_frame = 0;
static int v4l_init(URLContext *h)
static int grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
VideoData *s = h->priv_data;
VideoData *s;
AVStream *st;
int width, height;
int ret;
int video_fd, frame_size;
int ret, frame_rate;
if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
return -1;
width = s->width;
height = s->height;
width = ap->width;
height = ap->height;
frame_rate = ap->frame_rate;
s = av_mallocz(sizeof(VideoData));
if (!s)
return -ENOMEM;
st = av_mallocz(sizeof(AVStream));
if (!st) {
free(s);
return -ENOMEM;
}
s1->priv_data = s;
s1->nb_streams = 1;
s1->streams[0] = st;
s->width = width;
s->height = height;
s->frame_rate = frame_rate;
video_fd = open(v4l_device, O_RDWR);
if (video_fd < 0) {
perror(v4l_device);
return -EIO;
goto fail;
}
if (ioctl(video_fd,VIDIOCGCAP,&video_cap) < 0) {
@@ -166,27 +188,38 @@ static int v4l_init(URLContext *h)
switch(s->frame_format) {
case VIDEO_PALETTE_YUV420P:
frame_size = (width * height * 3) / 2;
st->codec.pix_fmt = PIX_FMT_YUV420P;
break;
case VIDEO_PALETTE_YUV422:
frame_size = width * height * 2;
st->codec.pix_fmt = PIX_FMT_YUV422;
break;
case VIDEO_PALETTE_RGB24:
frame_size = width * height * 3;
st->codec.pix_fmt = PIX_FMT_BGR24; /* NOTE: v4l uses BGR24, not RGB24 ! */
break;
default:
goto fail;
}
s->fd = video_fd;
h->packet_size = frame_size;
s->frame_size = frame_size;
st->codec.codec_id = CODEC_ID_RAWVIDEO;
st->codec.width = width;
st->codec.height = height;
st->codec.frame_rate = frame_rate;
return 0;
fail:
close(video_fd);
if (video_fd >= 0)
close(video_fd);
free(st);
free(s);
return -EIO;
}
static int v4l_mm_read_picture(URLContext *h, UINT8 *buf)
static int v4l_mm_read_picture(VideoData *s, UINT8 *buf)
{
VideoData *s = h->priv_data;
UINT8 *ptr;
gb_buf.frame = gb_frame;
@@ -203,105 +236,44 @@ static int v4l_mm_read_picture(URLContext *h, UINT8 *buf)
(errno == EAGAIN || errno == EINTR));
ptr = video_buf + gb_buffers.offsets[gb_frame];
memcpy(buf, ptr, h->packet_size);
return h->packet_size;
memcpy(buf, ptr, s->frame_size);
return s->frame_size;
}
/* note: we support only one picture read at a time */
static int video_read(URLContext *h, UINT8 *buf, int size)
static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
VideoData *s = h->priv_data;
INT64 curtime;
if (size != h->packet_size)
return -EINVAL;
VideoData *s = s1->priv_data;
INT64 curtime, delay;
struct timespec ts;
/* wait based on the frame rate */
s->time_frame += (int)(1000000 / s->rate);
do {
s->time_frame += (INT64_C(1000000) * FRAME_RATE_BASE) / s->frame_rate;
for(;;) {
curtime = gettime();
} while (curtime < s->time_frame);
delay = s->time_frame - curtime;
if (delay <= 0)
break;
ts.tv_sec = delay / 1000000;
ts.tv_nsec = (delay % 1000000) * 1000;
nanosleep(&ts, NULL);
}
if (av_new_packet(pkt, s->frame_size) < 0)
return -EIO;
/* read one frame */
if (s->use_mmap) {
return v4l_mm_read_picture(h, buf);
return v4l_mm_read_picture(s, pkt->data);
} else {
if (read(s->fd, buf, size) != size)
if (read(s->fd, pkt->data, pkt->size) != pkt->size)
return -EIO;
return h->packet_size;
return s->frame_size;
}
}
static int video_get_format(URLContext *h, URLFormat *f)
static int grab_read_close(AVFormatContext *s1)
{
VideoData *s = h->priv_data;
f->width = s->width;
f->height = s->height;
f->frame_rate = (int)(s->rate * FRAME_RATE_BASE);
strcpy(f->format_name, "rawvideo");
switch(s->frame_format) {
case VIDEO_PALETTE_YUV420P:
f->pix_fmt = PIX_FMT_YUV420P;
break;
case VIDEO_PALETTE_YUV422:
f->pix_fmt = PIX_FMT_YUV422;
break;
case VIDEO_PALETTE_RGB24:
f->pix_fmt = PIX_FMT_BGR24; /* NOTE: v4l uses BGR24, not RGB24 ! */
break;
default:
abort();
}
return 0;
}
/* URI syntax: 'video:width,height,rate'
*/
static int video_open(URLContext *h, const char *uri, int flags)
{
VideoData *s;
const char *p;
int width, height;
int ret;
float rate;
/* extract parameters */
p = uri;
strstart(p, "video:", &p);
width = strtol(p, (char **)&p, 0);
if (width <= 0)
return -EINVAL;
if (*p == ',')
p++;
height = strtol(p, (char **)&p, 0);
if (height <= 0)
return -EINVAL;
if (*p == ',')
p++;
rate = strtod(p, (char **)&p);
if (rate <= 0)
return -EINVAL;
s = malloc(sizeof(VideoData));
if (!s)
return -ENOMEM;
h->priv_data = s;
h->is_streamed = 1;
s->width = width;
s->height = height;
s->rate = rate;
ret = v4l_init(h);
if (ret)
free(s);
return ret;
}
static int video_close(URLContext *h)
{
VideoData *s = h->priv_data;
VideoData *s = s1->priv_data;
/* restore audio settings */
ioctl(s->fd, VIDIOCSAUDIO, &audio_saved);
@@ -310,12 +282,20 @@ static int video_close(URLContext *h)
return 0;
}
URLProtocol video_protocol = {
"video",
video_open,
video_read,
AVFormat video_grab_device_format = {
"video_grab_device",
"video grab",
"",
"",
CODEC_ID_NONE,
CODEC_ID_NONE,
NULL,
NULL, /* seek */
video_close,
video_get_format,
NULL,
NULL,
grab_read_header,
grab_read_packet,
grab_read_close,
NULL,
AVFMT_NOFILE,
};
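
The capture loop in grab_read_packet() above paces reads to the requested frame rate: frame_rate is stored in FRAME_RATE_BASE units, time_frame tracks the target time of the next frame in microseconds, and any remaining delay is slept off with nanosleep(). A sketch of that arithmetic pulled out into a helper; gettime() returning microseconds and the use of FRAME_RATE_BASE as the frame-rate time base are assumptions carried over from the surrounding libav code:

#include <time.h>

/* Sketch of the frame pacing used by grab_read_packet(); not part of
 * the commit itself. */
static void wait_for_next_frame(INT64 *time_frame, int frame_rate)
{
    INT64 curtime, delay;
    struct timespec ts;

    /* one frame period = 1000000 * FRAME_RATE_BASE / frame_rate microseconds */
    *time_frame += (INT64_C(1000000) * FRAME_RATE_BASE) / frame_rate;

    for (;;) {
        curtime = gettime();             /* current time in microseconds */
        delay = *time_frame - curtime;
        if (delay <= 0)
            break;                       /* target reached: grab the frame now */
        ts.tv_sec = delay / 1000000;
        ts.tv_nsec = (delay % 1000000) * 1000;
        nanosleep(&ts, NULL);            /* sleep off the remaining delay */
    }
}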