fftools/ffplay: stop using AVFrame.pkt_pos

This field is ad-hoc and will be deprecated. Use the recently-added
AV_CODEC_FLAG_COPY_OPAQUE to pass arbitrary user data from packets to
frames.
This commit is contained in:
Anton Khirnov 2023-03-09 16:59:12 +01:00
parent c29e5ab5c1
commit 2fb3ee1787
1 changed file with 24 additions and 2 deletions

View File

@@ -146,6 +146,10 @@ typedef struct Clock {
int *queue_serial; /* pointer to the current packet queue serial, used for obsolete clock detection */
} Clock;
/* Per-packet user data carried from demuxed packets to decoded frames via
 * AVPacket.opaque_ref / AVFrame.opaque_ref. Allocated and filled in
 * decoder_decode_frame() before avcodec_send_packet(); the decoder copies it
 * onto output frames because the "+copy_opaque" codec flag is set in
 * stream_component_open(). Consumed in audio_thread()/video_thread(). */
typedef struct FrameData {
int64_t pkt_pos; /* byte offset of the source packet in the input (AVPacket.pos); readers fall back to -1 when absent */
} FrameData;
/* Common struct for handling all types of decoded data and allocated render buffers. */
typedef struct Frame {
AVFrame *frame;
@@ -640,6 +644,16 @@ static int decoder_decode_frame(Decoder *d, AVFrame *frame, AVSubtitle *sub) {
}
av_packet_unref(d->pkt);
} else {
if (d->pkt->buf && !d->pkt->opaque_ref) {
FrameData *fd;
d->pkt->opaque_ref = av_buffer_allocz(sizeof(*fd));
if (!d->pkt->opaque_ref)
return AVERROR(ENOMEM);
fd = (FrameData*)d->pkt->opaque_ref->data;
fd->pkt_pos = d->pkt->pos;
}
if (avcodec_send_packet(d->avctx, d->pkt) == AVERROR(EAGAIN)) {
av_log(d->avctx, AV_LOG_ERROR, "Receive_frame and send_packet both returned EAGAIN, which is an API violation.\n");
d->packet_pending = 1;
@@ -2041,12 +2055,13 @@ static int audio_thread(void *arg)
goto the_end;
while ((ret = av_buffersink_get_frame_flags(is->out_audio_filter, frame, 0)) >= 0) {
FrameData *fd = frame->opaque_ref ? (FrameData*)frame->opaque_ref->data : NULL;
tb = av_buffersink_get_time_base(is->out_audio_filter);
if (!(af = frame_queue_peek_writable(&is->sampq)))
goto the_end;
af->pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
af->pos = frame->pkt_pos;
af->pos = fd ? fd->pkt_pos : -1;
af->serial = is->auddec.pkt_serial;
af->duration = av_q2d((AVRational){frame->nb_samples, frame->sample_rate});
@@ -2145,6 +2160,8 @@ static int video_thread(void *arg)
goto the_end;
while (ret >= 0) {
FrameData *fd;
is->frame_last_returned_time = av_gettime_relative() / 1000000.0;
ret = av_buffersink_get_frame_flags(filt_out, frame, 0);
@@ -2155,13 +2172,15 @@ static int video_thread(void *arg)
break;
}
fd = frame->opaque_ref ? (FrameData*)frame->opaque_ref->data : NULL;
is->frame_last_filter_delay = av_gettime_relative() / 1000000.0 - is->frame_last_returned_time;
if (fabs(is->frame_last_filter_delay) > AV_NOSYNC_THRESHOLD / 10.0)
is->frame_last_filter_delay = 0;
tb = av_buffersink_get_time_base(filt_out);
duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
ret = queue_picture(is, frame, pts, duration, frame->pkt_pos, is->viddec.pkt_serial);
ret = queue_picture(is, frame, pts, duration, fd ? fd->pkt_pos : -1, is->viddec.pkt_serial);
av_frame_unref(frame);
if (is->videoq.serial != is->viddec.pkt_serial)
break;
@@ -2567,6 +2586,9 @@ static int stream_component_open(VideoState *is, int stream_index)
av_dict_set(&opts, "threads", "auto", 0);
if (stream_lowres)
av_dict_set_int(&opts, "lowres", stream_lowres, 0);
av_dict_set(&opts, "flags", "+copy_opaque", AV_DICT_MULTIKEY);
if ((ret = avcodec_open2(avctx, codec, &opts)) < 0) {
goto fail;
}