msdkav1enc: Reorder pts

This is a workaround for the PTS handling, because oneVPL cannot return
the correct PTS when B-frames are present. We first cache each input
frame's PTS in a queue, then retrieve the smallest (i.e. oldest) one for
the output encoded frame, since a coded frame is only output once it is
displayable.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/2089>
Mengkejiergeli Ba 2022-04-22 06:46:07 +00:00 committed by Haihao Xiang
parent ae7dfb9680
commit 7696ca83b2
4 changed files with 46 additions and 4 deletions
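
For reference, a minimal standalone sketch of the queueing idea described in the commit message. The helper names and the heap-allocated guint64 storage are illustrative assumptions, not the plugin's code; as the diff below shows, the plugin itself simply casts the mfx timestamp to a gpointer before pushing it.

#include <glib.h>

/* Input timestamps enter a FIFO as frames are submitted to the encoder.
 * Because the input pts is monotonically increasing, the head of the queue
 * is always the smallest cached pts, so popping it yields the pts for the
 * next displayable coded frame. */
static void
cache_input_pts (GQueue * timestamps, guint64 pts)
{
  guint64 *copy = g_new (guint64, 1);

  *copy = pts;
  g_queue_push_tail (timestamps, copy);
}

static guint64
take_output_pts (GQueue * timestamps)
{
  guint64 *head = g_queue_pop_head (timestamps);
  guint64 pts = *head;

  g_free (head);
  return pts;
}

int
main (void)
{
  GQueue *timestamps = g_queue_new ();
  guint64 input_pts[] = { 0, 3600, 7200, 10800 };       /* 90 kHz ticks */
  guint i;

  for (i = 0; i < G_N_ELEMENTS (input_pts); i++)
    cache_input_pts (timestamps, input_pts[i]);

  for (i = 0; i < G_N_ELEMENTS (input_pts); i++)
    g_print ("coded frame %u -> pts %" G_GUINT64_FORMAT "\n", i,
        take_output_pts (timestamps));

  g_queue_free (timestamps);
  return 0;
}

Note that the patch below avoids the extra allocation by storing the 64-bit timestamp directly as a gpointer, which assumes pointers are at least 64 bits wide.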

@@ -209,6 +209,24 @@ profile_to_string (gint profile)
return NULL;
}
static void
gst_msdkav1enc_set_timestamp (GstMsdkEnc * encoder, mfxU64 timestamp)
{
GstMsdkAV1Enc *thiz = GST_MSDKAV1ENC (encoder);
g_queue_push_tail (thiz->timestamp, (gpointer) timestamp);
}
static mfxU64
gst_msdkav1enc_get_timestamp (GstMsdkEnc * encoder)
{
GstMsdkAV1Enc *thiz = GST_MSDKAV1ENC (encoder);
mfxU64 pts;
pts = (mfxU64) g_queue_pop_head (thiz->timestamp);
return pts;
}
static gint
gst_msdkav1enc_find_show_frame (GstMsdkAV1Enc * thiz, guint8 * data, gsize size,
gsize * offset)
@@ -291,6 +309,7 @@ gst_msdkav1enc_flush_frames (GstMsdkEnc * encoder)
{
GstVideoCodecFrame *frame;
GstBuffer *out_buf = NULL;
mfxU64 pts;
while (1) {
if (!gst_msdkav1enc_pre_finish (encoder, &out_buf, NULL, 0))
@@ -299,6 +318,8 @@ gst_msdkav1enc_flush_frames (GstMsdkEnc * encoder)
break;
frame = gst_video_encoder_get_oldest_frame (GST_VIDEO_ENCODER (encoder));
frame->output_buffer = out_buf;
pts = gst_msdkav1enc_get_timestamp (encoder);
frame->pts = gst_util_uint64_scale (pts, GST_SECOND, 90000);
gst_video_codec_frame_unref (frame);
gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (encoder), frame);
}
@@ -429,6 +450,8 @@ gst_msdkav1enc_class_init (GstMsdkAV1EncClass * klass)
encoder_class->qp_min = 0;
encoder_class->pre_finish = gst_msdkav1enc_pre_finish;
encoder_class->flush_frames = gst_msdkav1enc_flush_frames;
encoder_class->set_timestamp = gst_msdkav1enc_set_timestamp;
encoder_class->get_timestamp = gst_msdkav1enc_get_timestamp;
gst_msdkenc_install_common_properties (encoder_class);
@@ -474,4 +497,5 @@ gst_msdkav1enc_init (GstMsdkAV1Enc * thiz)
thiz->p_pyramid = PROP_P_PYRAMID_DEFAULT;
thiz->adapter = gst_adapter_new ();
thiz->parser = gst_av1_parser_new ();
thiz->timestamp = g_queue_new ();
}

@@ -67,6 +67,7 @@ struct _GstMsdkAV1Enc
GstAdapter *adapter;
GstAV1Parser *parser;
GQueue *timestamp;
};
struct _GstMsdkAV1EncClass

@@ -1115,6 +1115,7 @@ gst_msdkenc_finish_frame (GstMsdkEnc * thiz, MsdkEncTask * task,
GstMsdkEncClass *klass = GST_MSDKENC_GET_CLASS (thiz);
GstVideoCodecFrame *frame;
GList *list;
mfxU64 pts;
if (!task->sync_point)
return GST_FLOW_OK;
@@ -1160,13 +1161,21 @@
}
frame->output_buffer = out_buf;
frame->pts =
gst_util_uint64_scale (task->output_bitstream.TimeStamp, GST_SECOND,
90000);
/* This is a workaround for the output pts, because oneVPL cannot return
 * the correct pts for each display frame. We use the input frames' pts as
 * the output ones, since oneVPL returns the coded frames in display order.
 */
if (klass->get_timestamp) {
pts = klass->get_timestamp (thiz);
frame->pts = gst_util_uint64_scale (pts, GST_SECOND, 90000);
} else {
frame->pts =
gst_util_uint64_scale (task->output_bitstream.TimeStamp, GST_SECOND,
90000);
}
frame->dts =
gst_util_uint64_scale (task->output_bitstream.DecodeTimeStamp,
GST_SECOND, 90000);
if ((task->output_bitstream.FrameType & MFX_FRAMETYPE_IDR) != 0 ||
(task->output_bitstream.FrameType & MFX_FRAMETYPE_xIDR) != 0) {
GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
@@ -1214,6 +1223,7 @@ gst_msdkenc_encode_frame (GstMsdkEnc * thiz, mfxFrameSurface1 * surface,
status =
MFXVideoENCODE_EncodeFrameAsync (session, &thiz->enc_cntrl, surface,
&task->output_bitstream, &task->sync_point);
if (status != MFX_WRN_DEVICE_BUSY)
break;
/* If device is busy, wait 1ms and retry, as per MSDK's recommendation */
@@ -1922,6 +1932,9 @@ gst_msdkenc_handle_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
if (frame->pts != GST_CLOCK_TIME_NONE) {
surface->surface->Data.TimeStamp =
gst_util_uint64_scale (frame->pts, 90000, GST_SECOND);
if (klass->set_timestamp)
klass->set_timestamp (thiz, surface->surface->Data.TimeStamp);
} else {
surface->surface->Data.TimeStamp = MFX_TIMESTAMP_UNKNOWN;
}

@@ -192,6 +192,10 @@ struct _GstMsdkEncClass
guint8 *data, gsize size);
void (*flush_frames) (GstMsdkEnc * encoder);
void (*set_timestamp) (GstMsdkEnc * encoder, mfxU64 pts);
mfxU64 (*get_timestamp) (GstMsdkEnc * encoder);
guint qp_max;
guint qp_min;
};
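
As a side note, here is a standalone sketch of the GstClockTime <-> 90 kHz mfx timestamp conversion used in the gst_msdkenc_handle_frame and gst_msdkenc_finish_frame hunks above (the 40 ms value is an arbitrary example; compile against gstreamer-1.0):

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstClockTime pts, back;
  guint64 mfx_ts;

  gst_init (&argc, &argv);

  /* 40 ms in GStreamer nanoseconds (one frame at 25 fps) */
  pts = 40 * GST_MSECOND;

  /* GstClockTime (ns) -> 90 kHz mfx timebase: 40 ms becomes 3600 ticks */
  mfx_ts = gst_util_uint64_scale (pts, 90000, GST_SECOND);

  /* 90 kHz -> GstClockTime, as done when finishing a frame */
  back = gst_util_uint64_scale (mfx_ts, GST_SECOND, 90000);

  g_print ("pts %" GST_TIME_FORMAT " -> %" G_GUINT64_FORMAT
      " ticks -> %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (pts), mfx_ts, GST_TIME_ARGS (back));

  return 0;
}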