vp8enc: finish support for temporally scaled encoding

  - introduce two new properties:

    * temporal-scalability-layer-flags:

      Provide fine-grained control of layer encoding to the
      outside world. The length of the flags sequence should be a
      multiple of the periodicity; it is indexed by a running count
      of encoded frames modulo the sequence length.

    * temporal-scalability-layer-sync-flags:

      Specify the pattern of inter-layer synchronisation (i.e.
      which of the frames generated by the layer encoding
      specification represent an inter-layer synchronisation).
      There must be one entry per entry in
      temporal-scalability-layer-flags.

  - apply temporal scalability settings and expose as buffer
    metadata.

    This allows the codec to allocate a given frame to the correct
    internal bitrate allocator. Additionally, all the
    non-bitstream metadata needed to payload a temporally scaled
    stream is now attached to each output buffer as a
    GstVideoVP8Meta.

  - add unit test for temporally scaled encoding.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-good/-/merge_requests/728>
This commit is contained in:
John-Mark Bell 2017-09-08 08:19:20 +01:00 committed by Tim-Philipp Müller
parent 0429c24637
commit d9cedee042
6 changed files with 554 additions and 8 deletions

View file

@ -25699,6 +25699,17 @@
"type": "gint",
"writable": true
},
"temporal-scalability-layer-flags": {
"blurb": "Sequence defining coding layer flags",
"conditionally-available": false,
"construct": false,
"construct-only": false,
"controllable": false,
"mutable": "null",
"readable": true,
"type": "GstValueArray",
"writable": true
},
"temporal-scalability-layer-id": {
"blurb": "Sequence defining coding layer membership",
"conditionally-available": false,
@ -25710,6 +25721,17 @@
"type": "GValueArray",
"writable": true
},
"temporal-scalability-layer-sync-flags": {
"blurb": "Sequence defining coding layer sync flags",
"conditionally-available": false,
"construct": false,
"construct-only": false,
"controllable": false,
"mutable": "null",
"readable": true,
"type": "GstValueArray",
"writable": true
},
"temporal-scalability-number-layers": {
"blurb": "Number of coding layers to use",
"conditionally-available": false,

View file

@ -72,6 +72,9 @@ typedef struct
{
vpx_image_t *image;
GList *invisible;
guint layer_id;
guint8 tl0picidx;
gboolean layer_sync;
} GstVP8EncUserData;
static void
@ -104,6 +107,15 @@ static GstFlowReturn gst_vp8_enc_handle_invisible_frame_buffer (GstVPXEnc * enc,
void *user_data, GstBuffer * buffer);
static void gst_vp8_enc_set_frame_user_data (GstVPXEnc * enc,
GstVideoCodecFrame * frame, vpx_image_t * image);
static void gst_vp8_enc_apply_frame_temporal_settings (GstVPXEnc * enc,
GstVideoCodecFrame * frame, guint layer_id, guint8 tl0picidx,
gboolean layer_sync);
static void gst_vp8_enc_get_frame_temporal_settings (GstVPXEnc * enc,
GstVideoCodecFrame * frame, guint * layer_id, guint8 * tl0picidx,
gboolean * layer_sync);
static void gst_vp8_enc_preflight_buffer (GstVPXEnc * enc,
GstVideoCodecFrame * frame, GstBuffer * buffer,
gboolean layer_sync, guint layer_id, guint8 tl0picidx);
static GstFlowReturn gst_vp8_enc_pre_push (GstVideoEncoder * encoder,
GstVideoCodecFrame * frame);
@ -163,6 +175,11 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass)
vpx_encoder_class->handle_invisible_frame_buffer =
gst_vp8_enc_handle_invisible_frame_buffer;
vpx_encoder_class->set_frame_user_data = gst_vp8_enc_set_frame_user_data;
vpx_encoder_class->apply_frame_temporal_settings =
gst_vp8_enc_apply_frame_temporal_settings;
vpx_encoder_class->get_frame_temporal_settings =
gst_vp8_enc_get_frame_temporal_settings;
vpx_encoder_class->preflight_buffer = gst_vp8_enc_preflight_buffer;
GST_DEBUG_CATEGORY_INIT (gst_vp8enc_debug, "vp8enc", 0, "VP8 Encoder");
}
@ -330,6 +347,67 @@ gst_vp8_enc_set_frame_user_data (GstVPXEnc * enc, GstVideoCodecFrame * frame,
return;
}
/* Record the temporal-layer parameters for @frame and inform libvpx of
 * the temporal layer the next encoded frame belongs to (so the codec can
 * use the matching per-layer bitrate allocator).
 *
 * The values are stashed in the frame's GstVP8EncUserData so that
 * gst_vp8_enc_get_frame_temporal_settings() can retrieve them once the
 * frame has been encoded. */
static void
gst_vp8_enc_apply_frame_temporal_settings (GstVPXEnc * enc,
    GstVideoCodecFrame * frame, guint layer_id, guint8 tl0picidx,
    gboolean layer_sync)
{
  GstVP8EncUserData *priv = gst_video_codec_frame_get_user_data (frame);

  if (priv == NULL) {
    GST_ERROR_OBJECT (enc, "Have no frame user data");
    return;
  }

  /* Tell the codec which temporal layer this frame is coded on */
  vpx_codec_control (&enc->encoder, VP8E_SET_TEMPORAL_LAYER_ID, layer_id);

  priv->layer_id = layer_id;
  priv->tl0picidx = tl0picidx;
  priv->layer_sync = layer_sync;
}
/* Retrieve the temporal-layer parameters previously recorded on @frame
 * by gst_vp8_enc_apply_frame_temporal_settings().
 *
 * All three out-parameters are always written; if the frame carries no
 * user data an error is logged and safe defaults (layer 0, tl0picidx 0,
 * no layer sync) are returned instead. */
static void
gst_vp8_enc_get_frame_temporal_settings (GstVPXEnc * enc,
    GstVideoCodecFrame * frame, guint * layer_id, guint8 * tl0picidx,
    gboolean * layer_sync)
{
  const GstVP8EncUserData *priv = gst_video_codec_frame_get_user_data (frame);

  if (priv != NULL) {
    *layer_id = priv->layer_id;
    *tl0picidx = priv->tl0picidx;
    *layer_sync = priv->layer_sync;
  } else {
    GST_ERROR_OBJECT (enc, "Have no frame user data");
    *layer_id = 0;
    *tl0picidx = 0;
    *layer_sync = FALSE;
  }
}
/* Attach a fresh GstVP8Meta to every output @buffer, carrying the
 * non-bitstream temporal-scalability data a payloader needs:
 * whether temporal scaling is active, the layer-sync flag, the
 * temporal layer id and the TL0PICIDX counter. */
static void
gst_vp8_enc_preflight_buffer (GstVPXEnc * enc,
    GstVideoCodecFrame * frame, GstBuffer * buffer,
    gboolean layer_sync, guint layer_id, guint8 tl0picidx)
{
  /* ts_periodicity == 0 means temporal scalability is not configured */
  gboolean use_ts = (enc->cfg.ts_periodicity != 0);
  GstCustomMeta *meta;
  GstStructure *s;

  meta = gst_buffer_add_custom_meta (buffer, "GstVP8Meta");
  s = gst_custom_meta_get_structure (meta);

  gst_structure_set (s,
      "use-temporal-scaling", G_TYPE_BOOLEAN, use_ts,
      "layer-sync", G_TYPE_BOOLEAN, layer_sync,
      "layer-id", G_TYPE_UINT, layer_id,
      "tl0picidx", G_TYPE_UINT, tl0picidx, NULL);
}
static guint64
_to_granulepos (guint64 frame_end_number, guint inv_count, guint keyframe_dist)
{

View file

@ -75,6 +75,8 @@ GST_DEBUG_CATEGORY_STATIC (gst_vpxenc_debug);
#define DEFAULT_TS_RATE_DECIMATOR NULL
#define DEFAULT_TS_PERIODICITY 0
#define DEFAULT_TS_LAYER_ID NULL
#define DEFAULT_TS_LAYER_FLAGS NULL
#define DEFAULT_TS_LAYER_SYNC_FLAGS NULL
#define DEFAULT_ERROR_RESILIENT 0
#define DEFAULT_LAG_IN_FRAMES 0
@ -130,6 +132,8 @@ enum
PROP_TS_RATE_DECIMATOR,
PROP_TS_PERIODICITY,
PROP_TS_LAYER_ID,
PROP_TS_LAYER_FLAGS,
PROP_TS_LAYER_SYNC_FLAGS,
PROP_MULTIPASS_MODE,
PROP_MULTIPASS_CACHE_FILE,
PROP_ERROR_RESILIENT,
@ -316,6 +320,35 @@ gst_vpx_enc_er_flags_get_type (void)
return id;
}
#define GST_VPX_ENC_TS_LAYER_FLAGS_TYPE (gst_vpx_enc_ts_layer_flags_get_type())

/* Lazily register the GFlags type backing the
 * temporal-scalability-layer-flags property.  Each nick maps a libvpx
 * VP8_EFLAG_* per-frame encode flag to a user-facing name usable in
 * gst_util_set_object_arg() strings (e.g. "no-ref-golden+no-upd-alt"). */
static GType
gst_vpx_enc_ts_layer_flags_get_type (void)
{
  static const GFlagsValue values[] = {
    {VP8_EFLAG_NO_REF_LAST, "Don't reference the last frame", "no-ref-last"},
    {VP8_EFLAG_NO_REF_GF, "Don't reference the golden frame", "no-ref-golden"},
    {VP8_EFLAG_NO_REF_ARF, "Don't reference the alternate reference frame",
        "no-ref-alt"},
    {VP8_EFLAG_NO_UPD_LAST, "Don't update the last frame", "no-upd-last"},
    {VP8_EFLAG_NO_UPD_GF, "Don't update the golden frame", "no-upd-golden"},
    {VP8_EFLAG_NO_UPD_ARF, "Don't update the alternate reference frame",
        "no-upd-alt"},
    {VP8_EFLAG_NO_UPD_ENTROPY, "Disable entropy update", "no-upd-entropy"},
    {0, NULL, NULL}
  };
  /* One-time, thread-safe registration via g_once_init_enter/leave */
  static volatile GType id = 0;

  if (g_once_init_enter ((gsize *) & id)) {
    GType _id;

    _id = g_flags_register_static ("GstVPXEncTsLayerFlags", values);

    g_once_init_leave ((gsize *) & id, _id);
  }

  return id;
}
static void gst_vpx_enc_finalize (GObject * object);
static void gst_vpx_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
@ -335,12 +368,13 @@ static gboolean gst_vpx_enc_sink_event (GstVideoEncoder *
video_encoder, GstEvent * event);
static gboolean gst_vpx_enc_propose_allocation (GstVideoEncoder * encoder,
GstQuery * query);
static gboolean gst_vpx_enc_transform_meta (GstVideoEncoder * encoder,
GstVideoCodecFrame * frame, GstMeta * meta);
#define parent_class gst_vpx_enc_parent_class
G_DEFINE_TYPE_WITH_CODE (GstVPXEnc, gst_vpx_enc, GST_TYPE_VIDEO_ENCODER,
G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL);
G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL);
);
G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL););
static void
gst_vpx_enc_class_init (GstVPXEncClass * klass)
@ -363,6 +397,7 @@ gst_vpx_enc_class_init (GstVPXEncClass * klass)
video_encoder_class->finish = gst_vpx_enc_finish;
video_encoder_class->sink_event = gst_vpx_enc_sink_event;
video_encoder_class->propose_allocation = gst_vpx_enc_propose_allocation;
video_encoder_class->transform_meta = gst_vpx_enc_transform_meta;
g_object_class_install_property (gobject_class, PROP_RC_END_USAGE,
g_param_spec_enum ("end-usage", "Rate control mode",
@ -555,6 +590,36 @@ gst_vpx_enc_class_init (GstVPXEncClass * klass)
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_DOC_SHOW_DEFAULT));
/**
* GstVPXEnc:temporal-scalability-layer-flags:
*
* Sequence defining coding layer flags
*
* Since: 1.20
*/
g_object_class_install_property (gobject_class, PROP_TS_LAYER_FLAGS,
gst_param_spec_array ("temporal-scalability-layer-flags",
"Coding layer flags", "Sequence defining coding layer flags",
g_param_spec_flags ("flags", "Flags", "Flags",
GST_VPX_ENC_TS_LAYER_FLAGS_TYPE, 0,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS),
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
* GstVPXEnc:temporal-scalability-layer-sync-flags:
*
* Sequence defining coding layer sync flags
*
* Since: 1.20
*/
g_object_class_install_property (gobject_class, PROP_TS_LAYER_SYNC_FLAGS,
gst_param_spec_array ("temporal-scalability-layer-sync-flags",
"Coding layer sync flags",
"Sequence defining coding layer sync flags",
g_param_spec_boolean ("flags", "Flags", "Flags", FALSE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS),
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_LAG_IN_FRAMES,
g_param_spec_int ("lag-in-frames", "Lag in frames",
"Maximum number of frames to lag",
@ -744,6 +809,10 @@ gst_vpx_enc_init (GstVPXEnc * gst_vpx_enc)
gst_vpx_enc->n_ts_rate_decimator = 0;
gst_vpx_enc->cfg.ts_periodicity = DEFAULT_TS_PERIODICITY;
gst_vpx_enc->n_ts_layer_id = 0;
gst_vpx_enc->n_ts_layer_flags = 0;
gst_vpx_enc->ts_layer_flags = NULL;
gst_vpx_enc->n_ts_layer_sync_flags = 0;
gst_vpx_enc->ts_layer_sync_flags = NULL;
gst_vpx_enc->cfg.g_error_resilient = DEFAULT_ERROR_RESILIENT;
gst_vpx_enc->cfg.g_lag_in_frames = DEFAULT_LAG_IN_FRAMES;
gst_vpx_enc->cfg.g_threads = DEFAULT_THREADS;
@ -765,6 +834,8 @@ gst_vpx_enc_init (GstVPXEnc * gst_vpx_enc)
gst_vpx_enc->timebase_n = DEFAULT_TIMEBASE_N;
gst_vpx_enc->timebase_d = DEFAULT_TIMEBASE_D;
gst_vpx_enc->bits_per_pixel = DEFAULT_BITS_PER_PIXEL;
gst_vpx_enc->tl0picidx = 0;
gst_vpx_enc->prev_was_keyframe = FALSE;
gst_vpx_enc->cfg.g_profile = DEFAULT_PROFILE;
@ -781,6 +852,9 @@ gst_vpx_enc_finalize (GObject * object)
g_return_if_fail (GST_IS_VPX_ENC (object));
gst_vpx_enc = GST_VPX_ENC (object);
g_free (gst_vpx_enc->ts_layer_flags);
g_free (gst_vpx_enc->ts_layer_sync_flags);
g_free (gst_vpx_enc->multipass_cache_prefix);
g_free (gst_vpx_enc->multipass_cache_file);
gst_vpx_enc->multipass_cache_idx = 0;
@ -1002,6 +1076,45 @@ gst_vpx_enc_set_property (GObject * object, guint prop_id,
global = TRUE;
break;
}
case PROP_TS_LAYER_FLAGS:{
gint l = gst_value_array_get_size (value);
g_free (gst_vpx_enc->ts_layer_flags);
gst_vpx_enc->n_ts_layer_flags = 0;
if (l > 0) {
gint i;
gst_vpx_enc->ts_layer_flags = g_new (gint, l);
for (i = 0; i < l; i++)
gst_vpx_enc->ts_layer_flags[i] =
g_value_get_flags (gst_value_array_get_value (value, i));
gst_vpx_enc->n_ts_layer_flags = l;
} else {
gst_vpx_enc->ts_layer_flags = NULL;
}
break;
}
case PROP_TS_LAYER_SYNC_FLAGS:{
gint l = gst_value_array_get_size (value);
g_free (gst_vpx_enc->ts_layer_sync_flags);
gst_vpx_enc->n_ts_layer_sync_flags = 0;
if (l > 0) {
gint i;
gst_vpx_enc->ts_layer_sync_flags = g_new (gboolean, l);
for (i = 0; i < l; i++)
gst_vpx_enc->ts_layer_sync_flags[i] =
g_value_get_boolean (gst_value_array_get_value (value, i));
gst_vpx_enc->n_ts_layer_sync_flags = l;
} else {
gst_vpx_enc->ts_layer_sync_flags = NULL;
}
break;
}
case PROP_ERROR_RESILIENT:
gst_vpx_enc->cfg.g_error_resilient = g_value_get_flags (value);
global = TRUE;
@ -1368,6 +1481,32 @@ gst_vpx_enc_get_property (GObject * object, guint prop_id, GValue * value,
}
break;
}
case PROP_TS_LAYER_FLAGS:{
gint i;
for (i = 0; i < gst_vpx_enc->n_ts_layer_flags; i++) {
GValue v = { 0, };
g_value_init (&v, GST_VPX_ENC_TS_LAYER_FLAGS_TYPE);
g_value_set_flags (&v, gst_vpx_enc->ts_layer_flags[i]);
gst_value_array_append_value (value, &v);
g_value_unset (&v);
}
break;
}
case PROP_TS_LAYER_SYNC_FLAGS:{
gint i;
for (i = 0; i < gst_vpx_enc->n_ts_layer_sync_flags; i++) {
GValue v = { 0, };
g_value_init (&v, G_TYPE_BOOLEAN);
g_value_set_boolean (&v, gst_vpx_enc->ts_layer_sync_flags[i]);
gst_value_array_append_value (value, &v);
g_value_unset (&v);
}
break;
}
case PROP_ERROR_RESILIENT:
g_value_set_flags (value, gst_vpx_enc->cfg.g_error_resilient);
break;
@ -1845,6 +1984,9 @@ gst_vpx_enc_process (GstVPXEnc * encoder)
GstFlowReturn ret = GST_FLOW_OK;
GstVPXEncClass *vpx_enc_class;
vpx_codec_pts_t pts;
guint layer_id = 0;
guint8 tl0picidx = 0;
gboolean layer_sync = FALSE;
video_encoder = GST_VIDEO_ENCODER (encoder);
vpx_enc_class = GST_VPX_ENC_GET_CLASS (encoder);
@ -1901,10 +2043,6 @@ gst_vpx_enc_process (GstVPXEnc * encoder)
} while (pkt->data.frame.pts > pts);
g_assert (frame != NULL);
if ((pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0)
GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
else
GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
/* FIXME : It would be nice to avoid the memory copy ... */
buffer =
@ -1912,6 +2050,42 @@ gst_vpx_enc_process (GstVPXEnc * encoder)
pkt->data.frame.sz), pkt->data.frame.sz);
user_data = vpx_enc_class->process_frame_user_data (encoder, frame);
if (vpx_enc_class->get_frame_temporal_settings &&
encoder->cfg.ts_periodicity != 0) {
vpx_enc_class->get_frame_temporal_settings (encoder, frame,
&layer_id, &tl0picidx, &layer_sync);
}
if (layer_id != 0 && encoder->prev_was_keyframe) {
/* Non-base layer frame immediately after a keyframe is a layer sync */
layer_sync = TRUE;
}
if ((pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0) {
GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
/* Key frames always live on layer 0 */
layer_id = 0;
layer_sync = TRUE;
encoder->prev_was_keyframe = TRUE;
} else {
GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
encoder->prev_was_keyframe = FALSE;
}
if ((pkt->data.frame.flags & VPX_FRAME_IS_DROPPABLE) != 0)
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DROPPABLE);
else
GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DROPPABLE);
if (layer_id == 0) {
/* Allocate a new tl0picidx if this is layer 0 */
tl0picidx = ++encoder->tl0picidx;
}
if (vpx_enc_class->preflight_buffer) {
vpx_enc_class->preflight_buffer (encoder, frame, buffer,
layer_sync, layer_id, tl0picidx);
}
if (invisible) {
ret =
@ -2095,6 +2269,25 @@ gst_vpx_enc_handle_frame (GstVideoEncoder * video_encoder,
duration = 1;
}
if (encoder->n_ts_layer_flags != 0) {
/* If we need a keyframe, then the pattern is irrelevant */
if ((flags & VPX_EFLAG_FORCE_KF) == 0) {
flags |=
encoder->ts_layer_flags[frame->system_frame_number %
encoder->n_ts_layer_flags];
}
}
if (vpx_enc_class->apply_frame_temporal_settings &&
encoder->cfg.ts_periodicity != 0 &&
encoder->n_ts_layer_id >= encoder->cfg.ts_periodicity) {
vpx_enc_class->apply_frame_temporal_settings (encoder, frame,
encoder->cfg.ts_layer_id[frame->system_frame_number %
encoder->cfg.ts_periodicity], encoder->tl0picidx,
encoder->ts_layer_sync_flags[frame->system_frame_number %
encoder->n_ts_layer_sync_flags]);
}
status = vpx_codec_encode (&encoder->encoder, image,
pts, duration, flags, encoder->deadline);
@ -2142,4 +2335,22 @@ gst_vpx_enc_propose_allocation (GstVideoEncoder * encoder, GstQuery * query)
query);
}
/* GstVideoEncoder::transform_meta implementation.
 *
 * Returns TRUE for metas that may be copied from the input frame to the
 * output buffer.  GstVP8Meta is excluded: the encoder attaches a fresh
 * one to each output buffer itself (see the vp8enc preflight_buffer
 * vfunc), so a stale copy on the input must not be forwarded. */
static gboolean
gst_vpx_enc_transform_meta (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame, GstMeta * meta)
{
  if (gst_meta_info_is_custom (meta->info) &&
      gst_custom_meta_has_name ((GstCustomMeta *) meta, "GstVP8Meta"))
    return FALSE;

  return TRUE;
}
#endif /* HAVE_VP8_ENCODER || HAVE_VP9_ENCODER */

View file

@ -72,6 +72,10 @@ struct _GstVPXEnc
gint n_ts_target_bitrate;
gint n_ts_rate_decimator;
gint n_ts_layer_id;
gint n_ts_layer_flags;
gint *ts_layer_flags;
gint n_ts_layer_sync_flags;
gboolean *ts_layer_sync_flags;
/* Global two-pass options */
gchar *multipass_cache_file;
gchar *multipass_cache_prefix;
@ -105,6 +109,8 @@ struct _GstVPXEnc
/* state */
gboolean inited;
guint8 tl0picidx;
gboolean prev_was_keyframe;
vpx_image_t image;
@ -131,9 +137,23 @@ struct _GstVPXEncClass
/*process user data*/
void* (*process_frame_user_data) (GstVPXEnc *enc, GstVideoCodecFrame* frame);
/*set frame user data*/
void (*set_frame_user_data) (GstVPXEnc *enc, GstVideoCodecFrame* frame, vpx_image_t *image);
void (*set_frame_user_data) (GstVPXEnc *enc, GstVideoCodecFrame* frame,
vpx_image_t *image);
/*Handle invisible frame*/
GstFlowReturn (*handle_invisible_frame_buffer) (GstVPXEnc *enc, void* user_data, GstBuffer* buffer);
GstFlowReturn (*handle_invisible_frame_buffer) (GstVPXEnc *enc,
void* user_data, GstBuffer* buffer);
/*apply temporal settings -- called with encoder lock*/
void (*apply_frame_temporal_settings) (GstVPXEnc *enc,
GstVideoCodecFrame* frame, guint layer_id, guint8 tl0picidx,
gboolean layer_sync);
/*get temporal settings*/
void (*get_frame_temporal_settings) (GstVPXEnc *enc,
GstVideoCodecFrame *frame, guint * layer_id, guint8 *tl0picidx,
gboolean *layer_sync);
/* preflight buffer */
void (*preflight_buffer) (GstVPXEnc *enc,
GstVideoCodecFrame *frame, GstBuffer *buffer,
gboolean layer_sync, guint layer_id, guint8 tl0picidx);
};
GType gst_vpx_enc_get_type (void);

View file

@ -33,6 +33,8 @@
static gboolean
plugin_init (GstPlugin * plugin)
{
static const gchar *tags[] = { NULL };
#ifdef HAVE_VP8_DECODER
gst_element_register (plugin, "vp8dec", GST_RANK_PRIMARY,
gst_vp8_dec_get_type ());
@ -53,6 +55,9 @@ plugin_init (GstPlugin * plugin)
gst_vp9_enc_get_type ());
#endif
if (!gst_meta_register_custom ("GstVP8Meta", tags, NULL, NULL, NULL))
return FALSE;
return TRUE;
}

View file

@ -17,6 +17,9 @@
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#define GLIB_DISABLE_DEPRECATION_WARNINGS
#include <gst/check/gstharness.h>
#include <gst/check/gstcheck.h>
#include <gst/video/video.h>
@ -210,6 +213,211 @@ GST_START_TEST (test_autobitrate_changes_with_caps)
GST_END_TEST;
/* Assert that @buffer carries a GstVP8Meta whose fields match the
 * expectation: @usets (temporal scaling in use), @ybit (layer-sync),
 * @tid (temporal layer id) and @tl0picindex (TL0PICIDX counter).
 * Fails the test if the meta is absent or any field differs. */
#define verify_meta(buffer, usets, ybit, tid, tl0picindex) \
G_STMT_START { \
gboolean use_temporal_scaling, layer_sync; \
guint temporal_layer_id, tl0picidx; \
GstCustomMeta *meta = gst_buffer_get_custom_meta (buffer, \
"GstVP8Meta"); \
GstStructure *s; \
fail_unless (meta != NULL); \
s = gst_custom_meta_get_structure (meta); \
fail_unless (gst_structure_get (s, \
"use-temporal-scaling", G_TYPE_BOOLEAN, &use_temporal_scaling, \
"layer-sync", G_TYPE_BOOLEAN, &layer_sync, \
"layer-id", G_TYPE_UINT, &temporal_layer_id, \
"tl0picidx", G_TYPE_UINT, &tl0picidx, NULL)); \
fail_unless_equals_int (usets, use_temporal_scaling); \
fail_unless_equals_int (ybit, layer_sync); \
fail_unless_equals_int (tid, temporal_layer_id); \
fail_unless_equals_int (tl0picindex, tl0picidx); \
} G_STMT_END
/* Configure @h's vp8enc element for temporally scaled encoding:
 * 3 coding layers, a layer-id pattern of 0,2,1,2 (periodicity 4), and an
 * 8-entry layer-flags/sync-flags pattern (two layer-id periods).
 *
 * The pre-existing array properties (decimators, layer ids, bitrates)
 * use the deprecated GValueArray API (hence the deprecation-warnings
 * define at the top of this file); the two new properties use
 * GstValueArray / gst_util_set_object_arg. */
static void
configure_vp8ts (GstHarness * h)
{
  gint i;
  GValue layer_sync_flags = G_VALUE_INIT;
  GValueArray *decimators = g_value_array_new (3);
  GValueArray *layer_ids = g_value_array_new (4);
  GValueArray *bitrates = g_value_array_new (3);
  GValue ival = { 0, }, bval = {
  0,};

  gst_value_array_init (&layer_sync_flags, 8);

  g_value_init (&ival, G_TYPE_INT);

  for (i = 0; i < 3; i++) {
    /* 7.5, 15, 30fps */
    static const gint d[] = { 4, 2, 1 };
    g_value_set_int (&ival, d[i]);
    g_value_array_append (decimators, &ival);
  }

  /* Per-frame temporal layer membership, repeated every 4 frames */
  for (i = 0; i < 4; i++) {
    static const gint d[] = { 0, 2, 1, 2 };
    g_value_set_int (&ival, d[i]);
    g_value_array_append (layer_ids, &ival);
  }

  for (i = 0; i < 3; i++) {
    /* Split 512kbps 40%, 20%, 40% */
    static const gint d[] = { 204800, 307200, 512000 };
    g_value_set_int (&ival, d[i]);
    g_value_array_append (bitrates, &ival);
  }

  /* Per-frame reference/update flags for the 8-frame pattern, parsed
   * from a serialized GstValueArray string */
  gst_util_set_object_arg (G_OBJECT (h->element),
      "temporal-scalability-layer-flags",
      /* layer 0 */
      "<no-ref-golden+no-upd-golden+no-upd-alt,"
      /* layer 2 (sync) */
      "no-ref-golden+no-upd-last+no-upd-golden+no-upd-alt+no-upd-entropy,"
      /* layer 1 (sync) */
      "no-ref-golden+no-upd-last+no-upd-alt,"
      /* layer 2 */
      "no-upd-last+no-upd-golden+no-upd-alt+no-upd-entropy,"
      /* layer 0 */
      "no-ref-golden+no-upd-golden+no-upd-alt,"
      /* layer 2 */
      "no-upd-last+no-upd-golden+no-upd-alt+no-upd-entropy,"
      /* layer 1 */
      "no-upd-last+no-upd-alt,"
      /* layer 2 */
      "no-upd-last+no-upd-golden+no-upd-alt+no-upd-entropy>");

  g_value_init (&bval, G_TYPE_BOOLEAN);

  for (i = 0; i < 8; i++) {
    /* Reflect pattern above */
    static const gboolean d[] = {
      FALSE,
      TRUE,
      TRUE,
      FALSE,
      FALSE,
      FALSE,
      FALSE,
      FALSE
    };
    g_value_set_boolean (&bval, d[i]);
    gst_value_array_append_value (&layer_sync_flags, &bval);
  }

  g_object_set_property (G_OBJECT (h->element),
      "temporal-scalability-layer-sync-flags", &layer_sync_flags);

  g_object_set (h->element,
      "temporal-scalability-number-layers", decimators->n_values,
      "temporal-scalability-periodicity", layer_ids->n_values,
      "temporal-scalability-rate-decimator", decimators,
      "temporal-scalability-layer-id", layer_ids,
      "temporal-scalability-target-bitrate", bitrates,
      "error-resilient", 1, NULL);

  g_value_array_free (decimators);
  g_value_array_free (layer_ids);
  g_value_array_free (bitrates);
  g_value_unset (&layer_sync_flags);
}
/* Encode 16 frames with the 3-layer configuration from configure_vp8ts()
 * and verify, per frame: the keyframe/delta flag, the DROPPABLE buffer
 * flag, and the GstVP8Meta fields (layer-sync, layer-id, tl0picidx).
 * tl0picidx is expected to increment on every layer-0 frame. */
GST_START_TEST (test_encode_temporally_scaled)
{
  gint i;
  /* Expected meta per output frame, following the 0,2,1,2 layer pattern */
  struct
  {
    gboolean ybit;
    gint tid;
    gint tl0picidx;
    gboolean droppable;
  } expected[] = {
    {
    TRUE, 0, 1, FALSE},         /* This is an intra */
    {
    TRUE, 2, 1, TRUE}, {
    TRUE, 1, 1, FALSE}, {
    FALSE, 2, 1, TRUE}, {
    FALSE, 0, 2, FALSE}, {
    FALSE, 2, 2, TRUE}, {
    FALSE, 1, 2, FALSE}, {
    FALSE, 2, 2, TRUE}, {
    FALSE, 0, 3, FALSE}, {
    TRUE, 2, 3, TRUE}, {
    TRUE, 1, 3, FALSE}, {
    FALSE, 2, 3, TRUE}, {
    FALSE, 0, 4, FALSE}, {
    FALSE, 2, 4, TRUE}, {
    FALSE, 1, 4, FALSE}, {
    FALSE, 2, 4, TRUE},};
  GstHarness *h = gst_harness_new ("vp8enc");

  gst_harness_set_src_caps (h, gst_caps_new_i420 (320, 240));

  configure_vp8ts (h);

  /* Push 16 identical 30fps frames and check each encoded output */
  for (i = 0; i < 16; i++) {
    GstBuffer *in, *out;

    in = gst_harness_create_video_buffer_full (h, 0x42,
        320, 240, gst_util_uint64_scale (i, GST_SECOND, 30),
        gst_util_uint64_scale (1, GST_SECOND, 30));

    gst_harness_push (h, in);

    out = gst_harness_pull (h);

    /* Ensure first frame is encoded as an intra */
    if (i == 0)
      fail_if (GST_BUFFER_FLAG_IS_SET (out, GST_BUFFER_FLAG_DELTA_UNIT));
    else
      fail_unless (GST_BUFFER_FLAG_IS_SET (out, GST_BUFFER_FLAG_DELTA_UNIT));

    fail_unless_equals_int (expected[i].droppable,
        GST_BUFFER_FLAG_IS_SET (out, GST_BUFFER_FLAG_DROPPABLE));

    verify_meta (out, TRUE, expected[i].ybit, expected[i].tid,
        expected[i].tl0picidx);

    gst_buffer_unref (out);
  }

  gst_harness_teardown (h);
}
/* Verify that a GstVP8Meta already present on the *input* buffer is not
 * copied to the output: the encoder must attach its own fresh meta
 * (exercises gst_vpx_enc_transform_meta rejecting GstVP8Meta). */
GST_START_TEST (test_encode_fresh_meta)
{
  gint i;
  GstBuffer *buffer;
  GstHarness *h = gst_harness_new ("vp8enc");
  GstCustomMeta *meta;
  GstStructure *s;

  gst_harness_set_src_caps (h, gst_caps_new_i420_full (320, 240, 25, 1, 1, 1));

  buffer = gst_harness_create_video_buffer_full (h, 0x0,
      320, 240, gst_util_uint64_scale (0, GST_SECOND, 25),
      gst_util_uint64_scale (1, GST_SECOND, 25));

  /* Attach bogus meta to input buffer */
  meta = gst_buffer_add_custom_meta (buffer, "GstVP8Meta");
  s = gst_custom_meta_get_structure (meta);
  gst_structure_set (s,
      "use-temporal-scaling", G_TYPE_BOOLEAN, FALSE,
      "layer-sync", G_TYPE_BOOLEAN, FALSE,
      "layer-id", G_TYPE_UINT, 0, "tl0picidx", G_TYPE_UINT, 0, NULL);

  /* Push the same buffer twice; each output must carry encoder-generated
   * values (layer-sync only on the first/keyframe, incrementing
   * tl0picidx), not the zeros planted on the input */
  for (i = 0; i < 2; i++) {
    GstBuffer *out;

    fail_unless_equals_int (GST_FLOW_OK,
        gst_harness_push (h, gst_buffer_ref (buffer)));

    out = gst_harness_pull (h);

    /* Ensure that output buffer has fresh meta */
    verify_meta (out, FALSE, (i == 0), 0, i + 1);

    gst_buffer_unref (out);
  }

  gst_buffer_unref (buffer);
  gst_harness_teardown (h);
}
static Suite *
vp8enc_suite (void)
{
@ -222,6 +430,8 @@ vp8enc_suite (void)
tcase_add_test (tc_chain, test_encode_lag_in_frames);
tcase_add_test (tc_chain, test_encode_simple_when_bitrate_set_to_zero);
tcase_add_test (tc_chain, test_autobitrate_changes_with_caps);
tcase_add_test (tc_chain, test_encode_temporally_scaled);
tcase_add_test (tc_chain, test_encode_fresh_meta);
return s;
}