nvcodec: Remove old nvenc implementation

Stop shipping deprecated implementation

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/6754>
Seungha Yang 2024-05-12 18:49:09 +09:00 committed by GStreamer Marge Bot
parent 0ffbe20d31
commit b74422dcbc
10 changed files with 0 additions and 5414 deletions
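For context, the removed files registered the "nvh264enc" and "nvh265enc" element factories (plus lower-ranked per-device "nvh264device%denc" / "nvh265device%denc" variants). Below is a minimal sketch, not part of this commit, of how an application requests one of these elements; the "bitrate" property is taken from the GstNvBaseEnc property list in the removed header and is assumed to be expressed in kbit/s.

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *enc;

  gst_init (&argc, &argv);

  /* returns NULL when no NVENC-capable device provides the element */
  enc = gst_element_factory_make ("nvh264enc", NULL);
  if (enc == NULL) {
    g_printerr ("nvh264enc is not available\n");
    return 1;
  }

  /* "bitrate" is assumed to be in kbit/s; 0 keeps the NVENC preset default */
  g_object_set (enc, "bitrate", 4000, NULL);

  gst_object_unref (enc);
  return 0;
}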

File diff suppressed because it is too large


@@ -1,194 +0,0 @@
/* GStreamer NVENC plugin
* Copyright (C) 2015 Centricular Ltd
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_NV_BASE_ENC_H_INCLUDED__
#define __GST_NV_BASE_ENC_H_INCLUDED__
#include "gstnvenc.h"
#include <gst/video/gstvideoencoder.h>
#include <gst/cuda/gstcuda.h>
#define GST_TYPE_NV_BASE_ENC \
(gst_nv_base_enc_get_type())
#define GST_NV_BASE_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_NV_BASE_ENC,GstNvBaseEnc))
#define GST_NV_BASE_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_NV_BASE_ENC,GstNvBaseEncClass))
#define GST_NV_BASE_ENC_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_NV_BASE_ENC,GstNvBaseEncClass))
#define GST_IS_NV_BASE_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_NV_BASE_ENC))
#define GST_IS_NV_BASE_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_NV_BASE_ENC))
typedef enum {
GST_NV_PRESET_DEFAULT,
GST_NV_PRESET_HP,
GST_NV_PRESET_HQ,
/* FIXME: problematic GST_NV_PRESET_BD, */
GST_NV_PRESET_LOW_LATENCY_DEFAULT,
GST_NV_PRESET_LOW_LATENCY_HQ,
GST_NV_PRESET_LOW_LATENCY_HP,
GST_NV_PRESET_LOSSLESS_DEFAULT,
GST_NV_PRESET_LOSSLESS_HP,
} GstNvPreset;
typedef enum {
GST_NV_RC_MODE_DEFAULT,
GST_NV_RC_MODE_CONSTQP,
GST_NV_RC_MODE_CBR,
GST_NV_RC_MODE_VBR,
GST_NV_RC_MODE_VBR_MINQP,
GST_NV_RC_MODE_CBR_LOWDELAY_HQ,
GST_NV_RC_MODE_CBR_HQ,
GST_NV_RC_MODE_VBR_HQ,
} GstNvRCMode;
typedef enum
{
GST_NVENC_MEM_TYPE_SYSTEM = 0,
GST_NVENC_MEM_TYPE_GL,
GST_NVENC_MEM_TYPE_CUDA,
/* FIXME: add support for D3D11 memory */
} GstNvEncMemType;
typedef struct {
gboolean weighted_prediction;
gint rc_modes;
gboolean custom_vbv_bufsize;
gboolean lookahead;
gboolean temporal_aq;
gint bframes;
} GstNvEncDeviceCaps;
typedef struct {
gint qp_i;
gint qp_p;
gint qp_b;
} GstNvEncQP;
typedef struct {
GstVideoEncoder video_encoder;
/* properties */
GstNvPreset preset_enum;
GUID selected_preset;
GstNvRCMode rate_control_mode;
gint qp_min;
GstNvEncQP qp_min_detail;
gint qp_max;
GstNvEncQP qp_max_detail;
gint qp_const;
GstNvEncQP qp_const_detail;
guint bitrate;
gint gop_size;
guint max_bitrate;
gboolean spatial_aq;
guint aq_strength;
gboolean non_refp;
/* zero reorder delay (consistent naming with x264) */
gboolean zerolatency;
gboolean strict_gop;
gdouble const_quality;
gboolean i_adapt;
GstCudaContext * cuda_ctx;
GstCudaStream * stream;
void * encoder;
NV_ENC_INITIALIZE_PARAMS init_params;
NV_ENC_CONFIG config;
/* the supported input formats */
GValue * input_formats; /* OBJECT LOCK */
GstVideoCodecState *input_state;
gint reconfig; /* ATOMIC */
GstNvEncMemType mem_type;
/* array of allocated input/output buffers (GstNvEncFrameState),
* and hold the ownership of the GstNvEncFrameState. */
GArray *items;
/* (GstNvEncFrameState) available empty items which could be submitted
* to encoder */
GAsyncQueue *available_queue;
/* (GstNvEncFrameState) submitted to encoder but not ready to finish
* (due to bframe or lookahead operation) */
GAsyncQueue *pending_queue;
/* (GstNvEncFrameState) submitted to encoder and ready to finish.
* finished items will go back to available item queue */
GAsyncQueue *bitstream_queue;
/* we spawn a thread that does the (blocking) waits for output buffers
* to become available, so we can continue to feed data to the encoder
* while we wait */
GThread *bitstream_thread;
GstObject *display; /* GstGLDisplay */
GstObject *other_context; /* GstGLContext */
GstObject *gl_context; /* GstGLContext */
GstVideoInfo input_info; /* buffer configuration for buffers sent to NVENC */
GstFlowReturn last_flow; /* ATOMIC */
/* the first frame when bframe was enabled */
GstVideoCodecFrame *first_frame;
GstClockTime dts_offset;
/*< protected >*/
/* device capability dependent properties, set by subclass */
gboolean weighted_pred;
guint vbv_buffersize;
guint rc_lookahead;
gboolean temporal_aq;
guint bframes;
gboolean b_adapt;
} GstNvBaseEnc;
typedef struct {
GstVideoEncoderClass video_encoder_class;
GUID codec_id;
guint cuda_device_id;
GstNvEncDeviceCaps device_caps;
gboolean (*set_src_caps) (GstNvBaseEnc * nvenc,
GstVideoCodecState * state);
gboolean (*set_pic_params) (GstNvBaseEnc * nvenc,
GstVideoCodecFrame * frame,
NV_ENC_PIC_PARAMS * pic_params);
gboolean (*set_encoder_config) (GstNvBaseEnc * nvenc,
GstVideoCodecState * state,
NV_ENC_CONFIG * config);
} GstNvBaseEncClass;
GType gst_nv_base_enc_get_type (void);
GType gst_nv_base_enc_register (const char * codec,
guint device_id,
GstNvEncDeviceCaps * device_caps);
void gst_nv_base_enc_schedule_reconfig (GstNvBaseEnc * nvenc);
#endif /* __GST_NV_BASE_ENC_H_INCLUDED__ */


@@ -22,10 +22,7 @@
#endif
#include "gstnvenc.h"
#include "gstnvh264enc.h"
#include "gstnvh265enc.h"
#include <gst/cuda/gstcudautils.h>
#include <gst/cuda/gstcudabufferpool.h>
#include <string.h>
#include <gmodule.h>
@@ -305,598 +302,6 @@ NvEncSetIOCudaStreams (void *encoder, NV_ENC_CUSTREAM_PTR input_stream,
return nvenc_api.nvEncSetIOCudaStreams (encoder, input_stream, output_stream);
}
gboolean
gst_nvenc_cmp_guid (GUID g1, GUID g2)
{
return (g1.Data1 == g2.Data1 && g1.Data2 == g2.Data2 && g1.Data3 == g2.Data3
&& g1.Data4[0] == g2.Data4[0] && g1.Data4[1] == g2.Data4[1]
&& g1.Data4[2] == g2.Data4[2] && g1.Data4[3] == g2.Data4[3]
&& g1.Data4[4] == g2.Data4[4] && g1.Data4[5] == g2.Data4[5]
&& g1.Data4[6] == g2.Data4[6] && g1.Data4[7] == g2.Data4[7]);
}
NV_ENC_BUFFER_FORMAT
gst_nvenc_get_nv_buffer_format (GstVideoFormat fmt)
{
switch (fmt) {
case GST_VIDEO_FORMAT_NV12:
return NV_ENC_BUFFER_FORMAT_NV12_PL;
case GST_VIDEO_FORMAT_YV12:
return NV_ENC_BUFFER_FORMAT_YV12_PL;
case GST_VIDEO_FORMAT_I420:
return NV_ENC_BUFFER_FORMAT_IYUV_PL;
case GST_VIDEO_FORMAT_Y444:
return NV_ENC_BUFFER_FORMAT_YUV444_PL;
case GST_VIDEO_FORMAT_P010_10LE:
case GST_VIDEO_FORMAT_P010_10BE:
return NV_ENC_BUFFER_FORMAT_YUV420_10BIT;
case GST_VIDEO_FORMAT_BGRA:
return NV_ENC_BUFFER_FORMAT_ARGB;
case GST_VIDEO_FORMAT_RGBA:
return NV_ENC_BUFFER_FORMAT_ABGR;
case GST_VIDEO_FORMAT_BGR10A2_LE:
return NV_ENC_BUFFER_FORMAT_ARGB10;
case GST_VIDEO_FORMAT_RGB10A2_LE:
return NV_ENC_BUFFER_FORMAT_ABGR10;
case GST_VIDEO_FORMAT_Y444_16LE:
case GST_VIDEO_FORMAT_Y444_16BE:
return NV_ENC_BUFFER_FORMAT_YUV444_10BIT;
case GST_VIDEO_FORMAT_VUYA:
return NV_ENC_BUFFER_FORMAT_AYUV;
default:
break;
}
return NV_ENC_BUFFER_FORMAT_UNDEFINED;
}
typedef struct
{
GstVideoFormat gst_format;
NV_ENC_BUFFER_FORMAT nv_format;
gboolean is_10bit;
gboolean supported;
} GstNvEncFormat;
gboolean
gst_nvenc_get_supported_input_formats (gpointer encoder, GUID codec_id,
GValue ** formats)
{
guint32 i, count = 0;
NV_ENC_BUFFER_FORMAT format_list[64];
GValue val = G_VALUE_INIT;
GValue *ret = NULL;
NV_ENC_CAPS_PARAM param = { 0, };
gint support_yuv444 = 0;
gint support_10bit = 0;
guint num_format = 0;
GstNvEncFormat format_map[] = {
{GST_VIDEO_FORMAT_NV12, NV_ENC_BUFFER_FORMAT_NV12, FALSE, FALSE},
{GST_VIDEO_FORMAT_YV12, NV_ENC_BUFFER_FORMAT_YV12, FALSE, FALSE},
{GST_VIDEO_FORMAT_I420, NV_ENC_BUFFER_FORMAT_IYUV, FALSE, FALSE},
{GST_VIDEO_FORMAT_BGRA, NV_ENC_BUFFER_FORMAT_ARGB, FALSE, FALSE},
{GST_VIDEO_FORMAT_RGBA, NV_ENC_BUFFER_FORMAT_ABGR, FALSE, FALSE},
{GST_VIDEO_FORMAT_Y444, NV_ENC_BUFFER_FORMAT_YUV444, FALSE, FALSE},
{GST_VIDEO_FORMAT_VUYA, NV_ENC_BUFFER_FORMAT_AYUV, FALSE, FALSE},
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
{GST_VIDEO_FORMAT_P010_10LE, NV_ENC_BUFFER_FORMAT_YUV420_10BIT, TRUE,
FALSE},
{GST_VIDEO_FORMAT_BGR10A2_LE, NV_ENC_BUFFER_FORMAT_ARGB10, TRUE,
FALSE},
{GST_VIDEO_FORMAT_RGB10A2_LE, NV_ENC_BUFFER_FORMAT_ABGR10, TRUE,
FALSE},
{GST_VIDEO_FORMAT_Y444_16LE, NV_ENC_BUFFER_FORMAT_YUV444_10BIT, TRUE,
FALSE},
#else
{GST_VIDEO_FORMAT_P010_10BE, NV_ENC_BUFFER_FORMAT_YUV420_10BIT, TRUE,
FALSE},
{GST_VIDEO_FORMAT_Y444_16BE, NV_ENC_BUFFER_FORMAT_YUV444_10BIT, TRUE,
FALSE},
/* FIXME: no 10-bit big-endian ARGB10 format is defined */
#endif
};
param.version = gst_nvenc_get_caps_param_version ();
param.capsToQuery = NV_ENC_CAPS_SUPPORT_YUV444_ENCODE;
if (NvEncGetEncodeCaps (encoder,
codec_id, &param, &support_yuv444) != NV_ENC_SUCCESS) {
support_yuv444 = 0;
}
param.capsToQuery = NV_ENC_CAPS_SUPPORT_10BIT_ENCODE;
if (NvEncGetEncodeCaps (encoder,
codec_id, &param, &support_10bit) != NV_ENC_SUCCESS) {
support_10bit = 0;
}
if (NvEncGetInputFormats (encoder,
codec_id, format_list, G_N_ELEMENTS (format_list),
&count) != NV_ENC_SUCCESS || count == 0) {
return FALSE;
}
for (i = 0; i < count; i++) {
GST_INFO ("input format: 0x%08x", format_list[i]);
switch (format_list[i]) {
case NV_ENC_BUFFER_FORMAT_NV12:
case NV_ENC_BUFFER_FORMAT_YV12:
case NV_ENC_BUFFER_FORMAT_IYUV:
case NV_ENC_BUFFER_FORMAT_ARGB:
case NV_ENC_BUFFER_FORMAT_ABGR:
if (!format_map[i].supported) {
format_map[i].supported = TRUE;
num_format++;
}
break;
case NV_ENC_BUFFER_FORMAT_YUV444:
case NV_ENC_BUFFER_FORMAT_AYUV:
if (support_yuv444 && !format_map[i].supported) {
format_map[i].supported = TRUE;
num_format++;
}
break;
case NV_ENC_BUFFER_FORMAT_YUV420_10BIT:
if (support_10bit && !format_map[i].supported) {
format_map[i].supported = TRUE;
num_format++;
}
break;
case NV_ENC_BUFFER_FORMAT_YUV444_10BIT:
if (support_yuv444 && support_10bit && !format_map[i].supported) {
format_map[i].supported = TRUE;
num_format++;
}
break;
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
case NV_ENC_BUFFER_FORMAT_ARGB10:
case NV_ENC_BUFFER_FORMAT_ABGR10:
if (support_10bit && !format_map[i].supported) {
format_map[i].supported = TRUE;
num_format++;
}
break;
#endif
default:
GST_FIXME ("unmapped input format: 0x%08x", format_list[i]);
break;
}
}
if (num_format == 0)
return FALSE;
/* process a second time so we can add formats in the order we want */
g_value_init (&val, G_TYPE_STRING);
ret = g_new0 (GValue, 1);
g_value_init (ret, GST_TYPE_LIST);
for (i = 0; i < G_N_ELEMENTS (format_map); i++) {
if (!format_map[i].supported)
continue;
g_value_set_static_string (&val,
gst_video_format_to_string (format_map[i].gst_format));
gst_value_list_append_value (ret, &val);
}
g_value_unset (&val);
*formats = ret;
return TRUE;
}
GValue *
gst_nvenc_get_interlace_modes (gpointer enc, GUID codec_id)
{
NV_ENC_CAPS_PARAM caps_param = { 0, };
GValue *list;
GValue val = G_VALUE_INIT;
gint interlace_modes = 0;
caps_param.version = gst_nvenc_get_caps_param_version ();
caps_param.capsToQuery = NV_ENC_CAPS_SUPPORT_FIELD_ENCODING;
if (NvEncGetEncodeCaps (enc, codec_id, &caps_param,
&interlace_modes) != NV_ENC_SUCCESS)
interlace_modes = 0;
list = g_new0 (GValue, 1);
g_value_init (list, GST_TYPE_LIST);
g_value_init (&val, G_TYPE_STRING);
g_value_set_static_string (&val, "progressive");
gst_value_list_append_value (list, &val);
if (interlace_modes == 0)
return list;
if (interlace_modes >= 1) {
g_value_set_static_string (&val, "interleaved");
gst_value_list_append_value (list, &val);
g_value_set_static_string (&val, "mixed");
gst_value_list_append_value (list, &val);
g_value_unset (&val);
}
/* TODO: figure out what nvenc frame based interlacing means in gst terms */
return list;
}
typedef struct
{
const gchar *gst_profile;
const GUID nv_profile;
const GUID codec_id;
const gboolean need_yuv444;
const gboolean need_10bit;
gboolean supported;
} GstNvEncCodecProfile;
GValue *
gst_nvenc_get_supported_codec_profiles (gpointer enc, GUID codec_id)
{
NVENCSTATUS nv_ret;
GUID profile_guids[64];
GValue *ret;
GValue val = G_VALUE_INIT;
guint i, j, n, n_profiles;
NV_ENC_CAPS_PARAM param = { 0, };
gint support_yuv444 = 0;
gint support_10bit = 0;
GstNvEncCodecProfile profiles[] = {
/* avc profiles */
{"main", NV_ENC_H264_PROFILE_MAIN_GUID, NV_ENC_CODEC_H264_GUID, FALSE,
FALSE, FALSE},
{"high", NV_ENC_H264_PROFILE_HIGH_GUID, NV_ENC_CODEC_H264_GUID, FALSE,
FALSE, FALSE},
{"high-4:4:4", NV_ENC_H264_PROFILE_HIGH_444_GUID, NV_ENC_CODEC_H264_GUID,
TRUE, FALSE, FALSE},
/* put baseline last since it does not support B-frames */
{"baseline", NV_ENC_H264_PROFILE_BASELINE_GUID, NV_ENC_CODEC_H264_GUID,
FALSE, FALSE, FALSE},
{"constrained-baseline", NV_ENC_H264_PROFILE_BASELINE_GUID,
NV_ENC_CODEC_H264_GUID,
FALSE, FALSE, FALSE},
/* hevc profiles */
{"main", NV_ENC_HEVC_PROFILE_MAIN_GUID, NV_ENC_CODEC_HEVC_GUID, FALSE,
FALSE, FALSE},
{"main-10", NV_ENC_HEVC_PROFILE_MAIN10_GUID, NV_ENC_CODEC_HEVC_GUID, FALSE,
TRUE, FALSE},
{"main-444", NV_ENC_HEVC_PROFILE_FREXT_GUID, NV_ENC_CODEC_HEVC_GUID, TRUE,
FALSE, FALSE},
#if 0
/* FIXME: seems to be an unsupported format */
{"main-444-10", NV_ENC_HEVC_PROFILE_FREXT_GUID, FALSE}
#endif
};
param.version = gst_nvenc_get_caps_param_version ();
param.capsToQuery = NV_ENC_CAPS_SUPPORT_YUV444_ENCODE;
if (NvEncGetEncodeCaps (enc,
codec_id, &param, &support_yuv444) != NV_ENC_SUCCESS) {
support_yuv444 = 0;
}
param.capsToQuery = NV_ENC_CAPS_SUPPORT_10BIT_ENCODE;
if (NvEncGetEncodeCaps (enc,
codec_id, &param, &support_10bit) != NV_ENC_SUCCESS) {
support_10bit = 0;
}
nv_ret = NvEncGetEncodeProfileGUIDCount (enc, codec_id, &n);
if (nv_ret != NV_ENC_SUCCESS)
return NULL;
nv_ret = NvEncGetEncodeProfileGUIDs (enc,
codec_id, profile_guids, G_N_ELEMENTS (profile_guids), &n);
if (nv_ret != NV_ENC_SUCCESS)
return NULL;
n_profiles = 0;
for (i = 0; i < n; i++) {
for (j = 0; j < G_N_ELEMENTS (profiles); j++) {
if (profiles[j].supported == FALSE &&
gst_nvenc_cmp_guid (profile_guids[i], profiles[j].nv_profile) &&
gst_nvenc_cmp_guid (codec_id, profiles[j].codec_id)) {
if (profiles[j].need_yuv444 && !support_yuv444)
continue;
if (profiles[j].need_10bit && !support_10bit)
continue;
profiles[j].supported = TRUE;
n_profiles++;
}
}
}
if (n_profiles == 0)
return NULL;
ret = g_new0 (GValue, 1);
g_value_init (ret, GST_TYPE_LIST);
g_value_init (&val, G_TYPE_STRING);
for (i = 0; i < G_N_ELEMENTS (profiles); i++) {
if (!profiles[i].supported)
continue;
g_value_set_static_string (&val, profiles[i].gst_profile);
gst_value_list_append_value (ret, &val);
}
g_value_unset (&val);
return ret;
}
#define DEBUG_DEVICE_CAPS(d,c,caps,s) \
GST_DEBUG ("[device-%d %s] %s: %s", \
d, c, caps, s ? "supported" : "not supported");
#define ERROR_DETAILS "codec %s, device %i, error code %i"
static void
gst_nv_enc_register (GstPlugin * plugin, GUID codec_id, const gchar * codec,
guint rank, gint device_index, CUcontext cuda_ctx)
{
{
GValue *formats = NULL;
GValue *profiles;
GValue *interlace_modes;
gpointer enc;
NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS params = { 0, };
NV_ENC_CAPS_PARAM caps_param = { 0, };
GUID guids[16];
guint32 count;
gint max_width = 0;
gint max_height = 0;
gint min_width = 16;
gint min_height = 16;
GstCaps *sink_templ = NULL;
GstCaps *src_templ = NULL;
gchar *name;
gint j;
GstNvEncDeviceCaps device_caps = { 0, };
NVENCSTATUS status;
CUresult cu_res;
params.version = gst_nvenc_get_open_encode_session_ex_params_version ();
params.apiVersion = gst_nvenc_get_api_version ();
params.device = cuda_ctx;
params.deviceType = NV_ENC_DEVICE_TYPE_CUDA;
if ((cu_res = CuCtxPushCurrent (cuda_ctx)) != CUDA_SUCCESS) {
GST_ERROR ("CuCtxPushCurrent failed: " ERROR_DETAILS, codec,
device_index, cu_res);
goto done;
}
if ((status = NvEncOpenEncodeSessionEx (&params, &enc)) != NV_ENC_SUCCESS) {
CuCtxPopCurrent (NULL);
GST_ERROR ("NvEncOpenEncodeSessionEx failed: " ERROR_DETAILS, codec,
device_index, status);
goto done;
}
if ((status = NvEncGetEncodeGUIDs (enc, guids, G_N_ELEMENTS (guids),
&count)) != NV_ENC_SUCCESS) {
GST_ERROR ("NvEncGetEncodeGUIDs failed: " ERROR_DETAILS, codec,
device_index, status);
goto enc_free;
}
for (j = 0; j < count; j++) {
if (gst_nvenc_cmp_guid (guids[j], codec_id))
break;
}
if (j == count)
goto enc_free;
if (!gst_nvenc_get_supported_input_formats (enc, codec_id, &formats))
goto enc_free;
profiles = gst_nvenc_get_supported_codec_profiles (enc, codec_id);
if (!profiles)
goto free_format;
caps_param.version = gst_nvenc_get_caps_param_version ();
caps_param.capsToQuery = NV_ENC_CAPS_WIDTH_MAX;
if ((status = NvEncGetEncodeCaps (enc,
codec_id, &caps_param, &max_width)) != NV_ENC_SUCCESS) {
max_width = 4096;
GST_WARNING ("could not query max width, setting as %i: "
ERROR_DETAILS, max_width, codec, device_index, status);
} else if (max_width < 4096) {
GST_WARNING ("max width %d is less than expected value", max_width);
max_width = 4096;
}
caps_param.capsToQuery = NV_ENC_CAPS_HEIGHT_MAX;
if ((status = NvEncGetEncodeCaps (enc,
codec_id, &caps_param, &max_height)) != NV_ENC_SUCCESS) {
GST_WARNING ("could not query max height, setting as %i: "
ERROR_DETAILS, max_height, codec, device_index, status);
max_height = 4096;
} else if (max_height < 4096) {
GST_WARNING ("max height %d is less than expected value", max_height);
max_height = 4096;
}
caps_param.capsToQuery = NV_ENC_CAPS_WIDTH_MIN;
if ((status = NvEncGetEncodeCaps (enc,
codec_id, &caps_param, &min_width)) != NV_ENC_SUCCESS) {
GST_WARNING ("could not query min width, setting as %i: "
ERROR_DETAILS, min_width, codec, device_index, status);
min_width = 16;
}
caps_param.capsToQuery = NV_ENC_CAPS_HEIGHT_MIN;
if ((status = NvEncGetEncodeCaps (enc,
codec_id, &caps_param, &min_height)) != NV_ENC_SUCCESS) {
GST_WARNING ("could not query min height, setting as %i: "
ERROR_DETAILS, min_height, codec, device_index, status);
min_height = 16;
}
caps_param.capsToQuery = NV_ENC_CAPS_SUPPORTED_RATECONTROL_MODES;
if (NvEncGetEncodeCaps (enc, codec_id, &caps_param,
&device_caps.rc_modes) != NV_ENC_SUCCESS) {
device_caps.rc_modes = 0;
} else {
GST_DEBUG ("[device-%d %s] rate control modes: 0x%x",
device_index, codec, device_caps.rc_modes);
#define IS_SUPPORTED_RC(rc_modes,mode) \
((((rc_modes) & (mode)) == mode) ? "supported" : "not supported")
GST_DEBUG ("\tconst-qp: %s",
IS_SUPPORTED_RC (device_caps.rc_modes, NV_ENC_PARAMS_RC_CONSTQP));
GST_DEBUG ("\tvbr: %s",
IS_SUPPORTED_RC (device_caps.rc_modes, NV_ENC_PARAMS_RC_VBR));
GST_DEBUG ("\tcbr: %s",
IS_SUPPORTED_RC (device_caps.rc_modes, NV_ENC_PARAMS_RC_CBR));
GST_DEBUG ("\tcbr-lowdelay-hq: %s",
IS_SUPPORTED_RC (device_caps.rc_modes,
NV_ENC_PARAMS_RC_CBR_LOWDELAY_HQ));
GST_DEBUG ("\tcbr-hq: %s",
IS_SUPPORTED_RC (device_caps.rc_modes, NV_ENC_PARAMS_RC_CBR_HQ));
GST_DEBUG ("\tvbr-hq: %s",
IS_SUPPORTED_RC (device_caps.rc_modes, NV_ENC_PARAMS_RC_VBR_HQ));
GST_DEBUG ("\tvbr-minqp: %s (deprecated)",
IS_SUPPORTED_RC (device_caps.rc_modes, NV_ENC_PARAMS_RC_VBR_MINQP));
#undef IS_SUPPORTED_RC
}
caps_param.capsToQuery = NV_ENC_CAPS_SUPPORT_WEIGHTED_PREDICTION;
if (NvEncGetEncodeCaps (enc, codec_id, &caps_param,
&device_caps.weighted_prediction) != NV_ENC_SUCCESS) {
device_caps.weighted_prediction = FALSE;
}
caps_param.capsToQuery = NV_ENC_CAPS_SUPPORT_CUSTOM_VBV_BUF_SIZE;
if (NvEncGetEncodeCaps (enc, codec_id, &caps_param,
&device_caps.custom_vbv_bufsize) != NV_ENC_SUCCESS) {
device_caps.custom_vbv_bufsize = FALSE;
}
caps_param.capsToQuery = NV_ENC_CAPS_SUPPORT_LOOKAHEAD;
if (NvEncGetEncodeCaps (enc,
codec_id, &caps_param, &device_caps.lookahead) != NV_ENC_SUCCESS) {
device_caps.lookahead = FALSE;
}
caps_param.capsToQuery = NV_ENC_CAPS_SUPPORT_TEMPORAL_AQ;
if (NvEncGetEncodeCaps (enc, codec_id, &caps_param,
&device_caps.temporal_aq) != NV_ENC_SUCCESS) {
device_caps.temporal_aq = FALSE;
}
caps_param.capsToQuery = NV_ENC_CAPS_NUM_MAX_BFRAMES;
if (NvEncGetEncodeCaps (enc, codec_id, &caps_param,
&device_caps.bframes) != NV_ENC_SUCCESS) {
device_caps.bframes = 0;
}
DEBUG_DEVICE_CAPS (device_index,
codec, "weighted prediction", device_caps.weighted_prediction);
DEBUG_DEVICE_CAPS (device_index, codec, "custom vbv-buffer-size",
device_caps.custom_vbv_bufsize);
DEBUG_DEVICE_CAPS (device_index, codec, "rc-loockahead",
device_caps.lookahead);
DEBUG_DEVICE_CAPS (device_index, codec, "temporal adaptive quantization",
device_caps.temporal_aq);
GST_DEBUG ("[device-%d %s] max bframes: %d", device_index, codec,
device_caps.bframes);
interlace_modes = gst_nvenc_get_interlace_modes (enc, codec_id);
sink_templ = gst_caps_new_empty_simple ("video/x-raw");
gst_caps_set_value (sink_templ, "format", formats);
gst_caps_set_simple (sink_templ,
"width", GST_TYPE_INT_RANGE, min_width, max_width,
"height", GST_TYPE_INT_RANGE, min_height, max_height,
"framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
if (interlace_modes) {
gst_caps_set_value (sink_templ, "interlace-mode", interlace_modes);
g_value_unset (interlace_modes);
g_free (interlace_modes);
}
{
GstCaps *cuda_caps = gst_caps_copy (sink_templ);
#ifdef HAVE_CUDA_GST_GL
GstCaps *gl_caps = gst_caps_copy (sink_templ);
gst_caps_set_features_simple (gl_caps,
gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));
gst_caps_append (sink_templ, gl_caps);
#endif
gst_caps_set_features_simple (cuda_caps,
gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_CUDA_MEMORY));
gst_caps_append (sink_templ, cuda_caps);
}
name = g_strdup_printf ("video/x-%s", codec);
src_templ = gst_caps_new_simple (name,
"width", GST_TYPE_INT_RANGE, min_width, max_width,
"height", GST_TYPE_INT_RANGE, min_height, max_height,
"framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1,
"stream-format", G_TYPE_STRING, "byte-stream",
"alignment", G_TYPE_STRING, "au", NULL);
gst_caps_set_value (src_templ, "profile", profiles);
g_free (name);
GST_DEBUG ("sink template caps %" GST_PTR_FORMAT, sink_templ);
GST_DEBUG ("src template caps %" GST_PTR_FORMAT, src_templ);
g_value_unset (profiles);
g_free (profiles);
free_format:
if (formats) {
g_value_unset (formats);
g_free (formats);
}
/* fall-through */
enc_free:
NvEncDestroyEncoder (enc);
CuCtxPopCurrent (NULL);
/* fall-through */
done:
if (sink_templ && src_templ) {
if (gst_nvenc_cmp_guid (codec_id, NV_ENC_CODEC_H264_GUID)) {
gst_nv_h264_enc_register (plugin, device_index, rank, sink_templ,
src_templ, &device_caps);
} else if (gst_nvenc_cmp_guid (codec_id, NV_ENC_CODEC_HEVC_GUID)) {
gst_nv_h265_enc_register (plugin, device_index, rank, sink_templ,
src_templ, &device_caps);
} else {
g_assert_not_reached ();
}
}
gst_clear_caps (&sink_templ);
gst_clear_caps (&src_templ);
}
}
typedef struct
{
gint major;
@@ -1028,16 +433,6 @@ gst_nvenc_load_library (guint * api_major_ver, guint * api_minor_ver)
return ret == NV_ENC_SUCCESS;
}
void
gst_nvenc_plugin_init (GstPlugin * plugin, guint device_index,
CUcontext cuda_ctx)
{
gst_nv_enc_register (plugin, NV_ENC_CODEC_H264_GUID,
"h264", GST_RANK_PRIMARY * 2, device_index, cuda_ctx);
gst_nv_enc_register (plugin, NV_ENC_CODEC_HEVC_GUID,
"h265", GST_RANK_PRIMARY * 2, device_index, cuda_ctx);
}
/* To verify things when updating SDK */
#define USE_STATIC_SDK_VER 0


@@ -28,24 +28,6 @@
G_BEGIN_DECLS
gboolean gst_nvenc_cmp_guid (GUID g1, GUID g2);
NV_ENC_BUFFER_FORMAT gst_nvenc_get_nv_buffer_format (GstVideoFormat fmt);
gboolean gst_nvenc_get_supported_input_formats (gpointer encoder,
GUID codec_id,
GValue ** formats);
GValue * gst_nvenc_get_interlace_modes (gpointer enc,
GUID codec_id);
GValue * gst_nvenc_get_supported_codec_profiles (gpointer enc,
GUID codec_id);
void gst_nvenc_plugin_init (GstPlugin * plugin,
guint device_index,
CUcontext cuda_ctx);
guint32 gst_nvenc_get_api_version (void);
guint32 gst_nvenc_get_caps_param_version (void);


@@ -1,748 +0,0 @@
/* GStreamer NVENC plugin
* Copyright (C) 2015 Centricular Ltd
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstnvh264enc.h"
#include <gst/pbutils/codec-utils.h>
#include <string.h>
typedef struct
{
GstCaps *sink_caps;
GstCaps *src_caps;
gboolean is_default;
} GstNvH264EncClassData;
GST_DEBUG_CATEGORY_STATIC (gst_nv_h264_enc_debug);
#define GST_CAT_DEFAULT gst_nv_h264_enc_debug
static GstElementClass *parent_class = NULL;
enum
{
PROP_0,
PROP_AUD,
PROP_WEIGHTED_PRED,
PROP_VBV_BUFFER_SIZE,
PROP_RC_LOOKAHEAD,
PROP_TEMPORAL_AQ,
PROP_BFRAMES,
PROP_B_ADAPT,
};
#define DEFAULT_AUD TRUE
#define DEFAULT_WEIGHTED_PRED FALSE
#define DEFAULT_VBV_BUFFER_SIZE 0
#define DEFAULT_RC_LOOKAHEAD 0
#define DEFAULT_TEMPORAL_AQ FALSE
#define DEFAULT_BFRAMES 0
#define DEFAULT_B_ADAPT FALSE
/* captured using RTX 2080 */
#define DOCUMENTATION_SINK_CAPS_COMM \
"format = (string) { NV12, YV12, I420, BGRA, RGBA, Y444, VUYA }, " \
"width = (int) [ 145, 4096 ], " \
"height = (int) [ 49, 4096 ], " \
"framerate = " GST_VIDEO_FPS_RANGE ", " \
"interlace-mode = (string) { progressive } "
#define DOCUMENTATION_SINK_CAPS \
"video/x-raw, " DOCUMENTATION_SINK_CAPS_COMM "; " \
"video/x-raw(memory:GLMemory), " DOCUMENTATION_SINK_CAPS_COMM "; " \
"video/x-raw(memory:CUDAMemory), " DOCUMENTATION_SINK_CAPS_COMM
#define DOCUMENTATION_SRC_CAPS \
"video/x-h264, " \
"width = (int) [ 145, 4096 ], " \
"height = (int) [ 49, 4096 ], " \
"framerate = " GST_VIDEO_FPS_RANGE ", " \
"stream-format = (string) byte-stream, " \
"alignment = (string) au, " \
"profile = (string) { main, high, high-4:4:4, baseline, constrained-baseline }"
static gboolean gst_nv_h264_enc_open (GstVideoEncoder * enc);
static gboolean gst_nv_h264_enc_close (GstVideoEncoder * enc);
static gboolean gst_nv_h264_enc_set_src_caps (GstNvBaseEnc * nvenc,
GstVideoCodecState * state);
static gboolean gst_nv_h264_enc_set_encoder_config (GstNvBaseEnc * nvenc,
GstVideoCodecState * state, NV_ENC_CONFIG * config);
static gboolean gst_nv_h264_enc_set_pic_params (GstNvBaseEnc * nvenc,
GstVideoCodecFrame * frame, NV_ENC_PIC_PARAMS * pic_params);
static void gst_nv_h264_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_nv_h264_enc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_nv_h264_enc_finalize (GObject * obj);
static void
gst_nv_h264_enc_class_init (GstNvH264EncClass * klass, gpointer data)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstVideoEncoderClass *videoenc_class = GST_VIDEO_ENCODER_CLASS (klass);
GstNvBaseEncClass *nvenc_class = GST_NV_BASE_ENC_CLASS (klass);
GstNvEncDeviceCaps *device_caps = &nvenc_class->device_caps;
GstNvH264EncClassData *cdata = (GstNvH264EncClassData *) data;
gchar *long_name;
GstPadTemplate *pad_templ;
GstCaps *doc_caps;
parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = gst_nv_h264_enc_set_property;
gobject_class->get_property = gst_nv_h264_enc_get_property;
gobject_class->finalize = gst_nv_h264_enc_finalize;
videoenc_class->open = GST_DEBUG_FUNCPTR (gst_nv_h264_enc_open);
videoenc_class->close = GST_DEBUG_FUNCPTR (gst_nv_h264_enc_close);
nvenc_class->codec_id = NV_ENC_CODEC_H264_GUID;
nvenc_class->set_encoder_config = gst_nv_h264_enc_set_encoder_config;
nvenc_class->set_src_caps = gst_nv_h264_enc_set_src_caps;
nvenc_class->set_pic_params = gst_nv_h264_enc_set_pic_params;
/**
* GstNvH264Enc:aud:
*
* Use AU (Access Unit) delimiter
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class, PROP_AUD,
g_param_spec_boolean ("aud", "AUD",
"Use AU (Access Unit) delimiter", DEFAULT_AUD,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
G_PARAM_STATIC_STRINGS));
if (device_caps->weighted_prediction) {
/**
* GstNvH264Enc:weighted-pred:
*
* Weighted Prediction
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class, PROP_WEIGHTED_PRED,
g_param_spec_boolean ("weighted-pred", "Weighted Pred",
"Weighted Prediction", DEFAULT_WEIGHTED_PRED,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_STATIC_STRINGS));
}
if (device_caps->custom_vbv_bufsize) {
/**
* GstNvH264Enc:vbv-buffer-size:
*
* VBV(HRD) Buffer Size in kbits (0 = NVENC default)
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class,
PROP_VBV_BUFFER_SIZE,
g_param_spec_uint ("vbv-buffer-size", "VBV Buffer Size",
"VBV(HRD) Buffer Size in kbits (0 = NVENC default)",
0, G_MAXUINT, DEFAULT_VBV_BUFFER_SIZE,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_STATIC_STRINGS));
}
if (device_caps->lookahead) {
/**
* GstNvH264Enc:rc-lookahead:
*
* Number of frames for frame type lookahead
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class, PROP_RC_LOOKAHEAD,
g_param_spec_uint ("rc-lookahead", "Rate Control Lookahead",
"Number of frames for frame type lookahead",
0, 32, DEFAULT_RC_LOOKAHEAD,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_STATIC_STRINGS));
}
if (device_caps->temporal_aq) {
/**
* GstNvH264Enc:temporal-aq:
*
* Temporal Adaptive Quantization
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class, PROP_TEMPORAL_AQ,
g_param_spec_boolean ("temporal-aq", "Temporal AQ",
"Temporal Adaptive Quantization", DEFAULT_TEMPORAL_AQ,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_STATIC_STRINGS));
}
if (device_caps->bframes > 0) {
/**
* GstNvH264Enc:bframes:
*
* Number of B-frames between I and P
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class, PROP_BFRAMES,
g_param_spec_uint ("bframes", "B-Frames",
"Number of B-frames between I and P", 0, device_caps->bframes,
DEFAULT_BFRAMES,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_STATIC_STRINGS));
/**
* GstNvH264Enc:b-adapt:
*
* Enable adaptive B-frame insert when lookahead is enabled
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class, PROP_B_ADAPT,
g_param_spec_boolean ("b-adapt", "B Adapt",
"Enable adaptive B-frame insert when lookahead is enabled",
DEFAULT_B_ADAPT,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_STATIC_STRINGS));
}
if (cdata->is_default)
long_name = g_strdup ("NVENC H.264 Video Encoder");
else
long_name = g_strdup_printf ("NVENC H.264 Video Encoder with device %d",
nvenc_class->cuda_device_id);
gst_element_class_set_metadata (element_class, long_name,
"Codec/Encoder/Video/Hardware",
"Encode H.264 video streams using NVIDIA's hardware-accelerated NVENC encoder API",
"Tim-Philipp Müller <tim@centricular.com>, "
"Matthew Waters <matthew@centricular.com>, "
"Seungha Yang <seungha.yang@navercorp.com>");
g_free (long_name);
GST_DEBUG_CATEGORY_INIT (gst_nv_h264_enc_debug,
"nvh264enc", 0, "Nvidia H.264 encoder");
pad_templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
cdata->sink_caps);
doc_caps = gst_caps_from_string (DOCUMENTATION_SINK_CAPS);
gst_pad_template_set_documentation_caps (pad_templ, doc_caps);
gst_caps_unref (doc_caps);
gst_element_class_add_pad_template (element_class, pad_templ);
pad_templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
cdata->src_caps);
doc_caps = gst_caps_from_string (DOCUMENTATION_SRC_CAPS);
gst_pad_template_set_documentation_caps (pad_templ, doc_caps);
gst_caps_unref (doc_caps);
gst_element_class_add_pad_template (element_class, pad_templ);
gst_caps_unref (cdata->sink_caps);
gst_caps_unref (cdata->src_caps);
g_free (cdata);
}
static void
gst_nv_h264_enc_init (GstNvH264Enc * nvenc)
{
GstNvBaseEnc *baseenc = GST_NV_BASE_ENC (nvenc);
nvenc->aud = DEFAULT_AUD;
/* device capability dependent properties */
baseenc->weighted_pred = DEFAULT_WEIGHTED_PRED;
baseenc->vbv_buffersize = DEFAULT_VBV_BUFFER_SIZE;
baseenc->rc_lookahead = DEFAULT_RC_LOOKAHEAD;
baseenc->temporal_aq = DEFAULT_TEMPORAL_AQ;
baseenc->bframes = DEFAULT_BFRAMES;
baseenc->b_adapt = DEFAULT_B_ADAPT;
}
static void
gst_nv_h264_enc_finalize (GObject * obj)
{
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
static gboolean
gst_nv_h264_enc_open (GstVideoEncoder * enc)
{
GstNvBaseEnc *base = GST_NV_BASE_ENC (enc);
if (!GST_VIDEO_ENCODER_CLASS (parent_class)->open (enc))
return FALSE;
/* Check if H.264 is supported */
{
uint32_t i, num = 0;
GUID guids[16];
NvEncGetEncodeGUIDs (base->encoder, guids, G_N_ELEMENTS (guids), &num);
for (i = 0; i < num; ++i) {
if (gst_nvenc_cmp_guid (guids[i], NV_ENC_CODEC_H264_GUID))
break;
}
GST_INFO_OBJECT (enc, "H.264 encoding %ssupported", (i == num) ? "un" : "");
if (i == num) {
gst_nv_h264_enc_close (enc);
return FALSE;
}
}
return TRUE;
}
static gboolean
gst_nv_h264_enc_close (GstVideoEncoder * enc)
{
return GST_VIDEO_ENCODER_CLASS (parent_class)->close (enc);
}
static gboolean
gst_nv_h264_enc_set_profile_and_level (GstNvH264Enc * nvenc, GstCaps * caps)
{
#define N_BYTES_SPS 128
guint8 sps[N_BYTES_SPS];
NV_ENC_SEQUENCE_PARAM_PAYLOAD spp = { 0, };
GstStructure *s;
const gchar *profile;
GstCaps *allowed_caps;
GstStructure *s2;
const gchar *allowed_profile;
NVENCSTATUS nv_ret;
guint32 seq_size;
spp.version = gst_nvenc_get_sequence_param_payload_version ();
spp.inBufferSize = N_BYTES_SPS;
spp.spsId = 0;
spp.ppsId = 0;
spp.spsppsBuffer = &sps;
spp.outSPSPPSPayloadSize = &seq_size;
nv_ret = NvEncGetSequenceParams (GST_NV_BASE_ENC (nvenc)->encoder, &spp);
if (nv_ret != NV_ENC_SUCCESS) {
GST_ELEMENT_ERROR (nvenc, STREAM, ENCODE, ("Encode header failed."),
("NvEncGetSequenceParams return code=%d", nv_ret));
return FALSE;
}
if (seq_size < 8) {
GST_ELEMENT_ERROR (nvenc, STREAM, ENCODE, ("Encode header failed."),
("NvEncGetSequenceParams returned incomplete data"));
return FALSE;
}
/* skip nal header and identifier */
gst_codec_utils_h264_caps_set_level_and_profile (caps, &sps[5], 3);
/* Constrained baseline is a strict subset of baseline. If downstream
* wanted baseline and we produced constrained baseline, we can just
* set the profile to baseline in the caps to make negotiation happy.
* Same goes for baseline as subset of main profile and main as a subset
* of high profile.
*/
s = gst_caps_get_structure (caps, 0);
profile = gst_structure_get_string (s, "profile");
allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (nvenc));
if (allowed_caps == NULL)
goto no_peer;
if (!gst_caps_can_intersect (allowed_caps, caps)) {
allowed_caps = gst_caps_make_writable (allowed_caps);
allowed_caps = gst_caps_truncate (allowed_caps);
s2 = gst_caps_get_structure (allowed_caps, 0);
gst_structure_fixate_field_string (s2, "profile", profile);
allowed_profile = gst_structure_get_string (s2, "profile");
if (!strcmp (allowed_profile, "high")) {
if (!strcmp (profile, "constrained-baseline")
|| !strcmp (profile, "baseline") || !strcmp (profile, "main")) {
gst_structure_set (s, "profile", G_TYPE_STRING, "high", NULL);
GST_INFO_OBJECT (nvenc, "downstream requested high profile, but "
"encoder will now output %s profile (which is a subset), due "
"to how it's been configured", profile);
}
} else if (!strcmp (allowed_profile, "main")) {
if (!strcmp (profile, "constrained-baseline")
|| !strcmp (profile, "baseline")) {
gst_structure_set (s, "profile", G_TYPE_STRING, "main", NULL);
GST_INFO_OBJECT (nvenc, "downstream requested main profile, but "
"encoder will now output %s profile (which is a subset), due "
"to how it's been configured", profile);
}
} else if (!strcmp (allowed_profile, "baseline")) {
if (!strcmp (profile, "constrained-baseline"))
gst_structure_set (s, "profile", G_TYPE_STRING, "baseline", NULL);
}
}
gst_caps_unref (allowed_caps);
no_peer:
return TRUE;
#undef N_BYTES_SPS
}
static gboolean
gst_nv_h264_enc_set_src_caps (GstNvBaseEnc * nvenc, GstVideoCodecState * state)
{
GstNvH264Enc *h264enc = (GstNvH264Enc *) nvenc;
GstVideoCodecState *out_state;
GstStructure *s;
GstCaps *out_caps;
out_caps = gst_caps_new_empty_simple ("video/x-h264");
s = gst_caps_get_structure (out_caps, 0);
/* TODO: add support for avc format as well */
gst_structure_set (s, "stream-format", G_TYPE_STRING, "byte-stream",
"alignment", G_TYPE_STRING, "au", NULL);
if (!gst_nv_h264_enc_set_profile_and_level (h264enc, out_caps)) {
gst_caps_unref (out_caps);
return FALSE;
}
out_state = gst_video_encoder_set_output_state (GST_VIDEO_ENCODER (nvenc),
out_caps, state);
GST_INFO_OBJECT (nvenc, "output caps: %" GST_PTR_FORMAT, out_state->caps);
/* encoder will keep it around for us */
gst_video_codec_state_unref (out_state);
/* TODO: would be nice to also send some tags with the codec name */
return TRUE;
}
static gboolean
gst_nv_h264_enc_set_encoder_config (GstNvBaseEnc * nvenc,
GstVideoCodecState * state, NV_ENC_CONFIG * config)
{
GstNvH264Enc *h264enc = (GstNvH264Enc *) nvenc;
GstCaps *allowed_caps, *template_caps;
GUID selected_profile = NV_ENC_CODEC_PROFILE_AUTOSELECT_GUID;
int level_idc = NV_ENC_LEVEL_AUTOSELECT;
GstVideoInfo *info = &state->info;
NV_ENC_CONFIG_H264 *h264_config = &config->encodeCodecConfig.h264Config;
NV_ENC_CONFIG_H264_VUI_PARAMETERS *vui = &h264_config->h264VUIParameters;
template_caps =
gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (h264enc));
allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (h264enc));
if (template_caps == allowed_caps) {
GST_INFO_OBJECT (h264enc, "downstream has ANY caps");
} else if (allowed_caps) {
GstStructure *s;
const gchar *profile;
const gchar *level;
if (gst_caps_is_empty (allowed_caps)) {
gst_caps_unref (allowed_caps);
gst_caps_unref (template_caps);
return FALSE;
}
allowed_caps = gst_caps_make_writable (allowed_caps);
allowed_caps = gst_caps_fixate (allowed_caps);
s = gst_caps_get_structure (allowed_caps, 0);
profile = gst_structure_get_string (s, "profile");
if (profile) {
if (!strcmp (profile, "baseline")
|| !strcmp (profile, "constrained-baseline")) {
selected_profile = NV_ENC_H264_PROFILE_BASELINE_GUID;
} else if (g_str_has_prefix (profile, "high-4:4:4")) {
selected_profile = NV_ENC_H264_PROFILE_HIGH_444_GUID;
} else if (g_str_has_prefix (profile, "high-10")) {
g_assert_not_reached ();
} else if (g_str_has_prefix (profile, "high-4:2:2")) {
g_assert_not_reached ();
} else if (g_str_has_prefix (profile, "high")) {
selected_profile = NV_ENC_H264_PROFILE_HIGH_GUID;
} else if (g_str_has_prefix (profile, "main")) {
selected_profile = NV_ENC_H264_PROFILE_MAIN_GUID;
} else {
g_assert_not_reached ();
}
}
level = gst_structure_get_string (s, "level");
if (level)
/* matches values stored in NV_ENC_LEVEL */
level_idc = gst_codec_utils_h264_get_level_idc (level);
gst_caps_unref (allowed_caps);
}
gst_caps_unref (template_caps);
/* override some defaults */
GST_LOG_OBJECT (h264enc, "setting parameters");
config->profileGUID = selected_profile;
h264_config->level = level_idc;
h264_config->chromaFormatIDC = 1;
if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_Y444 ||
GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_VUYA) {
GST_DEBUG_OBJECT (h264enc, "have Y444 input, setting config accordingly");
config->profileGUID = NV_ENC_H264_PROFILE_HIGH_444_GUID;
h264_config->chromaFormatIDC = 3;
}
h264_config->idrPeriod = config->gopLength;
h264_config->outputAUD = h264enc->aud;
vui->videoSignalTypePresentFlag = 1;
/* NOTE: vui::video_format represents the video format prior to encoding,
* such as PAL, NTSC, SECAM, or MAC. That is not very informative and can be
* inferred from the resolution and framerate by any application.
*/
/* Unspecified video format (5) */
vui->videoFormat = 5;
if (info->colorimetry.range == GST_VIDEO_COLOR_RANGE_0_255) {
vui->videoFullRangeFlag = 1;
} else {
vui->videoFullRangeFlag = 0;
}
vui->colourDescriptionPresentFlag = 1;
vui->colourMatrix = gst_video_color_matrix_to_iso (info->colorimetry.matrix);
vui->colourPrimaries =
gst_video_color_primaries_to_iso (info->colorimetry.primaries);
vui->transferCharacteristics =
gst_video_transfer_function_to_iso (info->colorimetry.transfer);
return TRUE;
}
static gboolean
gst_nv_h264_enc_set_pic_params (GstNvBaseEnc * enc, GstVideoCodecFrame * frame,
NV_ENC_PIC_PARAMS * pic_params)
{
/* encode whole picture in one single slice */
pic_params->codecPicParams.h264PicParams.sliceMode = 0;
pic_params->codecPicParams.h264PicParams.sliceModeData = 0;
return TRUE;
}
static void
gst_nv_h264_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstNvH264Enc *self = (GstNvH264Enc *) object;
GstNvBaseEnc *nvenc = GST_NV_BASE_ENC (object);
GstNvBaseEncClass *klass = GST_NV_BASE_ENC_GET_CLASS (object);
GstNvEncDeviceCaps *device_caps = &klass->device_caps;
gboolean reconfig = FALSE;
switch (prop_id) {
case PROP_AUD:
{
gboolean aud;
aud = g_value_get_boolean (value);
if (aud != self->aud) {
self->aud = aud;
reconfig = TRUE;
}
break;
}
case PROP_WEIGHTED_PRED:
if (!device_caps->weighted_prediction) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
nvenc->weighted_pred = g_value_get_boolean (value);
reconfig = TRUE;
}
break;
case PROP_VBV_BUFFER_SIZE:
if (!device_caps->custom_vbv_bufsize) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
nvenc->vbv_buffersize = g_value_get_uint (value);
reconfig = TRUE;
}
break;
case PROP_RC_LOOKAHEAD:
if (!device_caps->lookahead) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
nvenc->rc_lookahead = g_value_get_uint (value);
reconfig = TRUE;
}
break;
case PROP_TEMPORAL_AQ:
if (!device_caps->temporal_aq) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
nvenc->temporal_aq = g_value_get_boolean (value);
reconfig = TRUE;
}
break;
case PROP_BFRAMES:
if (!device_caps->bframes) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
nvenc->bframes = g_value_get_uint (value);
reconfig = TRUE;
}
break;
case PROP_B_ADAPT:
if (!device_caps->bframes) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
nvenc->b_adapt = g_value_get_boolean (value);
}
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
if (reconfig)
gst_nv_base_enc_schedule_reconfig (GST_NV_BASE_ENC (self));
}
static void
gst_nv_h264_enc_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstNvH264Enc *self = (GstNvH264Enc *) object;
GstNvBaseEnc *nvenc = GST_NV_BASE_ENC (object);
GstNvBaseEncClass *klass = GST_NV_BASE_ENC_GET_CLASS (object);
GstNvEncDeviceCaps *device_caps = &klass->device_caps;
switch (prop_id) {
case PROP_AUD:
g_value_set_boolean (value, self->aud);
break;
case PROP_WEIGHTED_PRED:
if (!device_caps->weighted_prediction) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
g_value_set_boolean (value, nvenc->weighted_pred);
}
break;
case PROP_VBV_BUFFER_SIZE:
if (!device_caps->custom_vbv_bufsize) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
g_value_set_uint (value, nvenc->vbv_buffersize);
}
break;
case PROP_RC_LOOKAHEAD:
if (!device_caps->lookahead) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
g_value_set_uint (value, nvenc->rc_lookahead);
}
break;
case PROP_TEMPORAL_AQ:
if (!device_caps->temporal_aq) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
g_value_set_boolean (value, nvenc->temporal_aq);
}
break;
case PROP_BFRAMES:
if (!device_caps->bframes) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
g_value_set_uint (value, nvenc->bframes);
}
break;
case PROP_B_ADAPT:
if (!device_caps->bframes) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
g_value_set_boolean (value, nvenc->b_adapt);
}
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
void
gst_nv_h264_enc_register (GstPlugin * plugin, guint device_id, guint rank,
GstCaps * sink_caps, GstCaps * src_caps, GstNvEncDeviceCaps * device_caps)
{
GType parent_type;
GType type;
gchar *type_name;
gchar *feature_name;
GstNvH264EncClassData *cdata;
gboolean is_default = TRUE;
GTypeInfo type_info = {
sizeof (GstNvH264EncClass),
NULL,
NULL,
(GClassInitFunc) gst_nv_h264_enc_class_init,
NULL,
NULL,
sizeof (GstNvH264Enc),
0,
(GInstanceInitFunc) gst_nv_h264_enc_init,
};
parent_type = gst_nv_base_enc_register ("H264", device_id, device_caps);
cdata = g_new0 (GstNvH264EncClassData, 1);
cdata->sink_caps = gst_caps_ref (sink_caps);
cdata->src_caps = gst_caps_ref (src_caps);
type_info.class_data = cdata;
/* class data will be leaked if the element never gets instantiated */
GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
type_name = g_strdup ("GstNvH264Enc");
feature_name = g_strdup ("nvh264enc");
if (g_type_from_name (type_name) != 0) {
g_free (type_name);
g_free (feature_name);
type_name = g_strdup_printf ("GstNvH264Device%dEnc", device_id);
feature_name = g_strdup_printf ("nvh264device%denc", device_id);
is_default = FALSE;
}
cdata->is_default = is_default;
type = g_type_register_static (parent_type, type_name, &type_info, 0);
/* make lower rank than default device */
if (rank > 0 && !is_default)
rank--;
if (!gst_element_register (plugin, feature_name, rank, type))
GST_WARNING ("Failed to register plugin '%s'", type_name);
g_free (type_name);
g_free (feature_name);
}
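gst_nv_h264_enc_register above falls back to numbered type and factory names ("GstNvH264Device%dEnc" / "nvh264device%denc") when the default name is already taken, and lowers the rank so auto-plugging prefers the first device. A short sketch of looking up such a per-device factory; device index 1 is an assumed example, and gst_init() is assumed to have been called already.

GstElementFactory *factory;

/* the numbered factory only exists when a second NVENC-capable GPU
 * was found at plugin load time */
factory = gst_element_factory_find ("nvh264device1enc");
if (factory != NULL) {
  GstElement *enc = gst_element_factory_create (factory, NULL);

  /* use enc here, then release both references */
  gst_object_unref (enc);
  gst_object_unref (factory);
}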


@@ -1,43 +0,0 @@
/* GStreamer NVENC plugin
* Copyright (C) 2015 Centricular Ltd
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_NV_H264_ENC_H_INCLUDED__
#define __GST_NV_H264_ENC_H_INCLUDED__
#include "gstnvbaseenc.h"
typedef struct {
GstNvBaseEnc base_nvenc;
/* properties */
gboolean aud;
} GstNvH264Enc;
typedef struct {
GstNvBaseEncClass video_encoder_class;
} GstNvH264EncClass;
void gst_nv_h264_enc_register (GstPlugin * plugin,
guint device_id,
guint rank,
GstCaps * sink_caps,
GstCaps * src_caps,
GstNvEncDeviceCaps * device_caps);
#endif /* __GST_NV_H264_ENC_H_INCLUDED__ */


@@ -1,861 +0,0 @@
/* GStreamer NVENC plugin
* Copyright (C) 2015 Centricular Ltd
* Copyright (C) 2018 Seungha Yang <pudding8757@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstnvh265enc.h"
#include <gst/pbutils/codec-utils.h>
#include <gst/base/gstbytewriter.h>
#include <string.h>
typedef struct
{
GstCaps *sink_caps;
GstCaps *src_caps;
gboolean is_default;
} GstNvH265EncClassData;
GST_DEBUG_CATEGORY_STATIC (gst_nv_h265_enc_debug);
#define GST_CAT_DEFAULT gst_nv_h265_enc_debug
static GstElementClass *parent_class = NULL;
enum
{
PROP_0,
PROP_AUD,
PROP_WEIGHTED_PRED,
PROP_VBV_BUFFER_SIZE,
PROP_RC_LOOKAHEAD,
PROP_TEMPORAL_AQ,
PROP_BFRAMES,
PROP_B_ADAPT,
};
#define DEFAULT_AUD TRUE
#define DEFAULT_WEIGHTED_PRED FALSE
#define DEFAULT_VBV_BUFFER_SIZE 0
#define DEFAULT_RC_LOOKAHEAD 0
#define DEFAULT_TEMPORAL_AQ FALSE
#define DEFAULT_BFRAMES 0
#define DEFAULT_B_ADAPT FALSE
/* captured using RTX 2080 */
#define DOCUMENTATION_SINK_CAPS_COMM \
"format = (string) { NV12, P010_10LE, P016_LE, Y444, Y444_16LE, Y444_16LE }, " \
"width = (int) [ 144, 8192 ], " \
"height = (int) [ 144, 8192 ], " \
"framerate = " GST_VIDEO_FPS_RANGE
#define DOCUMENTATION_SINK_CAPS \
"video/x-raw, " DOCUMENTATION_SINK_CAPS_COMM "; " \
"video/x-raw(memory:GLMemory), " DOCUMENTATION_SINK_CAPS_COMM "; " \
"video/x-raw(memory:CUDAMemory), " DOCUMENTATION_SINK_CAPS_COMM
#define DOCUMENTATION_SRC_CAPS \
"video/x-h265, " \
"width = (int) [ 144, 8192 ], " \
"height = (int) [ 144, 8192 ], " \
"stream-format = (string) byte-stream, " \
"alignment = (string) au, " \
"profile = (string) { main, main-10, main-12, main-444, main-444-10, main-444-12 }"
static gboolean gst_nv_h265_enc_open (GstVideoEncoder * enc);
static gboolean gst_nv_h265_enc_close (GstVideoEncoder * enc);
static gboolean gst_nv_h265_enc_stop (GstVideoEncoder * enc);
static gboolean gst_nv_h265_enc_set_src_caps (GstNvBaseEnc * nvenc,
GstVideoCodecState * state);
static gboolean gst_nv_h265_enc_set_encoder_config (GstNvBaseEnc * nvenc,
GstVideoCodecState * state, NV_ENC_CONFIG * config);
static gboolean gst_nv_h265_enc_set_pic_params (GstNvBaseEnc * nvenc,
GstVideoCodecFrame * frame, NV_ENC_PIC_PARAMS * pic_params);
static void gst_nv_h265_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_nv_h265_enc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_nv_h265_enc_finalize (GObject * obj);
static void
gst_nv_h265_enc_class_init (GstNvH265EncClass * klass, gpointer data)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstVideoEncoderClass *videoenc_class = GST_VIDEO_ENCODER_CLASS (klass);
GstNvBaseEncClass *nvenc_class = GST_NV_BASE_ENC_CLASS (klass);
GstNvEncDeviceCaps *device_caps = &nvenc_class->device_caps;
GstNvH265EncClassData *cdata = (GstNvH265EncClassData *) data;
gchar *long_name;
GstPadTemplate *pad_templ;
GstCaps *doc_caps;
parent_class = g_type_class_peek_parent (klass);
gobject_class->set_property = gst_nv_h265_enc_set_property;
gobject_class->get_property = gst_nv_h265_enc_get_property;
gobject_class->finalize = gst_nv_h265_enc_finalize;
videoenc_class->open = GST_DEBUG_FUNCPTR (gst_nv_h265_enc_open);
videoenc_class->close = GST_DEBUG_FUNCPTR (gst_nv_h265_enc_close);
videoenc_class->stop = GST_DEBUG_FUNCPTR (gst_nv_h265_enc_stop);
nvenc_class->codec_id = NV_ENC_CODEC_HEVC_GUID;
nvenc_class->set_encoder_config = gst_nv_h265_enc_set_encoder_config;
nvenc_class->set_src_caps = gst_nv_h265_enc_set_src_caps;
nvenc_class->set_pic_params = gst_nv_h265_enc_set_pic_params;
/**
* GstNvH265Enc:aud:
*
* Use AU (Access Unit) delimiter
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class, PROP_AUD,
g_param_spec_boolean ("aud", "AUD",
"Use AU (Access Unit) delimiter", DEFAULT_AUD,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
G_PARAM_STATIC_STRINGS));
if (device_caps->weighted_prediction) {
/**
* GstNvH265Enc:weighted-pred:
*
* Weighted Prediction
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class, PROP_WEIGHTED_PRED,
g_param_spec_boolean ("weighted-pred", "Weighted Pred",
"Weighted Prediction", DEFAULT_WEIGHTED_PRED,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_STATIC_STRINGS));
}
if (device_caps->custom_vbv_bufsize) {
/**
* GstNvH265Enc:vbv-buffer-size:
*
* VBV(HRD) Buffer Size in kbits (0 = NVENC default)
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class,
PROP_VBV_BUFFER_SIZE,
g_param_spec_uint ("vbv-buffer-size", "VBV Buffer Size",
"VBV(HRD) Buffer Size in kbits (0 = NVENC default)",
0, G_MAXUINT, DEFAULT_VBV_BUFFER_SIZE,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_STATIC_STRINGS));
}
if (device_caps->lookahead) {
/**
* GstNvH265Enc:rc-lookahead:
*
* Number of frames for frame type lookahead
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class, PROP_RC_LOOKAHEAD,
g_param_spec_uint ("rc-lookahead", "Rate Control Lookahead",
"Number of frames for frame type lookahead", 0, 32,
DEFAULT_RC_LOOKAHEAD,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_STATIC_STRINGS));
}
if (device_caps->temporal_aq) {
/**
* GstNvH265Enc:temporal-aq:
*
* Temporal Adaptive Quantization
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class, PROP_TEMPORAL_AQ,
g_param_spec_boolean ("temporal-aq", "Temporal AQ",
"Temporal Adaptive Quantization", DEFAULT_TEMPORAL_AQ,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_STATIC_STRINGS));
}
if (device_caps->bframes > 0) {
/**
* GstNvH265Enc:bframes:
*
* Number of B-frames between I and P
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class, PROP_BFRAMES,
g_param_spec_uint ("bframes", "B-Frames",
"Number of B-frames between I and P", 0, device_caps->bframes,
DEFAULT_BFRAMES,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_STATIC_STRINGS));
/**
* GstNvH265Enc:b-adapt:
*
* Enable adaptive B-frame insert when lookahead is enabled
*
* Since: 1.18
*/
g_object_class_install_property (gobject_class, PROP_B_ADAPT,
g_param_spec_boolean ("b-adapt", "B Adapt",
"Enable adaptive B-frame insert when lookahead is enabled",
DEFAULT_B_ADAPT,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_STATIC_STRINGS));
}
if (cdata->is_default)
long_name = g_strdup ("NVENC HEVC Video Encoder");
else
long_name = g_strdup_printf ("NVENC HEVC Video Encoder with device %d",
nvenc_class->cuda_device_id);
gst_element_class_set_metadata (element_class, long_name,
"Codec/Encoder/Video/Hardware",
"Encode HEVC video streams using NVIDIA's hardware-accelerated NVENC encoder API",
"Tim-Philipp Müller <tim@centricular.com>, "
"Matthew Waters <matthew@centricular.com>, "
"Seungha Yang <pudding8757@gmail.com>");
g_free (long_name);
GST_DEBUG_CATEGORY_INIT (gst_nv_h265_enc_debug,
"nvh265enc", 0, "Nvidia HEVC encoder");
pad_templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
cdata->sink_caps);
doc_caps = gst_caps_from_string (DOCUMENTATION_SINK_CAPS);
gst_pad_template_set_documentation_caps (pad_templ, doc_caps);
gst_caps_unref (doc_caps);
gst_element_class_add_pad_template (element_class, pad_templ);
pad_templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
cdata->src_caps);
doc_caps = gst_caps_from_string (DOCUMENTATION_SRC_CAPS);
gst_pad_template_set_documentation_caps (pad_templ, doc_caps);
gst_caps_unref (doc_caps);
gst_element_class_add_pad_template (element_class, pad_templ);
gst_caps_unref (cdata->sink_caps);
gst_caps_unref (cdata->src_caps);
g_free (cdata);
}
static void
gst_nv_h265_enc_init (GstNvH265Enc * nvenc)
{
GstNvBaseEnc *baseenc = GST_NV_BASE_ENC (nvenc);
nvenc->aud = DEFAULT_AUD;
/* device capability dependent properties */
baseenc->weighted_pred = DEFAULT_WEIGHTED_PRED;
baseenc->vbv_buffersize = DEFAULT_VBV_BUFFER_SIZE;
baseenc->rc_lookahead = DEFAULT_RC_LOOKAHEAD;
baseenc->temporal_aq = DEFAULT_TEMPORAL_AQ;
baseenc->bframes = DEFAULT_BFRAMES;
baseenc->b_adapt = DEFAULT_B_ADAPT;
}
static void
gst_nv_h265_enc_finalize (GObject * obj)
{
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
static gboolean
gst_nv_h265_enc_open (GstVideoEncoder * enc)
{
GstNvBaseEnc *base = GST_NV_BASE_ENC (enc);
if (!GST_VIDEO_ENCODER_CLASS (parent_class)->open (enc))
return FALSE;
/* Check if HEVC is supported */
{
uint32_t i, num = 0;
GUID guids[16];
NvEncGetEncodeGUIDs (base->encoder, guids, G_N_ELEMENTS (guids), &num);
for (i = 0; i < num; ++i) {
if (gst_nvenc_cmp_guid (guids[i], NV_ENC_CODEC_HEVC_GUID))
break;
}
GST_INFO_OBJECT (enc, "HEVC encoding %ssupported", (i == num) ? "un" : "");
if (i == num) {
gst_nv_h265_enc_close (enc);
return FALSE;
}
}
return TRUE;
}
static gboolean
gst_nv_h265_enc_close (GstVideoEncoder * enc)
{
return GST_VIDEO_ENCODER_CLASS (parent_class)->close (enc);
}
static void
gst_nv_h265_enc_clear_stream_data (GstNvH265Enc * h265enc)
{
gint i;
if (!h265enc->sei_payload)
return;
for (i = 0; i < h265enc->num_sei_payload; i++)
g_free (h265enc->sei_payload[i].payload);
g_free (h265enc->sei_payload);
h265enc->sei_payload = NULL;
h265enc->num_sei_payload = 0;
}
static gboolean
gst_nv_h265_enc_stop (GstVideoEncoder * enc)
{
GstNvH265Enc *h265enc = (GstNvH265Enc *) enc;
gst_nv_h265_enc_clear_stream_data (h265enc);
return GST_VIDEO_ENCODER_CLASS (parent_class)->stop (enc);
}
static gboolean
gst_nv_h265_enc_set_level_tier_and_profile (GstNvH265Enc * nvenc,
GstCaps * caps)
{
#define N_BYTES_VPS 128
guint8 vps[N_BYTES_VPS];
NV_ENC_SEQUENCE_PARAM_PAYLOAD spp = { 0, };
NVENCSTATUS nv_ret;
guint32 seq_size;
spp.version = gst_nvenc_get_sequence_param_payload_version ();
spp.inBufferSize = N_BYTES_VPS;
spp.spsId = 0;
spp.ppsId = 0;
spp.spsppsBuffer = &vps;
spp.outSPSPPSPayloadSize = &seq_size;
nv_ret = NvEncGetSequenceParams (GST_NV_BASE_ENC (nvenc)->encoder, &spp);
if (nv_ret != NV_ENC_SUCCESS) {
GST_ELEMENT_ERROR (nvenc, STREAM, ENCODE, ("Encode header failed."),
("NvEncGetSequenceParams return code=%d", nv_ret));
return FALSE;
}
if (seq_size < 8) {
GST_ELEMENT_ERROR (nvenc, STREAM, ENCODE, ("Encode header failed."),
("NvEncGetSequenceParams returned incomplete data"));
return FALSE;
}
GST_MEMDUMP ("Header", spp.spsppsBuffer, seq_size);
  /* skip the 4-byte start code and the 2-byte NAL unit header */
gst_codec_utils_h265_caps_set_level_tier_and_profile (caps,
&vps[6], seq_size - 6);
return TRUE;
#undef N_BYTES_VPS
}
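
The &vps[6] offset above assumes the header blob returned by NvEncGetSequenceParams begins with a 4-byte Annex-B start code followed by the 2-byte HEVC NAL unit header. A hedged sketch of that layout check (the buffer layout is an assumption, not taken from NVENC documentation):

#include <glib.h>

static gboolean
header_starts_with_vps (const guint8 * buf, guint size)
{
  if (size < 7)
    return FALSE;
  /* 4-byte Annex-B start code */
  if (buf[0] != 0x00 || buf[1] != 0x00 || buf[2] != 0x00 || buf[3] != 0x01)
    return FALSE;
  /* nal_unit_type is bits 1-6 of the first NAL header byte; VPS_NUT is 32 */
  return ((buf[4] >> 1) & 0x3f) == 32;
}
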
static gboolean
gst_nv_h265_enc_set_src_caps (GstNvBaseEnc * nvenc, GstVideoCodecState * state)
{
GstNvH265Enc *h265enc = (GstNvH265Enc *) nvenc;
GstVideoCodecState *out_state;
GstStructure *s;
GstCaps *out_caps;
out_caps = gst_caps_new_empty_simple ("video/x-h265");
s = gst_caps_get_structure (out_caps, 0);
  /* TODO: add support for the hvc1 and hev1 stream-formats as well */
gst_structure_set (s, "stream-format", G_TYPE_STRING, "byte-stream",
"alignment", G_TYPE_STRING, "au", NULL);
if (!gst_nv_h265_enc_set_level_tier_and_profile (h265enc, out_caps)) {
gst_caps_unref (out_caps);
return FALSE;
}
out_state = gst_video_encoder_set_output_state (GST_VIDEO_ENCODER (nvenc),
out_caps, state);
GST_INFO_OBJECT (nvenc, "output caps: %" GST_PTR_FORMAT, out_state->caps);
/* encoder will keep it around for us */
gst_video_codec_state_unref (out_state);
/* TODO: would be nice to also send some tags with the codec name */
return TRUE;
}
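
For reference, a hypothetical pipeline that consumes the byte-stream/au output negotiated above (a sketch only; videotestsrc, h265parse, matroskamux and filesink are stock GStreamer elements, the property values are arbitrary):

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch (
      "videotestsrc num-buffers=300 ! video/x-raw,format=NV12 "
      "! nvh265enc ! h265parse ! matroskamux ! filesink location=out.mkv",
      NULL);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* block until EOS or an error */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR);

  gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
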
static guint8 *
gst_nv_h265_enc_create_mastering_display_sei_nal (GstNvH265Enc * h265enc,
GstVideoMasteringDisplayInfo * minfo, guint * size)
{
guint sei_size;
gint i;
GstByteWriter br;
GST_LOG_OBJECT (h265enc, "Apply mastering display info: "
"Red(%u, %u) "
"Green(%u, %u) "
"Blue(%u, %u) "
"White(%u, %u) "
"max_luminance(%u) "
"min_luminance(%u) ",
minfo->display_primaries[0].x, minfo->display_primaries[0].y,
minfo->display_primaries[1].x, minfo->display_primaries[1].y,
minfo->display_primaries[2].x, minfo->display_primaries[2].y,
minfo->white_point.x, minfo->white_point.y,
minfo->max_display_mastering_luminance,
minfo->min_display_mastering_luminance);
  /* x, y: 16 bits each per RGB primary
   * x, y: 16 bits each for the white point
   * max, min luminance: 32 bits each
   */
sei_size = (2 * 2 * 3) + (2 * 2) + (4 * 2);
gst_byte_writer_init_with_size (&br, sei_size, TRUE);
/* GstVideoMasteringDisplayInfo::display_primaries is rgb order but
* HEVC uses gbr order
* See spec D.3.28 display_primaries_x and display_primaries_y
*/
for (i = 0; i < 3; i++) {
gst_byte_writer_put_uint16_be (&br,
minfo->display_primaries[(i + 1) % 3].x);
gst_byte_writer_put_uint16_be (&br,
minfo->display_primaries[(i + 1) % 3].y);
}
gst_byte_writer_put_uint16_be (&br, minfo->white_point.x);
gst_byte_writer_put_uint16_be (&br, minfo->white_point.y);
gst_byte_writer_put_uint32_be (&br, minfo->max_display_mastering_luminance);
gst_byte_writer_put_uint32_be (&br, minfo->min_display_mastering_luminance);
*size = sei_size;
return gst_byte_writer_reset_and_get_data (&br);
}
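
The payload written above is (2 * 2 * 3) + (2 * 2) + (4 * 2) = 24 bytes. A verification sketch that parses it back, assuming it mirrors the writer exactly (including the G, B, R primary ordering):

#include <gst/base/gstbytereader.h>

static gboolean
parse_mdcv_payload (const guint8 * data, guint size,
    guint16 primaries[3][2], guint16 white_point[2],
    guint32 * max_luma, guint32 * min_luma)
{
  GstByteReader br;
  gint i;

  if (size < 24)
    return FALSE;

  gst_byte_reader_init (&br, data, size);
  /* three primaries in G, B, R order, 16 bits per coordinate */
  for (i = 0; i < 3; i++) {
    gst_byte_reader_get_uint16_be (&br, &primaries[i][0]);
    gst_byte_reader_get_uint16_be (&br, &primaries[i][1]);
  }
  gst_byte_reader_get_uint16_be (&br, &white_point[0]);
  gst_byte_reader_get_uint16_be (&br, &white_point[1]);
  gst_byte_reader_get_uint32_be (&br, max_luma);
  gst_byte_reader_get_uint32_be (&br, min_luma);
  return TRUE;
}
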
static guint8 *
gst_nv_h265_enc_create_content_light_level_sei_nal (GstNvH265Enc * h265enc,
GstVideoContentLightLevel * linfo, guint * size)
{
guint sei_size;
GstByteWriter br;
GST_LOG_OBJECT (h265enc, "Apply content light level: "
"maxCLL:(%u), maxFALL:(%u)", linfo->max_content_light_level,
linfo->max_frame_average_light_level);
  /* maxCLL and maxFALL, 16 bits each */
sei_size = 2 * 2;
gst_byte_writer_init_with_size (&br, sei_size, TRUE);
gst_byte_writer_put_uint16_be (&br, linfo->max_content_light_level);
gst_byte_writer_put_uint16_be (&br, linfo->max_frame_average_light_level);
*size = sei_size;
return gst_byte_writer_reset_and_get_data (&br);
}
static gboolean
gst_nv_h265_enc_set_encoder_config (GstNvBaseEnc * nvenc,
GstVideoCodecState * state, NV_ENC_CONFIG * config)
{
GstNvH265Enc *h265enc = (GstNvH265Enc *) nvenc;
GstCaps *allowed_caps, *template_caps;
GUID selected_profile = NV_ENC_CODEC_PROFILE_AUTOSELECT_GUID;
int level_idc = NV_ENC_LEVEL_AUTOSELECT;
GstVideoInfo *info = &state->info;
NV_ENC_CONFIG_HEVC *hevc_config = &config->encodeCodecConfig.hevcConfig;
NV_ENC_CONFIG_HEVC_VUI_PARAMETERS *vui = &hevc_config->hevcVUIParameters;
template_caps =
gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (h265enc));
allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (h265enc));
if (template_caps == allowed_caps) {
GST_INFO_OBJECT (h265enc, "downstream has ANY caps");
} else if (allowed_caps) {
GstStructure *s;
const gchar *profile;
const gchar *level;
if (gst_caps_is_empty (allowed_caps)) {
gst_caps_unref (allowed_caps);
gst_caps_unref (template_caps);
return FALSE;
}
allowed_caps = gst_caps_make_writable (allowed_caps);
allowed_caps = gst_caps_fixate (allowed_caps);
s = gst_caps_get_structure (allowed_caps, 0);
profile = gst_structure_get_string (s, "profile");
    /* FIXME: only main, main-10 and main-444 profiles are handled for now */
if (profile) {
if (!strcmp (profile, "main")) {
selected_profile = NV_ENC_HEVC_PROFILE_MAIN_GUID;
} else if (g_str_has_prefix (profile, "main-10")) {
selected_profile = NV_ENC_HEVC_PROFILE_MAIN10_GUID;
} else if (g_str_has_prefix (profile, "main-444")) {
selected_profile = NV_ENC_HEVC_PROFILE_FREXT_GUID;
} else {
g_assert_not_reached ();
}
}
level = gst_structure_get_string (s, "level");
if (level)
/* matches values stored in NV_ENC_LEVEL */
level_idc = gst_codec_utils_h265_get_level_idc (level);
gst_caps_unref (allowed_caps);
}
gst_caps_unref (template_caps);
/* override some defaults */
GST_LOG_OBJECT (h265enc, "setting parameters");
config->profileGUID = selected_profile;
hevc_config->level = level_idc;
hevc_config->idrPeriod = config->gopLength;
config->encodeCodecConfig.hevcConfig.chromaFormatIDC = 1;
if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_Y444 ||
GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_Y444_16LE ||
GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_Y444_16BE ||
GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_VUYA) {
GST_DEBUG_OBJECT (h265enc, "have Y444 input, setting config accordingly");
config->profileGUID = NV_ENC_HEVC_PROFILE_FREXT_GUID;
config->encodeCodecConfig.hevcConfig.chromaFormatIDC = 3;
if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_Y444_16LE ||
GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_Y444_16BE)
config->encodeCodecConfig.hevcConfig.pixelBitDepthMinus8 = 2;
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
} else if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_P010_10LE) {
#else
} else if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_P010_10BE) {
#endif
config->profileGUID = NV_ENC_HEVC_PROFILE_MAIN10_GUID;
config->encodeCodecConfig.hevcConfig.pixelBitDepthMinus8 = 2;
}
hevc_config->outputAUD = h265enc->aud;
vui->videoSignalTypePresentFlag = 1;
  /* NOTE: vui::video_format describes the source video format prior to
   * encoding, such as PAL, NTSC, SECAM or MAC. It is not very informative
   * and can be inferred from the resolution and framerate by any application.
   */
/* Unspecified video format (5) */
vui->videoFormat = 5;
if (info->colorimetry.range == GST_VIDEO_COLOR_RANGE_0_255) {
vui->videoFullRangeFlag = 1;
} else {
vui->videoFullRangeFlag = 0;
}
vui->colourDescriptionPresentFlag = 1;
vui->colourMatrix = gst_video_color_matrix_to_iso (info->colorimetry.matrix);
vui->colourPrimaries =
gst_video_color_primaries_to_iso (info->colorimetry.primaries);
vui->transferCharacteristics =
gst_video_transfer_function_to_iso (info->colorimetry.transfer);
gst_nv_h265_enc_clear_stream_data (h265enc);
{
GstVideoMasteringDisplayInfo minfo;
GstVideoContentLightLevel linfo;
gboolean have_mastering;
gboolean have_cll;
guint size;
gint i = 0;
have_mastering =
gst_video_mastering_display_info_from_caps (&minfo, state->caps);
have_cll = gst_video_content_light_level_from_caps (&linfo, state->caps);
if (have_mastering)
h265enc->num_sei_payload++;
if (have_cll)
h265enc->num_sei_payload++;
h265enc->sei_payload =
g_new0 (NV_ENC_SEI_PAYLOAD, h265enc->num_sei_payload);
if (have_mastering) {
h265enc->sei_payload[i].payload =
gst_nv_h265_enc_create_mastering_display_sei_nal (h265enc,
&minfo, &size);
h265enc->sei_payload[i].payloadSize = size;
h265enc->sei_payload[i].payloadType = 137;
i++;
}
if (have_cll) {
h265enc->sei_payload[i].payload =
gst_nv_h265_enc_create_content_light_level_sei_nal (h265enc,
&linfo, &size);
h265enc->sei_payload[i].payloadSize = size;
h265enc->sei_payload[i].payloadType = 144;
}
}
return TRUE;
}
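
The mastering-display and content-light-level SEIs above are driven by fields on the input caps. A sketch of caps that would make this code emit both messages; the helpers are public GstVideo API, the numeric values are arbitrary examples (roughly BT.2020 primaries with a D65 white point):

#include <gst/video/video.h>

static GstCaps *
make_hdr_input_caps (void)
{
  GstVideoMasteringDisplayInfo minfo;
  GstVideoContentLightLevel cll;
  GstCaps *caps = gst_caps_from_string (
      "video/x-raw,format=P010_10LE,width=3840,height=2160,framerate=30/1");

  gst_video_mastering_display_info_init (&minfo);
  minfo.display_primaries[0].x = 35400;   /* R, in 0.00002 units */
  minfo.display_primaries[0].y = 14600;
  minfo.display_primaries[1].x = 8500;    /* G */
  minfo.display_primaries[1].y = 39850;
  minfo.display_primaries[2].x = 6550;    /* B */
  minfo.display_primaries[2].y = 2300;
  minfo.white_point.x = 15635;
  minfo.white_point.y = 16450;
  minfo.max_display_mastering_luminance = 10000000; /* 1000 cd/m2 in 0.0001 units */
  minfo.min_display_mastering_luminance = 1;
  gst_video_mastering_display_info_add_to_caps (&minfo, caps);

  gst_video_content_light_level_init (&cll);
  cll.max_content_light_level = 1000;
  cll.max_frame_average_light_level = 400;
  gst_video_content_light_level_add_to_caps (&cll, caps);

  return caps;
}
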
static gboolean
gst_nv_h265_enc_set_pic_params (GstNvBaseEnc * enc, GstVideoCodecFrame * frame,
NV_ENC_PIC_PARAMS * pic_params)
{
GstNvH265Enc *h265enc = (GstNvH265Enc *) enc;
  /* encode the whole picture as a single slice */
pic_params->codecPicParams.hevcPicParams.sliceMode = 0;
pic_params->codecPicParams.hevcPicParams.sliceModeData = 0;
if (h265enc->sei_payload) {
pic_params->codecPicParams.hevcPicParams.seiPayloadArray =
h265enc->sei_payload;
pic_params->codecPicParams.hevcPicParams.seiPayloadArrayCnt =
h265enc->num_sei_payload;
}
return TRUE;
}
static void
gst_nv_h265_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstNvH265Enc *self = (GstNvH265Enc *) object;
GstNvBaseEnc *nvenc = GST_NV_BASE_ENC (object);
GstNvBaseEncClass *klass = GST_NV_BASE_ENC_GET_CLASS (object);
GstNvEncDeviceCaps *device_caps = &klass->device_caps;
gboolean reconfig = FALSE;
switch (prop_id) {
case PROP_AUD:
{
gboolean aud;
aud = g_value_get_boolean (value);
if (aud != self->aud) {
self->aud = aud;
reconfig = TRUE;
}
break;
}
case PROP_WEIGHTED_PRED:
if (!device_caps->weighted_prediction) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
nvenc->weighted_pred = g_value_get_boolean (value);
reconfig = TRUE;
}
break;
case PROP_VBV_BUFFER_SIZE:
if (!device_caps->custom_vbv_bufsize) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
nvenc->vbv_buffersize = g_value_get_uint (value);
reconfig = TRUE;
}
break;
case PROP_RC_LOOKAHEAD:
if (!device_caps->lookahead) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
nvenc->rc_lookahead = g_value_get_uint (value);
reconfig = TRUE;
}
break;
case PROP_TEMPORAL_AQ:
if (!device_caps->temporal_aq) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
nvenc->temporal_aq = g_value_get_boolean (value);
reconfig = TRUE;
}
break;
case PROP_BFRAMES:
if (!device_caps->bframes) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
nvenc->bframes = g_value_get_uint (value);
reconfig = TRUE;
}
break;
case PROP_B_ADAPT:
if (!device_caps->bframes) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
nvenc->b_adapt = g_value_get_boolean (value);
}
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
if (reconfig)
gst_nv_base_enc_schedule_reconfig (GST_NV_BASE_ENC (self));
}
static void
gst_nv_h265_enc_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstNvH265Enc *self = (GstNvH265Enc *) object;
GstNvBaseEnc *nvenc = GST_NV_BASE_ENC (object);
GstNvBaseEncClass *klass = GST_NV_BASE_ENC_GET_CLASS (object);
GstNvEncDeviceCaps *device_caps = &klass->device_caps;
switch (prop_id) {
case PROP_AUD:
g_value_set_boolean (value, self->aud);
break;
case PROP_WEIGHTED_PRED:
if (!device_caps->weighted_prediction) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
g_value_set_boolean (value, nvenc->weighted_pred);
}
break;
case PROP_VBV_BUFFER_SIZE:
if (!device_caps->custom_vbv_bufsize) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
g_value_set_uint (value, nvenc->vbv_buffersize);
}
break;
case PROP_RC_LOOKAHEAD:
if (!device_caps->lookahead) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
g_value_set_uint (value, nvenc->rc_lookahead);
}
break;
case PROP_TEMPORAL_AQ:
if (!device_caps->temporal_aq) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
g_value_set_boolean (value, nvenc->temporal_aq);
}
break;
case PROP_BFRAMES:
if (!device_caps->bframes) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
g_value_set_uint (value, nvenc->bframes);
}
break;
case PROP_B_ADAPT:
if (!device_caps->bframes) {
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
} else {
g_value_set_boolean (value, nvenc->b_adapt);
}
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
void
gst_nv_h265_enc_register (GstPlugin * plugin, guint device_id, guint rank,
GstCaps * sink_caps, GstCaps * src_caps, GstNvEncDeviceCaps * device_caps)
{
GType parent_type;
GType type;
gchar *type_name;
gchar *feature_name;
GstNvH265EncClassData *cdata;
gboolean is_default = TRUE;
GTypeInfo type_info = {
sizeof (GstNvH265EncClass),
NULL,
NULL,
(GClassInitFunc) gst_nv_h265_enc_class_init,
NULL,
NULL,
sizeof (GstNvH265Enc),
0,
(GInstanceInitFunc) gst_nv_h265_enc_init,
};
parent_type = gst_nv_base_enc_register ("H265", device_id, device_caps);
cdata = g_new0 (GstNvH265EncClassData, 1);
cdata->sink_caps = gst_caps_ref (sink_caps);
cdata->src_caps = gst_caps_ref (src_caps);
type_info.class_data = cdata;
/* class data will be leaked if the element never gets instantiated */
GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
type_name = g_strdup ("GstNvH265Enc");
feature_name = g_strdup ("nvh265enc");
if (g_type_from_name (type_name) != 0) {
g_free (type_name);
g_free (feature_name);
type_name = g_strdup_printf ("GstNvH265Device%dEnc", device_id);
feature_name = g_strdup_printf ("nvh265device%denc", device_id);
is_default = FALSE;
}
cdata->is_default = is_default;
type = g_type_register_static (parent_type, type_name, &type_info, 0);
  /* use a lower rank than the default device's element */
if (rank > 0 && !is_default)
rank--;
if (!gst_element_register (plugin, feature_name, rank, type))
GST_WARNING ("Failed to register plugin '%s'", type_name);
g_free (type_name);
g_free (feature_name);
}
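
With the naming scheme above, the element for the first (default) device is registered as nvh265enc and additional devices as nvh265device<N>enc. A sketch of how an application might pick a specific GPU under that assumption:

#include <gst/gst.h>

static GstElement *
make_h265_encoder_for_device (guint device_id)
{
  GstElement *enc = NULL;

  if (device_id > 0) {
    gchar *name = g_strdup_printf ("nvh265device%uenc", device_id);
    enc = gst_element_factory_make (name, NULL);
    g_free (name);
  }

  /* fall back to the default device's element */
  if (!enc)
    enc = gst_element_factory_make ("nvh265enc", NULL);

  return enc;
}
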


@ -1,47 +0,0 @@
/* GStreamer NVENC plugin
* Copyright (C) 2015 Centricular Ltd
* Copyright (C) 2018 Seungha Yang <pudding8757@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_NV_HEVC_ENC_H_INCLUDED__
#define __GST_NV_HEVC_ENC_H_INCLUDED__
#include "gstnvbaseenc.h"
typedef struct {
GstNvBaseEnc base_nvenc;
NV_ENC_SEI_PAYLOAD *sei_payload;
guint num_sei_payload;
/* properties */
gboolean aud;
} GstNvH265Enc;
typedef struct {
GstNvBaseEncClass video_encoder_class;
} GstNvH265EncClass;
void gst_nv_h265_enc_register (GstPlugin * plugin,
guint device_id,
guint rank,
GstCaps * sink_caps,
GstCaps * src_caps,
GstNvEncDeviceCaps * device_caps);
#endif /* __GST_NV_HEVC_ENC_H_INCLUDED__ */


@ -10,7 +10,6 @@ nvcodec_sources = [
'gstcudamemorycopy.c',
'gstcuvidloader.c',
'gstnvav1dec.cpp',
'gstnvbaseenc.c',
'gstnvdec.c',
'gstnvdecobject.cpp',
'gstnvdecoder.cpp',
@ -18,10 +17,8 @@ nvcodec_sources = [
'gstnvencobject.cpp',
'gstnvencoder.cpp',
'gstnvh264dec.cpp',
'gstnvh264enc.c',
'gstnvh264encoder.cpp',
'gstnvh265dec.cpp',
'gstnvh265enc.c',
'gstnvh265encoder.cpp',
'gstnvjpegenc.cpp',
'gstnvvp8dec.cpp',


@ -304,8 +304,6 @@ plugin_init (GstPlugin * plugin)
gst_nv_h265_encoder_register_cuda (plugin, context, GST_RANK_NONE);
if (cdata)
h265_enc_cdata = g_list_append (h265_enc_cdata, cdata);
gst_nvenc_plugin_init (plugin, i, cuda_ctx);
}
gst_nv_jpeg_enc_register (plugin, context, GST_RANK_NONE, have_nvrtc);