ndisrc: Implement zerocopy handling for the received frames if possible

Also move processing from the capture thread to the streaming thread.
The NDI SDK can drop frames if they are not read from it fast enough.

All frame processing is now handled inside the ndisrcdemux.
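
A rough sketch of that split using plain std threads and a bounded channel (NdiFrame and capture_frame() are invented stand-ins, not the plugin's actual types): the capture thread does nothing but drain the SDK and hand frames over, and all further processing happens on the receiving side.

    use std::sync::mpsc;
    use std::thread;

    // Hypothetical stand-in for whatever the NDI SDK hands out per capture call.
    struct NdiFrame;

    fn capture_frame() -> NdiFrame {
        // Placeholder for the blocking NDI capture call.
        NdiFrame
    }

    fn main() {
        // Small bound: the capture thread keeps draining the SDK instead of
        // waiting on downstream processing.
        let (tx, rx) = mpsc::sync_channel::<NdiFrame>(4);

        let capture = thread::spawn(move || {
            for _ in 0..100 {
                if tx.send(capture_frame()).is_err() {
                    break; // the streaming side went away
                }
            }
        });

        // "Streaming thread": all potentially slow frame processing happens here.
        for _frame in rx {
            // convert, copy if needed, attach metas, push downstream ...
        }

        capture.join().unwrap();
    }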

Also use a buffer pool for video if copying is necessary.
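
As a minimal sketch of the two paths with the gstreamer-rs API (caps, format and sizes are invented example values), the zero-copy case wraps already-allocated memory with gst::Buffer::from_slice(), while the copy case takes its output buffers from a gst::BufferPool instead of allocating one per frame:

    use gst::prelude::*;

    fn main() {
        gst::init().unwrap();

        // Zero-copy path: wrap memory owned elsewhere without copying. The Vec
        // stands in for the NDI frame data; the real code keeps the NDI frame
        // alive for as long as the gst::Buffer exists.
        let frame_data = vec![0u8; 1920 * 1080 * 2];
        let wrapped = gst::Buffer::from_slice(frame_data);
        assert_eq!(wrapped.size(), 1920 * 1080 * 2);

        // Copy path: when wrapping is not possible (e.g. strides have to be
        // fixed up), acquire output buffers from a pool instead of allocating
        // a fresh buffer for every frame.
        let caps = gst::Caps::builder("video/x-raw")
            .field("format", "UYVY")
            .field("width", 1920i32)
            .field("height", 1080i32)
            .build();
        let pool = gst::BufferPool::new();
        let mut config = pool.config();
        config.set_params(Some(&caps), 1920 * 1080 * 2, 0, 0);
        pool.set_config(config).unwrap();
        pool.set_active(true).unwrap();
        let _pooled = pool.acquire_buffer(None).unwrap();
    }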

Additionally, make sure to use different stream ids in the stream-start
events for the audio and video pads.
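
A small sketch of what distinct stream ids look like (the id values are invented, not what the element actually generates):

    fn main() {
        gst::init().unwrap();

        // Invented ids; the real element derives them from the upstream stream id.
        let upstream_id = "ndi/source-0";
        let audio_stream_id = format!("{upstream_id}/audio");
        let video_stream_id = format!("{upstream_id}/video");

        // Each source pad sends its own stream-start event with its own id, so
        // downstream elements can tell the audio and video streams apart.
        let audio_event = gst::event::StreamStart::builder(&audio_stream_id).build();
        let video_event = gst::event::StreamStart::builder(&video_stream_id).build();

        if let (gst::EventView::StreamStart(a), gst::EventView::StreamStart(v)) =
            (audio_event.view(), video_event.view())
        {
            assert_ne!(a.stream_id(), v.stream_id());
        }
    }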

This plugin now requires GStreamer 1.16 or newer.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1365>
Sebastian Dröge 2023-10-18 21:02:55 +03:00
parent 2afffb39dd
commit 39155ef81c
8 changed files with 1984 additions and 1725 deletions


@@ -10,10 +10,10 @@ rust-version = "1.70"
[dependencies]
glib = { git = "https://github.com/gtk-rs/gtk-rs-core" }
gst = { package = "gstreamer", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs" }
gst-base = { package = "gstreamer-base", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs" }
gst-audio = { package = "gstreamer-audio", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs" }
gst-video = { package = "gstreamer-video", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs" }
gst = { package = "gstreamer", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", features = ["v1_16"] }
gst-base = { package = "gstreamer-base", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", features = ["v1_16"] }
gst-audio = { package = "gstreamer-audio", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", features = ["v1_16"] }
gst-video = { package = "gstreamer-video", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", features = ["v1_16"] }
anyhow = "1.0"
byte-slice-cast = "1"
byteorder = "1.0"
@@ -28,8 +28,7 @@ thiserror = "1.0"
gst-plugin-version-helper = { path = "../../version-helper" }
[features]
default = ["interlaced-fields", "sink"]
interlaced-fields = ["gst/v1_16", "gst-video/v1_16"]
default = ["sink"]
sink = ["gst/v1_18", "gst-base/v1_18"]
advanced-sdk = []
static = []


@@ -32,10 +32,11 @@ use gst::prelude::*;
use gst::glib::once_cell::sync::Lazy;
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Copy, glib::Enum)]
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Copy, glib::Enum, Default)]
#[repr(u32)]
#[enum_type(name = "GstNdiTimestampMode")]
pub enum TimestampMode {
#[default]
#[enum_value(name = "Auto", nick = "auto")]
Auto = 0,
#[enum_value(name = "Receive Time / Timecode", nick = "receive-time-vs-timecode")]


@@ -257,7 +257,7 @@ impl<'a> RecvBuilder<'a> {
}
}
#[derive(Debug, Clone)]
#[derive(Debug)]
struct RecvInstancePtr(ptr::NonNull<::std::os::raw::c_void>);
impl Drop for RecvInstancePtr {
@@ -836,13 +836,11 @@ impl VideoFrame {
NDIlib_frame_format_type_e::NDIlib_frame_format_type_interleaved
}
// FIXME: Is this correct?
#[cfg(feature = "interlaced-fields")]
gst_video::VideoInterlaceMode::Alternate
if frame.flags().contains(gst_video::VideoFrameFlags::TFF) =>
{
NDIlib_frame_format_type_e::NDIlib_frame_format_type_field_0
}
#[cfg(feature = "interlaced-fields")]
gst_video::VideoInterlaceMode::Alternate
if !frame.flags().contains(gst_video::VideoFrameFlags::TFF) =>
{


@@ -268,12 +268,11 @@ impl NDICCMetaDecoder {
/// Decodes the provided NDI metadata string, searching for NDI closed captions
/// and add them as `VideoCaptionMeta` to the provided `gst::Buffer`.
pub fn decode(&mut self, input: &str, buffer: &mut gst::Buffer) -> Result<()> {
pub fn decode(&mut self, input: &str) -> Result<Vec<VideoAncillary>> {
use quick_xml::events::Event;
use quick_xml::reader::Reader;
let buffer = buffer.get_mut().unwrap();
let mut captions = Vec::new();
let mut reader = Reader::from_str(input);
self.xml_buf.clear();
@@ -293,11 +292,7 @@ impl NDICCMetaDecoder {
Ok(v210_buf) => match self.parse_for_cea608(&v210_buf) {
Ok(None) => (),
Ok(Some(anc)) => {
gst_video::VideoCaptionMeta::add(
buffer,
gst_video::VideoCaptionType::Cea608S3341a,
anc.data(),
);
captions.push(anc);
}
Err(err) => {
gst::error!(CAT, "Failed to parse NDI C608 metadata: {err}");
@@ -311,11 +306,7 @@ impl NDICCMetaDecoder {
Ok(v210_buf) => match self.parse_for_cea708(&v210_buf) {
Ok(None) => (),
Ok(Some(anc)) => {
gst_video::VideoCaptionMeta::add(
buffer,
gst_video::VideoCaptionType::Cea708Cdp,
anc.data(),
);
captions.push(anc);
}
Err(err) => {
gst::error!(CAT, "Failed to parse NDI C708 metadata: {err}");
@@ -333,7 +324,7 @@ impl NDICCMetaDecoder {
self.xml_buf.clear();
}
Ok(())
Ok(captions)
}
fn parse_for_cea608(&mut self, input: &[u8]) -> Result<Option<VideoAncillary>> {
@@ -510,39 +501,36 @@ mod tests {
fn decode_ndi_meta_c608() {
gst::init().unwrap();
let mut buf = gst::Buffer::new();
let mut ndi_cc_decoder = NDICCMetaDecoder::new(1920);
ndi_cc_decoder
.decode(
"<C608 line=\"128\">AAAAAP8D8D8AhAUAAgEwIAAABgCUAcASAJgKAAAAAAA=</C608>",
&mut buf,
)
let captions = ndi_cc_decoder
.decode("<C608 line=\"128\">AAAAAP8D8D8AhAUAAgEwIAAABgCUAcASAJgKAAAAAAA=</C608>")
.unwrap();
let mut cc_meta_iter = buf.iter_meta::<gst_video::VideoCaptionMeta>();
let cc_meta = cc_meta_iter.next().unwrap();
assert_eq!(cc_meta.caption_type(), VideoCaptionType::Cea608S3341a);
assert_eq!(cc_meta.data(), [0x80, 0x94, 0x2c]);
assert!(cc_meta_iter.next().is_none());
assert_eq!(captions.len(), 1);
assert_eq!(
captions[0].did16(),
gst_video::VideoAncillaryDID16::S334Eia608
);
assert_eq!(captions[0].data(), [0x80, 0x94, 0x2c]);
}
#[test]
fn decode_ndi_meta_c708() {
gst::init().unwrap();
let mut buf = gst::Buffer::new();
let mut ndi_cc_decoder = NDICCMetaDecoder::new(1920);
ndi_cc_decoder.decode(
let captions = ndi_cc_decoder.decode(
"<C708 line=\"10\">AAAAAP8D8D8AhAUAAQFQJQBYCgBpAlAlAPwIAEMBACAAAAgAcgKAHwDwCwCUAcASAOQLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADQCQAAAgAgAGwIALcCAAAAAAAAAAAAAA==</C708>",
&mut buf,
)
.unwrap();
let mut cc_meta_iter = buf.iter_meta::<gst_video::VideoCaptionMeta>();
let cc_meta = cc_meta_iter.next().unwrap();
assert_eq!(cc_meta.caption_type(), VideoCaptionType::Cea708Cdp);
assert_eq!(captions.len(), 1);
assert_eq!(
cc_meta.data(),
captions[0].did16(),
gst_video::VideoAncillaryDID16::S334Eia708
);
assert_eq!(
captions[0].data(),
[
0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
@@ -553,16 +541,14 @@ mod tests {
0x1b,
]
);
assert!(cc_meta_iter.next().is_none());
}
#[test]
fn decode_ndi_meta_c708_newlines_and_indent() {
gst::init().unwrap();
let mut buf = gst::Buffer::new();
let mut ndi_cc_decoder = NDICCMetaDecoder::new(1920);
ndi_cc_decoder
let captions = ndi_cc_decoder
.decode(
r#"<C708 line=\"10\">
AAAAAP8D8D8AhAUAAQFQJQBYCgBpAlAlAPwIAEMBACAAAAgAcgKAHwDwCwCUAcASAOQ
@@ -572,15 +558,16 @@ mod tests {
6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADQCQAAAgAgAGwIALcCAAAAAAA
AAAAAAA==
</C708>"#,
&mut buf,
)
.unwrap();
let mut cc_meta_iter = buf.iter_meta::<gst_video::VideoCaptionMeta>();
let cc_meta = cc_meta_iter.next().unwrap();
assert_eq!(cc_meta.caption_type(), VideoCaptionType::Cea708Cdp);
assert_eq!(captions.len(), 1);
assert_eq!(
cc_meta.data(),
captions[0].did16(),
gst_video::VideoAncillaryDID16::S334Eia708
);
assert_eq!(
captions[0].data(),
[
0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
@@ -591,51 +578,49 @@ mod tests {
0x1b,
]
);
assert!(cc_meta_iter.next().is_none());
}
#[test]
fn decode_ndi_meta_c608_newlines_spaces_inline() {
gst::init().unwrap();
let mut buf = gst::Buffer::new();
let mut ndi_cc_decoder = NDICCMetaDecoder::new(1920);
ndi_cc_decoder.decode(
let captions = ndi_cc_decoder.decode(
"<C608 line=\"128\">\n\tAAAAAP8D8\n\n\r D8AhAUA\r\n\tAgEwIAAABgCUAcASAJgKAAAAAAA= \n</C608>",
&mut buf,
)
.unwrap();
let mut cc_meta_iter = buf.iter_meta::<gst_video::VideoCaptionMeta>();
let cc_meta = cc_meta_iter.next().unwrap();
assert_eq!(cc_meta.caption_type(), VideoCaptionType::Cea608S3341a);
assert_eq!(cc_meta.data(), [0x80, 0x94, 0x2c]);
assert!(cc_meta_iter.next().is_none());
assert_eq!(captions.len(), 1);
assert_eq!(
captions[0].did16(),
gst_video::VideoAncillaryDID16::S334Eia608
);
assert_eq!(captions[0].data(), [0x80, 0x94, 0x2c]);
}
#[test]
fn decode_ndi_meta_c608_and_c708() {
gst::init().unwrap();
let mut buf = gst::Buffer::new();
let mut ndi_cc_decoder = NDICCMetaDecoder::new(1920);
ndi_cc_decoder.decode(
let captions = ndi_cc_decoder.decode(
"<C608 line=\"128\">AAAAAP8D8D8AhAUAAgEwIAAABgCUAcASAJgKAAAAAAA=</C608><C708 line=\"10\">AAAAAP8D8D8AhAUAAQFQJQBYCgBpAlAlAPwIAEMBACAAAAgAcgKAHwDwCwCUAcASAOQLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADQCQAAAgAgAGwIALcCAAAAAAAAAAAAAA==</C708>",
&mut buf,
)
.unwrap();
let mut cc_meta_iter = buf.iter_meta::<gst_video::VideoCaptionMeta>();
let cc_meta = cc_meta_iter.next().unwrap();
assert_eq!(cc_meta.caption_type(), VideoCaptionType::Cea608S3341a);
assert_eq!(cc_meta.data(), [0x80, 0x94, 0x2c]);
let cc_meta = cc_meta_iter.next().unwrap();
assert_eq!(cc_meta.caption_type(), VideoCaptionType::Cea708Cdp);
assert_eq!(captions.len(), 2);
assert_eq!(
cc_meta.data(),
captions[0].did16(),
gst_video::VideoAncillaryDID16::S334Eia608
);
assert_eq!(captions[0].data(), [0x80, 0x94, 0x2c]);
assert_eq!(
captions[1].did16(),
gst_video::VideoAncillaryDID16::S334Eia708
);
assert_eq!(
captions[1].data(),
[
0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
@@ -646,8 +631,6 @@ mod tests {
0x1b,
]
);
assert!(cc_meta_iter.next().is_none());
}
#[test]
@@ -655,13 +638,9 @@ mod tests {
gst::init().unwrap();
// Expecting </C608> found </C708>'
let mut buf = gst::Buffer::new();
let mut ndi_cc_decoder = NDICCMetaDecoder::new(1920);
ndi_cc_decoder
.decode(
"<C608 line=\"128\">AAAAAP8D8D8AhAUAAgEwIAAABgCUAcASAJgKAAAAAAA=</C708>",
&mut buf,
)
.decode("<C608 line=\"128\">AAAAAP8D8D8AhAUAAgEwIAAABgCUAcASAJgKAAAAAAA=</C708>")
.unwrap_err();
}
}


@@ -11,12 +11,13 @@ use std::u32;
use gst::glib::once_cell::sync::Lazy;
use crate::ndisrcmeta::NdiSrcMeta;
use crate::ndisys;
use crate::RecvColorFormat;
use crate::TimestampMode;
use super::receiver::{self, Buffer, Receiver, ReceiverControlHandle, ReceiverItem};
use crate::ndisrcmeta;
use super::receiver::{Receiver, ReceiverControlHandle, ReceiverItem};
use crate::ndisrcmeta::Buffer;
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
@@ -63,26 +64,11 @@ impl Default for Settings {
}
}
#[derive(Default)]
struct State {
video_info: Option<receiver::VideoInfo>,
video_caps: Option<gst::Caps>,
audio_info: Option<receiver::AudioInfo>,
audio_caps: Option<gst::Caps>,
current_latency: Option<gst::ClockTime>,
receiver: Option<Receiver>,
}
impl Default for State {
fn default() -> State {
State {
video_info: None,
video_caps: None,
audio_info: None,
audio_caps: None,
current_latency: gst::ClockTime::NONE,
receiver: None,
}
}
timestamp_mode: TimestampMode,
current_latency: Option<gst::ClockTime>,
}
pub struct NdiSrc {
@@ -447,7 +433,6 @@ impl BaseSrcImpl for NdiSrc {
settings.connect_timeout,
settings.bandwidth,
settings.color_format.into(),
settings.timestamp_mode,
settings.timeout,
settings.max_queue_length as usize,
);
@@ -462,6 +447,7 @@ impl BaseSrcImpl for NdiSrc {
Some(receiver.receiver_control_handle());
let mut state = self.state.lock().unwrap();
state.receiver = Some(receiver);
state.timestamp_mode = settings.timestamp_mode;
Ok(())
}
@@ -537,72 +523,32 @@ impl BaseSrcImpl for NdiSrc {
state.receiver = Some(recv);
match res {
ReceiverItem::Buffer(buffer) => {
let buffer = match buffer {
Buffer::Audio(mut buffer, info) => {
if state.audio_info.as_ref() != Some(&info) {
let caps = info.to_caps().map_err(|_| {
gst::element_imp_error!(
self,
gst::ResourceError::Settings,
["Invalid audio info received: {:?}", info]
);
gst::FlowError::NotNegotiated
})?;
state.audio_info = Some(info);
state.audio_caps = Some(caps);
}
{
let buffer = buffer.get_mut().unwrap();
ndisrcmeta::NdiSrcMeta::add(
buffer,
ndisrcmeta::StreamType::Audio,
state.audio_caps.as_ref().unwrap(),
);
}
buffer
}
Buffer::Video(mut buffer, info) => {
ReceiverItem::Buffer(ndi_buffer) => {
let mut latency_changed = false;
if state.video_info.as_ref() != Some(&info) {
let caps = info.to_caps().map_err(|_| {
gst::element_imp_error!(
self,
gst::ResourceError::Settings,
["Invalid video info received: {:?}", info]
);
gst::FlowError::NotNegotiated
})?;
state.video_info = Some(info);
state.video_caps = Some(caps);
latency_changed = state.current_latency != buffer.duration();
state.current_latency = buffer.duration();
if let Buffer::Video { ref frame, .. } = ndi_buffer {
let duration = gst::ClockTime::SECOND
.mul_div_floor(frame.frame_rate().1 as u64, frame.frame_rate().0 as u64);
latency_changed = state.current_latency != duration;
state.current_latency = duration;
}
let mut gst_buffer = gst::Buffer::new();
{
let buffer = buffer.get_mut().unwrap();
ndisrcmeta::NdiSrcMeta::add(
buffer,
ndisrcmeta::StreamType::Video,
state.video_caps.as_ref().unwrap(),
);
let buffer_ref = gst_buffer.get_mut().unwrap();
NdiSrcMeta::add(buffer_ref, ndi_buffer, state.timestamp_mode);
}
drop(state);
if latency_changed {
let _ = self.obj().post_message(
gst::message::Latency::builder().src(&*self.obj()).build(),
);
let _ = self
.obj()
.post_message(gst::message::Latency::builder().src(&*self.obj()).build());
}
buffer
}
};
Ok(CreateSuccess::NewBuffer(buffer))
Ok(CreateSuccess::NewBuffer(gst_buffer))
}
ReceiverItem::Timeout => Err(gst::FlowError::Eos),
ReceiverItem::Flushing => Err(gst::FlowError::Flushing),

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -4,30 +4,49 @@ use gst::prelude::*;
use std::fmt;
use std::mem;
use crate::ndi::{AudioFrame, MetadataFrame, VideoFrame};
use crate::TimestampMode;
#[repr(transparent)]
pub struct NdiSrcMeta(imp::NdiSrcMeta);
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum StreamType {
Audio,
Video,
#[derive(Debug)]
#[allow(clippy::large_enum_variant)]
pub enum Buffer {
Audio {
frame: AudioFrame,
discont: bool,
receive_time_gst: gst::ClockTime,
receive_time_real: gst::ClockTime,
},
Video {
frame: VideoFrame,
discont: bool,
receive_time_gst: gst::ClockTime,
receive_time_real: gst::ClockTime,
},
Metadata {
frame: MetadataFrame,
receive_time_gst: gst::ClockTime,
receive_time_real: gst::ClockTime,
},
}
unsafe impl Send for NdiSrcMeta {}
unsafe impl Sync for NdiSrcMeta {}
impl NdiSrcMeta {
pub fn add<'a>(
buffer: &'a mut gst::BufferRef,
stream_type: StreamType,
caps: &gst::Caps,
) -> gst::MetaRefMut<'a, Self, gst::meta::Standalone> {
pub fn add(
buffer: &mut gst::BufferRef,
ndi_buffer: Buffer,
timestamp_mode: TimestampMode,
) -> gst::MetaRefMut<Self, gst::meta::Standalone> {
unsafe {
// Manually dropping because gst_buffer_add_meta() takes ownership of the
// content of the struct
let mut params = mem::ManuallyDrop::new(imp::NdiSrcMetaParams {
caps: caps.clone(),
stream_type,
ndi_buffer,
timestamp_mode,
});
let meta = gst::ffi::gst_buffer_add_meta(
@@ -40,12 +59,8 @@ impl NdiSrcMeta {
}
}
pub fn stream_type(&self) -> StreamType {
self.0.stream_type
}
pub fn caps(&self) -> gst::Caps {
self.0.caps.clone()
pub fn take_ndi_buffer(&mut self) -> Buffer {
self.0.ndi_buffer.take().expect("can only take buffer once")
}
}
@@ -60,29 +75,30 @@ impl NdiSrcMeta {
impl fmt::Debug for NdiSrcMeta {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("NdiSrcMeta")
.field("stream_type", &self.stream_type())
.field("caps", &self.caps())
.field("ndi_buffer", &self.0.ndi_buffer)
.finish()
}
}
mod imp {
use super::StreamType;
use crate::TimestampMode;
use super::Buffer;
use glib::translate::*;
use gst::glib::once_cell::sync::Lazy;
use std::mem;
use std::ptr;
pub(super) struct NdiSrcMetaParams {
pub caps: gst::Caps,
pub stream_type: StreamType,
pub ndi_buffer: Buffer,
pub timestamp_mode: TimestampMode,
}
#[repr(C)]
pub struct NdiSrcMeta {
parent: gst::ffi::GstMeta,
pub(super) caps: gst::Caps,
pub(super) stream_type: StreamType,
pub(super) ndi_buffer: Option<Buffer>,
pub(super) timestamp_mode: TimestampMode,
}
pub(super) fn ndi_src_meta_api_get_type() -> glib::Type {
@@ -110,8 +126,8 @@ mod imp {
let meta = &mut *(meta as *mut NdiSrcMeta);
let params = ptr::read(params as *const NdiSrcMetaParams);
ptr::write(&mut meta.stream_type, params.stream_type);
ptr::write(&mut meta.caps, params.caps);
ptr::write(&mut meta.ndi_buffer, Some(params.ndi_buffer));
ptr::write(&mut meta.timestamp_mode, params.timestamp_mode);
true.into_glib()
}
@@ -122,8 +138,7 @@ mod imp {
) {
let meta = &mut *(meta as *mut NdiSrcMeta);
ptr::drop_in_place(&mut meta.stream_type);
ptr::drop_in_place(&mut meta.caps);
ptr::drop_in_place(&mut meta.ndi_buffer);
}
unsafe extern "C" fn ndi_src_meta_transform(