net/ndi: add closed caption support

Closed caption support in NDI is described as a proposal in [1] & [2].

The proposal consists of encapsulating c608 or c708 closed captions in ADF
packets and pushing them in an XML tag as part of NDI metadata.

This commit implements this proposal.

[1]: http://www.sienna-tv.com/ndi/ndiclosedcaptions.html
[2]: http://www.sienna-tv.com/ndi/ndiclosedcaptions608.html

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1320>
This commit is contained in:
François Laignel 2023-09-07 14:28:24 +02:00
parent e83238b681
commit 9604dea90a
9 changed files with 1044 additions and 0 deletions

View file

@ -14,10 +14,15 @@ gst = { package = "gstreamer", git = "https://gitlab.freedesktop.org/gstreamer/g
gst-base = { package = "gstreamer-base", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs" }
gst-audio = { package = "gstreamer-audio", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs" }
gst-video = { package = "gstreamer-video", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs" }
anyhow = "1.0"
byte-slice-cast = "1"
byteorder = "1.0"
data-encoding = "2.4.0"
atomic_refcell = "0.1"
libloading = "0.8"
quick-xml = "0.30"
smallvec = { version = "1.11", features = ["const_generics"] }
thiserror = "1.0"
[build-dependencies]
gst-plugin-version-helper = { path = "../../version-helper" }

View file

@ -42,6 +42,57 @@ Feel free to contribute to this project. Some ways you can contribute are:
* Testing with more hardware and software and reporting bugs
* Doing pull requests.
Closed Captions Support
-----------------------
Closed caption support is based on [1] & [2].
The following pipelines stream a test video with test subtitles from
gst-plugins-rs/video/closedcaption. Run them from the gst-plugins-rs root directory.
```console
# Audio/Video sink pipeline with closed captions (cc start around 0:00:14)
$ gst-launch-1.0 \
ndisinkcombiner name=ndicombiner ! ndisink ndi-name="My NDI source" \
cccombiner name=cccombiner ! videoconvert ! video/x-raw,format=UYVY ! ndicombiner.video \
videotestsrc is-live=true ! cccombiner. \
filesrc location=video/closedcaption/tests/dn2018-1217.scc ! sccparse ! cccombiner.caption \
audiotestsrc is-live=true volume=0.1 ! ndicombiner.audio
# Discover all NDI sources on the network
$ gst-device-monitor-1.0 -f Source/Network:application/x-ndi
# Audio/Video source pipeline with closed caption overlay
$ gst-launch-1.0 \
ndisrc ndi-name="_REPLACE_WITH_SOURCE_NAME_" ! ndisrcdemux name=demux \
demux.video ! queue ! cea608overlay ! videoconvert ! autovideosink \
demux.audio ! queue ! audioconvert ! autoaudiosink
# Variant 1: sink pipeline with c708 closed captions
$ gst-launch-1.0 \
ndisinkcombiner name=ndicombiner ! ndisink ndi-name="My NDI source" \
cccombiner name=cccombiner ! videoconvert ! video/x-raw,format=UYVY ! ndicombiner.video \
videotestsrc is-live=true ! cccombiner. \
filesrc location=video/closedcaption/tests/dn2018-1217.scc ! sccparse ! ccconverter ! closedcaption/x-cea-708,format=cdp ! cccombiner.caption \
audiotestsrc is-live=true volume=0.1 ! ndicombiner.audio
# Variant 2: sink pipeline with c608 and c708 closed captions
$ gst-launch-1.0 \
ndisinkcombiner name=ndicombiner ! ndisink ndi-name="My NDI source" \
cccombiner name=cccombiner_1 ! cccombiner name=cccombiner_2 ! videoconvert ! video/x-raw,format=UYVY ! ndicombiner.video \
videotestsrc is-live=true ! cccombiner_1. \
filesrc location=video/closedcaption/tests/dn2018-1217.scc ! sccparse ! tee name=cctee \
cctee. ! ccconverter ! closedcaption/x-cea-608,format=raw ! cccombiner_1.caption \
cctee. ! ccconverter ! closedcaption/x-cea-708,format=cdp ! cccombiner_2.caption \
audiotestsrc is-live=true volume=0.1 ! ndicombiner.audio
```
[1]: http://www.sienna-tv.com/ndi/ndiclosedcaptions.html
[2]: http://www.sienna-tv.com/ndi/ndiclosedcaptions608.html
License
-------
This plugin is licensed under the MPL-2 - see the [LICENSE](LICENSE-MPL-2.0) file for details

View file

@ -25,6 +25,9 @@ mod ndisrc;
mod ndisrcdemux;
mod ndisrcmeta;
mod ndi_cc_meta;
mod video_anc;
#[cfg(feature = "doc")]
use gst::prelude::*;

View file

@ -403,6 +403,10 @@ impl SendInstance {
NDIlib_send_send_audio_v3(self.0.as_ptr(), frame.as_ptr());
}
}
/// Sends the provided metadata frame through this NDI send instance.
pub fn send_metadata(&self, metadata: &MetadataFrame) {
    // SAFETY: `self.0.as_ptr()` is a valid send instance for the lifetime of
    // `self`, and `metadata.as_ptr()` points to a metadata frame owned by
    // `metadata`, which outlives this call.
    unsafe { NDIlib_send_send_metadata(self.0.as_ptr(), metadata.as_ptr()) }
}
}
impl Drop for SendInstance {

416
net/ndi/src/ndi_cc_meta.rs Normal file
View file

@ -0,0 +1,416 @@
//! NDI Closed Caption encoder and parser
//!
//! See:
//!
//! * http://www.sienna-tv.com/ndi/ndiclosedcaptions.html
//! * http://www.sienna-tv.com/ndi/ndiclosedcaptions608.html
use anyhow::{bail, Context, Result};
use data_encoding::BASE64;
use smallvec::SmallVec;
use crate::video_anc;
use crate::video_anc::VideoAncillaryAFD;
// XML tag names used by the NDI closed caption proposal.
const C608_TAG: &str = "C608";
const C608_TAG_BYTES: &[u8] = C608_TAG.as_bytes();

const C708_TAG: &str = "C708";
const C708_TAG_BYTES: &[u8] = C708_TAG.as_bytes();

// Attribute carrying the video line on which the captions are transported.
const LINE_ATTR: &str = "line";
// Default line used for raw CEA-608 captions (see `for_cea608_raw(21)`).
const DEFAULT_LINE_VALUE: &str = "21";

/// Video anc AFD content padded to 32bit alignment encoded in base64
const NDI_CC_CONTENT_MAX_LEN: usize = (video_anc::VIDEO_ANC_AFD_MAX_LEN + 3) * 3 / 2;

/// Video anc AFD padded to 32bit alignment encoded in base64
/// + XML tags with brackets and end '/'
const NDI_CC_MAX_LEN: usize = NDI_CC_CONTENT_MAX_LEN + 13;
#[derive(thiserror::Error, Debug, Eq, PartialEq)]
/// NDI Video Caption related Errors.
pub enum NDIClosedCaptionError {
    /// The buffer carries a caption type which can't be encoded in NDI metadata.
    #[error("Unsupported closed caption type {cc_type:?}")]
    UnsupportedCC {
        cc_type: gst_video::VideoCaptionType,
    },
}
impl NDIClosedCaptionError {
    /// Returns `true` when this error denotes an unsupported closed caption type.
    pub fn is_unsupported_cc(&self) -> bool {
        match self {
            Self::UnsupportedCC { .. } => true,
        }
    }
}
/// Writes `data`, zero-padded to a 32bit boundary and base64 encoded,
/// as a text event into `writer`.
///
/// `data` is only copied when padding is actually needed.
fn write_32bit_padded_base64<W>(writer: &mut quick_xml::writer::Writer<W>, data: &[u8])
where
    W: std::io::Write,
{
    use quick_xml::events::{BytesText, Event};
    use std::borrow::Cow;

    // Pad the payload with zeros up to the next 32bit boundary,
    // as expected by the NDI closed caption proposal.
    let mut input = Cow::from(data);
    let alignment_rem = input.len() % 4;
    if alignment_rem != 0 {
        let owned = input.to_mut();
        let padded_len = owned.len() + (4 - alignment_rem);
        owned.resize(padded_len, 0);
    }
    debug_assert_eq!(input.len() % 4, 0);

    let mut buf = String::with_capacity(NDI_CC_CONTENT_MAX_LEN);
    BASE64.encode_append(&input, &mut buf);

    writer
        .write_event(Event::Text(BytesText::from_escaped(buf)))
        .unwrap();
}
/// Encodes the provided VideoCaptionMeta in an NDI closed caption metadata.
///
/// Returns:
///
/// * `Ok(Some(_))`: the XML string with the encoded caption metas.
/// * `Ok(None)`: `video_buf` carries no `VideoCaptionMeta`, or one with empty data.
/// * `Err(_)`: an unsupported caption type was found or AFD encoding failed.
pub fn encode_video_caption_meta(video_buf: &gst::BufferRef) -> Result<Option<String>> {
    use crate::video_anc::VideoAncillaryAFDEncoder;
    use quick_xml::events::{BytesEnd, BytesStart, Event};
    use quick_xml::writer::Writer;

    if video_buf.meta::<gst_video::VideoCaptionMeta>().is_none() {
        return Ok(None);
    }

    // Start with an initial capacity suitable to store one ndi cc metadata
    let mut writer = Writer::new(Vec::<u8>::with_capacity(NDI_CC_MAX_LEN));

    let cc_meta_iter = video_buf.iter_meta::<gst_video::VideoCaptionMeta>();
    for cc_meta in cc_meta_iter {
        if cc_meta.data().is_empty() {
            // NOTE(review): this returns for the whole buffer, discarding any
            // caption already written to `writer` for a previous meta —
            // confirm that bailing out (rather than skipping) is intended.
            return Ok(None);
        }

        use gst_video::VideoCaptionType::*;
        match cc_meta.caption_type() {
            Cea608Raw => {
                // Raw CEA-608 carries no line information: use the default line.
                let mut anc_afd = VideoAncillaryAFDEncoder::for_cea608_raw(21);
                anc_afd.push_data(cc_meta.data()).context("Cea608Raw")?;

                let mut elem = BytesStart::new(C608_TAG);
                elem.push_attribute((LINE_ATTR, DEFAULT_LINE_VALUE));
                writer.write_event(Event::Start(elem)).unwrap();

                write_32bit_padded_base64(&mut writer, anc_afd.terminate().as_slice());

                writer
                    .write_event(Event::End(BytesEnd::new(C608_TAG)))
                    .unwrap();
            }
            Cea608S3341a => {
                // S334-1A: the first data byte carries the line number,
                // expose it as the `line` attribute.
                let mut anc_afd = VideoAncillaryAFDEncoder::for_cea608_s334_1a();
                anc_afd.push_data(cc_meta.data()).context("Cea608S3341a")?;

                let mut elem = BytesStart::new(C608_TAG);
                elem.push_attribute((LINE_ATTR, format!("{}", cc_meta.data()[0]).as_str()));
                writer.write_event(Event::Start(elem)).unwrap();

                write_32bit_padded_base64(&mut writer, anc_afd.terminate().as_slice());

                writer
                    .write_event(Event::End(BytesEnd::new(C608_TAG)))
                    .unwrap();
            }
            Cea708Cdp => {
                // CDP packets carry their own timing/line info: no attribute needed.
                let mut anc_afd = VideoAncillaryAFDEncoder::for_cea708_cdp();
                anc_afd.push_data(cc_meta.data()).context("Cea708Cdp")?;

                writer
                    .write_event(Event::Start(BytesStart::new(C708_TAG)))
                    .unwrap();

                write_32bit_padded_base64(&mut writer, anc_afd.terminate().as_slice());

                writer
                    .write_event(Event::End(BytesEnd::new(C708_TAG)))
                    .unwrap();
            }
            other => bail!(NDIClosedCaptionError::UnsupportedCC { cc_type: other }),
        }
    }

    // # Safety
    // `writer` content is guaranteed to be a valid UTF-8 string since:
    // * It contains ASCII XML tags, ASCII XML attributes and base64 encoded content
    // * ASCII & base64 are subsets of UTF-8.
    unsafe {
        let ndi_cc_meta_b = writer.into_inner();
        let ndi_cc_meta = std::str::from_utf8_unchecked(&ndi_cc_meta_b);

        Ok(Some(ndi_cc_meta.into()))
    }
}
/// A closed caption decoded from an NDI metadata XML tag.
#[derive(Debug)]
pub struct NDIClosedCaption {
    /// Caption type of the decoded payload (`Cea608S3341a` or `Cea708Cdp`).
    pub cc_type: gst_video::VideoCaptionType,
    /// Caption payload extracted from the anc AFD packet.
    pub data: VideoAncillaryAFD,
}
/// Parses the provided NDI metadata string, searching for
/// an NDI closed caption metadata.
///
/// Returns the decoded closed captions in the order in which their
/// XML tags appear in `input`. Unknown tags are silently skipped.
pub fn parse_ndi_cc_meta(input: &str) -> Result<Vec<NDIClosedCaption>> {
    use crate::video_anc::VideoAncillaryAFDParser;
    use quick_xml::events::Event;
    use quick_xml::reader::Reader;

    let mut ndi_cc = Vec::new();

    let mut reader = Reader::from_str(input);
    reader.trim_text(true);

    // Accumulates the base64 text content of the tag currently being read.
    let mut content = SmallVec::<[u8; NDI_CC_CONTENT_MAX_LEN]>::new();

    let mut buf = Vec::with_capacity(NDI_CC_MAX_LEN);
    loop {
        match reader.read_event_into(&mut buf)? {
            Event::Eof => break,
            Event::Start(_) => content.clear(),
            Event::Text(e) => content.extend(e.iter().copied()),
            Event::End(e) => match e.name().as_ref() {
                C608_TAG_BYTES => {
                    // base64 -> ADF packet -> CEA-608 payload.
                    let adf_packet = BASE64.decode(content.as_slice()).context(C608_TAG)?;

                    let data =
                        VideoAncillaryAFDParser::parse_for_cea608(&adf_packet).context(C608_TAG)?;

                    ndi_cc.push(NDIClosedCaption {
                        cc_type: gst_video::VideoCaptionType::Cea608S3341a,
                        data,
                    });
                }
                C708_TAG_BYTES => {
                    // base64 -> ADF packet -> CEA-708 CDP payload.
                    let adf_packet = BASE64.decode(content.as_slice()).context(C708_TAG)?;

                    let data =
                        VideoAncillaryAFDParser::parse_for_cea708(&adf_packet).context(C708_TAG)?;

                    ndi_cc.push(NDIClosedCaption {
                        cc_type: gst_video::VideoCaptionType::Cea708Cdp,
                        data,
                    });
                }
                _ => (),
            },
            _ => {}
        }

        buf.clear();
    }

    Ok(ndi_cc)
}
#[cfg(test)]
mod tests {
    // Encoder/parser round-trip tests. The expected XML strings are the
    // base64 encoded AFD packets produced by the `video_anc` encoder.
    use super::*;
    use gst_video::VideoCaptionType;

    #[test]
    fn encode_gst_meta_c608() {
        gst::init().unwrap();

        let mut buf = gst::Buffer::new();
        {
            let buf = buf.get_mut().unwrap();
            gst_video::VideoCaptionMeta::add(
                buf,
                VideoCaptionType::Cea608S3341a,
                &[0x80, 0x94, 0x2c],
            );
        }

        // 0x80 is the line byte -> line="128"
        assert_eq!(
            encode_video_caption_meta(&buf).unwrap().unwrap(),
            "<C608 line=\"128\">AD///WFAoDYBlEsqYAAAAA==</C608>",
        );
    }

    #[test]
    fn encode_gst_meta_c708() {
        gst::init().unwrap();

        let mut buf = gst::Buffer::new();
        {
            let buf = buf.get_mut().unwrap();
            gst_video::VideoCaptionMeta::add(
                buf,
                VideoCaptionType::Cea708Cdp,
                &[
                    0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9,
                    0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00,
                    0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00,
                    0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                    0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00,
                    0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00,
                    0xfa, 0x00, 0x00, 0x74, 0x00, 0x00, 0x1b,
                ],
            );
        }

        assert_eq!(
            encode_video_caption_meta(&buf).unwrap().unwrap(),
            "<C708>AD///WFAZVpaaZVj9Q4AgCcn4vxlEsvmAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAnSAIAhutwA=</C708>",
        );
    }

    #[test]
    fn encode_gst_meta_c608_and_c708() {
        gst::init().unwrap();

        // Two caption metas on the same buffer: both must be encoded,
        // in the order they were added.
        let mut buf = gst::Buffer::new();
        {
            let buf = buf.get_mut().unwrap();
            gst_video::VideoCaptionMeta::add(
                buf,
                VideoCaptionType::Cea608S3341a,
                &[0x80, 0x94, 0x2c],
            );
            gst_video::VideoCaptionMeta::add(
                buf,
                VideoCaptionType::Cea708Cdp,
                &[
                    0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9,
                    0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00,
                    0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00,
                    0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                    0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00,
                    0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00,
                    0xfa, 0x00, 0x00, 0x74, 0x00, 0x00, 0x1b,
                ],
            );
        }

        assert_eq!(
            encode_video_caption_meta(&buf).unwrap().unwrap(),
            "<C608 line=\"128\">AD///WFAoDYBlEsqYAAAAA==</C608><C708>AD///WFAZVpaaZVj9Q4AgCcn4vxlEsvmAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAnSAIAhutwA=</C708>",
        );
    }

    #[test]
    fn encode_gst_meta_unsupported_cc() {
        gst::init().unwrap();

        let mut buf = gst::Buffer::new();
        {
            let buf = buf.get_mut().unwrap();
            gst_video::VideoCaptionMeta::add(
                buf,
                VideoCaptionType::Cea708Raw,
                // Content doesn't matter here
                &[0x00, 0x01, 0x02, 0x03, 0x04, 0x05],
            );
        }

        let err = encode_video_caption_meta(&buf)
            .unwrap_err()
            .downcast::<NDIClosedCaptionError>()
            .unwrap();
        assert_eq!(
            err,
            NDIClosedCaptionError::UnsupportedCC {
                cc_type: VideoCaptionType::Cea708Raw
            }
        );
        assert!(err.is_unsupported_cc());
    }

    #[test]
    fn encode_gst_meta_none() {
        gst::init().unwrap();

        // No caption meta on the buffer -> no metadata produced.
        let buf = gst::Buffer::new();
        assert!(encode_video_caption_meta(&buf).unwrap().is_none());
    }

    #[test]
    fn parse_ndi_meta_c608() {
        let mut ndi_cc_list =
            parse_ndi_cc_meta("<C608 line=\"128\">AD///WFAoDYBlEsqYAAAAA==</C608>").unwrap();

        let ndi_cc = ndi_cc_list.pop().unwrap();
        assert_eq!(ndi_cc.cc_type, VideoCaptionType::Cea608S3341a);
        assert_eq!(ndi_cc.data.as_slice(), [0x80, 0x94, 0x2c]);

        assert!(ndi_cc_list.is_empty());
    }

    #[test]
    fn parse_ndi_meta_c708() {
        let mut ndi_cc_list = parse_ndi_cc_meta(
            "<C708>AD///WFAZVpaaZVj9Q4AgCcn4vxlEsvmAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAnSAIAhutwA=</C708>",
        )
        .unwrap();

        let ndi_cc = ndi_cc_list.pop().unwrap();
        assert_eq!(ndi_cc.cc_type, VideoCaptionType::Cea708Cdp);
        assert_eq!(
            ndi_cc.data.as_slice(),
            [
                0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
                0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00,
                0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00,
                0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0x74, 0x00, 0x00,
                0x1b,
            ]
        );

        assert!(ndi_cc_list.is_empty());
    }

    #[test]
    fn parse_ndi_meta_c608_and_c708() {
        let ndi_cc_list = parse_ndi_cc_meta(
            "<C608 line=\"128\">AD///WFAoDYBlEsqYAAAAA==</C608><C708>AD///WFAZVpaaZVj9Q4AgCcn4vxlEsvmAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAnSAIAhutwA=</C708>",
        )
        .unwrap();

        // Captions must come out in document order: C608 first, then C708.
        let mut ndi_cc_iter = ndi_cc_list.iter();

        let ndi_cc = ndi_cc_iter.next().unwrap();
        assert_eq!(ndi_cc.cc_type, VideoCaptionType::Cea608S3341a);
        assert_eq!(ndi_cc.data.as_slice(), [0x80, 0x94, 0x2c]);

        let ndi_cc = ndi_cc_iter.next().unwrap();
        assert_eq!(ndi_cc.cc_type, VideoCaptionType::Cea708Cdp);
        assert_eq!(
            ndi_cc.data.as_slice(),
            [
                0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
                0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00,
                0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00,
                0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0x74, 0x00, 0x00,
                0x1b,
            ]
        );

        assert!(ndi_cc_iter.next().is_none());
    }

    #[test]
    fn parse_ndi_meta_tag_mismatch() {
        // Expecting </C608>, found </C708>
        let _ =
            parse_ndi_cc_meta("<C608 line=\"128\">AD///WFAoDYBlEsqYAAAAA==</C708>").unwrap_err();
    }

    #[test]
    fn parse_ndi_meta_c608_deeper_failure() {
        // Caused by:
        // 0: Parsing anc data flags
        // 1: Not enough data
        let _ = parse_ndi_cc_meta("<C608 line=\"128\">AAA=</C608>").unwrap_err();
    }
}

View file

@ -11,6 +11,7 @@ use std::sync::Mutex;
use gst::glib::once_cell::sync::Lazy;
use crate::ndi::SendInstance;
use crate::ndi_cc_meta;
static DEFAULT_SENDER_NDI_NAME: Lazy<String> = Lazy::new(|| {
format!(
@ -303,6 +304,24 @@ impl BaseSinkImpl for NdiSink {
.map(|time| (time.nseconds() / 100) as i64)
.unwrap_or(crate::ndisys::NDIlib_send_timecode_synthesize);
match ndi_cc_meta::encode_video_caption_meta(buffer) {
Ok(None) => (),
Ok(Some(cc_data)) => {
gst::trace!(CAT, "Sending cc meta with timecode {timecode}");
let metadata_frame =
crate::ndi::MetadataFrame::new(timecode, Some(cc_data.as_str()));
state.send.send_metadata(&metadata_frame);
}
Err(err) => match err.downcast_ref::<ndi_cc_meta::NDIClosedCaptionError>() {
Some(err) if err.is_unsupported_cc() => {
gst::info!(CAT, "{err}");
}
_ => {
gst::error!(CAT, "Failed to encode Video Caption meta: {err}");
}
},
}
let frame = gst_video::VideoFrameRef::from_buffer_ref_readable(buffer, info)
.map_err(|_| {
gst::error!(CAT, imp: self, "Failed to map buffer");

View file

@ -17,6 +17,7 @@ use atomic_refcell::AtomicRefCell;
use gst::glib::once_cell::sync::Lazy;
use crate::ndi::*;
use crate::ndi_cc_meta;
use crate::ndisys;
use crate::ndisys::*;
use crate::TimestampMode;
@ -744,6 +745,7 @@ impl Receiver {
let mut first_audio_frame = true;
let mut first_frame = true;
let mut timer = time::Instant::now();
let mut pending_ndi_cc = VecDeque::<ndi_cc_meta::NDIClosedCaption>::new();
// Capture until error or shutdown
loop {
@ -812,6 +814,16 @@ impl Receiver {
first_video_frame = false;
}
}
if !pending_ndi_cc.is_empty() {
if let Ok(Buffer::Video(ref mut buffer, _)) = buffer {
let buf = buffer.get_mut().unwrap();
for ndi_cc in pending_ndi_cc.drain(..) {
gst_video::VideoCaptionMeta::add(buf, ndi_cc.cc_type, &ndi_cc.data);
}
}
}
buffer
}
Ok(Some(Frame::Audio(frame))) => {
@ -837,6 +849,13 @@ impl Receiver {
(frame.timecode() as u64 * 100).nseconds(),
metadata,
);
match ndi_cc_meta::parse_ndi_cc_meta(metadata) {
Ok(mut ndi_cc_list) => pending_ndi_cc.extend(ndi_cc_list.drain(..)),
Err(err) => {
gst::error!(CAT, obj: element, "Error parsing closed caption: {err}");
}
}
}
continue;

View file

@ -73,6 +73,8 @@ struct FFI {
send_send_audio_v3: Symbol<
fn(p_instance: NDIlib_send_instance_t, p_audio_data: *const NDIlib_audio_frame_v3_t),
>,
send_send_metadata:
Symbol<fn(p_instance: NDIlib_send_instance_t, p_metadata: *const NDIlib_metadata_frame_t)>,
}
pub type NDIlib_find_instance_t = *mut ::std::os::raw::c_void;
@ -398,6 +400,7 @@ pub fn load() -> Result<(), glib::BoolError> {
send_destroy: load_symbol!(NDIlib_send_destroy),
send_send_video_v2: load_symbol!(NDIlib_send_send_video_v2),
send_send_audio_v3: load_symbol!(NDIlib_send_send_audio_v3),
send_send_metadata: load_symbol!(NDIlib_send_send_metadata),
_library: library,
};
@ -532,3 +535,10 @@ pub unsafe fn NDIlib_send_send_audio_v3(
) {
(FFI.get_unchecked().send_send_audio_v3)(p_instance, p_audio_data)
}
/// Sends a metadata frame on the given NDI send instance.
///
/// # Safety
///
/// The FFI symbol table must have been initialized (see `load()`), and
/// `p_instance` / `p_metadata` must be valid pointers for the duration
/// of the call.
pub unsafe fn NDIlib_send_send_metadata(
    p_instance: NDIlib_send_instance_t,
    p_metadata: *const NDIlib_metadata_frame_t,
) {
    (FFI.get_unchecked().send_send_metadata)(p_instance, p_metadata)
}

517
net/ndi/src/video_anc.rs Normal file
View file

@ -0,0 +1,517 @@
//! Video Ancillary Active Format Description (AFD) encoder and parser
//! see SMPTE-291M
use anyhow::{bail, Context, Result};
use smallvec::SmallVec;
#[derive(thiserror::Error, Debug, Eq, PartialEq)]
/// Video Ancillary AFD related Errors.
pub enum VideoAncillaryAFDError {
    /// Data count differs from the one expected for the caption type.
    #[error("Unexpected data count {found}. Expected: {expected}")]
    UnexpectedDataCount { found: u8, expected: u8 },

    /// Input ended before a complete AFD packet could be parsed.
    #[error("Not enough data")]
    NotEnoughData,

    /// The leading Ancillary Data Flag words don't match `ANCILLARY_DATA_FLAGS`.
    #[error("Unexpected data flags")]
    UnexpectedDataFlags,

    #[error("Unexpected checksum {found}. Expected: {expected}")]
    WrongChecksum { found: u16, expected: u16 },

    #[error("Unexpected did {found}. Expected: {expected}")]
    UnexpectedDID { found: u16, expected: u16 },
}
/// 10bit word sequence marking the start of an Ancillary Data Flag packet.
const ANCILLARY_DATA_FLAGS: [u16; 3] = [0x000, 0x3ff, 0x3ff];
/// DID/SDID pair identifying EIA-708 (CEA-708) closed captions.
const EIA_708_ANCILLARY_DID_16: u16 = 0x6101;
/// DID/SDID pair identifying EIA-608 (CEA-608) closed captions.
const EIA_608_ANCILLARY_DID_16: u16 = 0x6102;

// Video anc AFD content:
// ADF + DID/SDID + DATA COUNT + PAYLOAD + checksum:
// 3   + 2        + 1          + 256 max + 1 = 263
// Those are 10bit words, so we need 329 bytes max.
pub const VIDEO_ANC_AFD_MAX_LEN: usize = 329;

/// Buffer holding an AFD packet (or its payload), inline up to the max AFD length.
pub type VideoAncillaryAFD = SmallVec<[u8; VIDEO_ANC_AFD_MAX_LEN]>;
/// Returns `val` as a 10bit word: bit 8 carries the even parity of `val`,
/// bit 9 its complement, bits 0-7 the value itself.
fn with_afd_parity(val: u8) -> u16 {
    let parity = (val.count_ones() & 1) as u16;
    let not_parity = parity ^ 1;

    (not_parity << 9) | (parity << 8) | u16::from(val)
}
#[derive(Debug)]
/// Video Ancillary Active Format Description (AFD) Encoder
pub struct VideoAncillaryAFDEncoder {
    // Output buffer with the packed 10bit words.
    data: VideoAncillaryAFD,
    // Number of bits already occupied in the last byte of `data` (0, 2, 4 or 6).
    offset: u8,
    // Running sum of the pushed 10bit words, finalized in `terminate()`.
    checksum: u16,
    // Number of payload bytes pushed so far (excludes DID/SDID).
    data_count: u8,
    // When set, `push_data` errors out if `data_count` would exceed this.
    expected_data_count: Option<u8>,
}
impl VideoAncillaryAFDEncoder {
    /// Builds an encoder for raw CEA-608 captions carried on the provided `line`.
    ///
    /// The line number is pushed as the first payload byte, leaving room for
    /// the 2 bytes of the CEA-608 word (expected data count: 3).
    pub fn for_cea608_raw(line: u8) -> Self {
        let mut this = Self::new(EIA_608_ANCILLARY_DID_16);
        this.expected_data_count = Some(3);
        this.push_data(&[line]).unwrap();

        this
    }

    /// Builds an encoder for CEA-608 S334-1A captions
    /// (the caller pushes the line byte and the CEA-608 word: 3 bytes total).
    pub fn for_cea608_s334_1a() -> Self {
        let mut this = Self::new(EIA_608_ANCILLARY_DID_16);
        this.expected_data_count = Some(3);

        this
    }

    /// Builds an encoder for CEA-708 CDP captions (no fixed data count).
    pub fn for_cea708_cdp() -> Self {
        Self::new(EIA_708_ANCILLARY_DID_16)
    }

    /// Builds an encoder with the AFD preamble for the provided DID/SDID pair.
    fn new(did16: u16) -> Self {
        let mut this = VideoAncillaryAFDEncoder {
            data: SmallVec::new(),
            offset: 0,
            checksum: 0,
            data_count: 0,
            expected_data_count: None,
        };

        // Ancillary Data Flag, component AFD description
        this.push_raw_10bit_word(ANCILLARY_DATA_FLAGS[0]);
        this.push_raw_10bit_word(ANCILLARY_DATA_FLAGS[1]);
        this.push_raw_10bit_word(ANCILLARY_DATA_FLAGS[2]);

        // did / sdid: not part of data count
        let did_sdid: [u8; 2] = did16.to_be_bytes();
        this.push_as_10bit_word(did_sdid[0]);
        this.push_as_10bit_word(did_sdid[1]);

        // Reserved for data count (patched with the actual value in `terminate()`)
        this.push_raw_10bit_word(0x000);

        this
    }

    /// Pushes the provided `word` as a 10 bits value.
    ///
    /// The 10bits lsb are pushed at current offset as is.
    fn push_raw_10bit_word(&mut self, word: u16) {
        debug_assert_eq!(word & 0xfc00, 0);
        let word = word & 0x3ff;

        // Four 10bit words pack into exactly 5 bytes, so the bit offset
        // cycles through 0 -> 2 -> 4 -> 6 -> 0.
        match self.offset {
            0 => {
                self.data.push((word >> 2) as u8);
                self.data.push((word << 6) as u8);
                self.offset = 2;
            }
            2 => {
                *self.data.last_mut().unwrap() |= (word >> 4) as u8;
                self.data.push((word << 4) as u8);
                self.offset = 4;
            }
            4 => {
                *self.data.last_mut().unwrap() |= (word >> 6) as u8;
                self.data.push((word << 2) as u8);
                self.offset = 6;
            }
            6 => {
                *self.data.last_mut().unwrap() |= (word >> 8) as u8;
                self.data.push(word as u8);
                self.offset = 0;
            }
            _ => unreachable!(),
        }
    }

    /// Pushes the provided `value` as a 10 bits value.
    ///
    /// The `value` is:
    ///
    /// - prepended with the parity bits,
    /// - pushed at current buffer offset,
    /// - pushed to the checksum.
    fn push_as_10bit_word(&mut self, value: u8) {
        let pval = with_afd_parity(value);
        self.push_raw_10bit_word(pval);
        self.checksum += pval;
    }

    /// Pushes the provided each item in `data` as a 10 bits value.
    ///
    /// The `value` is:
    ///
    /// - prepended with the parity bits,
    /// - pushed at current buffer offset,
    /// - pushed to the checksum.
    ///
    /// The data count is incremented for each pushed value.
    /// If the expected data count is defined and data count exceeds it,
    /// `VideoAncillaryAFDError::UnexpectedDataCount` is returned.
    pub fn push_data(&mut self, data: &[u8]) -> Result<()> {
        for val in data {
            self.data_count += 1;
            if let Some(expected_data_count) = self.expected_data_count {
                if self.data_count > expected_data_count {
                    bail!(VideoAncillaryAFDError::UnexpectedDataCount {
                        found: self.data_count,
                        expected: expected_data_count,
                    });
                }
            }

            self.push_as_10bit_word(*val);
        }

        Ok(())
    }

    /// Terminates and returns the Video Ancillary AFD buffer.
    pub fn terminate(mut self) -> VideoAncillaryAFD {
        // update data_count starting at idx 6, offset 2
        let data_count = with_afd_parity(self.data_count);
        self.data[6] |= (data_count >> 4) as u8;
        self.data[7] |= (data_count << 4) as u8;

        // Finalize the checksum: keep the 9bit sum, then set bit 9
        // to the complement of bit 8.
        self.checksum = (self.checksum + data_count) & 0x1ff;
        self.checksum |= (!(self.checksum >> 8)) << 9;
        self.checksum &= 0x3ff;

        self.push_raw_10bit_word(self.checksum);

        self.data
    }
}
#[derive(Debug)]
/// Video Ancillary Active Format Description (AFD) Parser
pub struct VideoAncillaryAFDParser<'a> {
    // Packed 10bit word input being parsed.
    input: &'a [u8],
    // Decoded payload bytes.
    data: VideoAncillaryAFD,
    // DID/SDID pair read from the packet.
    did: u16,
    // Byte index of the next word in `input`.
    idx: usize,
    // Bit offset (0, 2, 4 or 6) of the next word within `input[idx]`.
    offset: u8,
    // Running sum of the pulled 10bit words, finalized against the packet checksum.
    checksum: u16,
    // Data count read from the packet.
    data_count: u8,
}
impl<'a> VideoAncillaryAFDParser<'a> {
    /// Parses `input` as an AFD packet and returns the CEA-608 payload.
    ///
    /// Fails if the DID/SDID is not CEA-608 or the data count is not 3.
    pub fn parse_for_cea608(input: &'a [u8]) -> Result<VideoAncillaryAFD> {
        let this = Self::parse(input)?;

        if this.did != EIA_608_ANCILLARY_DID_16 {
            bail!(VideoAncillaryAFDError::UnexpectedDID {
                found: this.did,
                expected: EIA_608_ANCILLARY_DID_16,
            });
        }

        if this.data_count != 3 {
            bail!(VideoAncillaryAFDError::UnexpectedDataCount {
                found: this.data_count,
                expected: 3,
            });
        }

        Ok(this.data)
    }

    /// Parses `input` as an AFD packet and returns the CEA-708 payload.
    ///
    /// Fails if the DID/SDID is not CEA-708.
    pub fn parse_for_cea708(input: &'a [u8]) -> Result<VideoAncillaryAFD> {
        let this = Self::parse(input)?;

        if this.did != EIA_708_ANCILLARY_DID_16 {
            bail!(VideoAncillaryAFDError::UnexpectedDID {
                found: this.did,
                expected: EIA_708_ANCILLARY_DID_16,
            });
        }

        Ok(this.data)
    }

    /// Parses the common AFD layout: flags, DID/SDID, data count, payload, checksum.
    fn parse(input: &'a [u8]) -> Result<Self> {
        let mut this = VideoAncillaryAFDParser {
            input,
            data: SmallVec::new(),
            did: 0,
            idx: 0,
            offset: 0,
            checksum: 0,
            data_count: 0,
        };

        let mut anc_data_flags = [0u16; 3];
        anc_data_flags[0] = this
            .pull_raw_10bit_word()
            .context("Parsing anc data flags")?;
        anc_data_flags[1] = this
            .pull_raw_10bit_word()
            .context("Parsing anc data flags")?;
        anc_data_flags[2] = this
            .pull_raw_10bit_word()
            .context("Parsing anc data flags")?;

        if anc_data_flags != ANCILLARY_DATA_FLAGS {
            bail!(VideoAncillaryAFDError::UnexpectedDataFlags);
        }

        let did = this.pull_from_10bit_word().context("Parsing did")?;
        let sdid = this.pull_from_10bit_word().context("Parsing sdid")?;
        this.did = u16::from_be_bytes([did, sdid]);

        let data_count = this.pull_from_10bit_word().context("Parsing data_count")?;
        for _ in 0..data_count {
            let val = this.pull_from_10bit_word().context("Parsing data")?;
            this.data.push(val);
        }
        this.data_count = data_count;

        let found_checksum = this.pull_raw_10bit_word().context("Parsing checksum")?;

        // Finalize the expected checksum the same way the encoder does:
        // keep the 9bit sum, set bit 9 to the complement of bit 8.
        this.checksum &= 0x1ff;
        this.checksum |= (!(this.checksum >> 8)) << 9;
        this.checksum &= 0x3ff;

        if this.checksum != found_checksum {
            bail!(VideoAncillaryAFDError::WrongChecksum {
                found: found_checksum,
                expected: this.checksum
            });
        }

        Ok(this)
    }

    /// Pulls the next raw 10bit word from `input` at (`idx`, `offset`).
    fn pull_raw_10bit_word(&mut self) -> Result<u16> {
        // A word always straddles input[idx] and input[idx + 1].
        if self.input.len() <= self.idx + 1 {
            bail!(VideoAncillaryAFDError::NotEnoughData);
        }

        let word;

        let msb = self.input[self.idx] as u16;
        self.idx += 1;
        let lsb = self.input[self.idx] as u16;

        match self.offset {
            0 => {
                word = (msb << 2) | (lsb >> 6);
                self.offset = 2;
            }
            2 => {
                word = ((msb & 0x3f) << 4) | (lsb >> 4);
                self.offset = 4;
            }
            4 => {
                word = ((msb & 0x0f) << 6) | (lsb >> 2);
                self.offset = 6;
            }
            6 => {
                word = ((msb & 0x03) << 8) | lsb;
                self.idx += 1;
                self.offset = 0;
            }
            _ => unreachable!(),
        }

        Ok(word)
    }

    /// Pulls a 8bit value from next 10bit word.
    ///
    /// The word is added to the running checksum; parity is not verified.
    fn pull_from_10bit_word(&mut self) -> Result<u8> {
        let word = self.pull_raw_10bit_word()?;
        let val = (word & 0xff) as u8;

        // Don't check parity: we will rely on the checksum for integrity
        self.checksum += word;

        Ok(val)
    }
}
#[cfg(test)]
mod tests {
    // AFD encoder/parser tests. The byte arrays are packed 10bit word
    // streams as produced by `VideoAncillaryAFDEncoder`.
    use super::*;

    #[test]
    fn afd_encode_cea608_raw() {
        let mut anc_afd = VideoAncillaryAFDEncoder::for_cea608_raw(21);
        anc_afd.push_data(&[0x94, 0x2c]).unwrap();
        let buf = anc_afd.terminate();
        assert_eq!(
            buf.as_slice(),
            [0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0xa0, 0x34, 0x55, 0x94, 0x4b, 0x23, 0xb0]
        );
    }

    #[test]
    fn afd_encode_cea608_s334_1a() {
        let mut anc_afd = VideoAncillaryAFDEncoder::for_cea608_s334_1a();
        anc_afd.push_data(&[0x80, 0x94, 0x2c]).unwrap();
        let buf = anc_afd.terminate();
        assert_eq!(
            buf.as_slice(),
            [0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0xa0, 0x36, 0x01, 0x94, 0x4b, 0x2a, 0x60]
        );
    }

    #[test]
    fn afd_encode_cea608_s334_1a_data_count_exceeded() {
        // S334-1A expects exactly 3 bytes: pushing a 4th must fail.
        let mut anc_afd = VideoAncillaryAFDEncoder::for_cea608_s334_1a();
        assert_eq!(
            anc_afd
                .push_data(&[0x80, 0x94, 0x2c, 0xab])
                .unwrap_err()
                .downcast::<VideoAncillaryAFDError>()
                .unwrap(),
            VideoAncillaryAFDError::UnexpectedDataCount {
                expected: 3,
                found: 4
            },
        );
    }

    #[test]
    fn afd_encode_cea708_cdp() {
        let mut anc_afd = VideoAncillaryAFDEncoder::for_cea708_cdp();
        anc_afd
            .push_data(&[
                0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
                0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00,
                0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00,
                0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0x74, 0x00, 0x00,
                0x1b,
            ])
            .unwrap();
        let buf = anc_afd.terminate();
        assert_eq!(
            buf.as_slice(),
            [
                0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0x65, 0x5a, 0x5a, 0x69, 0x95, 0x63, 0xf5, 0x0e,
                0x00, 0x80, 0x27, 0x27, 0xe2, 0xfc, 0x65, 0x12, 0xcb, 0xe6, 0x00, 0x80, 0x2f, 0xa8,
                0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea, 0x00, 0x80, 0x2f,
                0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea, 0x00, 0x80,
                0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea, 0x00,
                0x80, 0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea,
                0x00, 0x80, 0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b,
                0xea, 0x00, 0x80, 0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0x74, 0x80, 0x20,
                0x08, 0x6e, 0xb7,
            ]
        );
    }

    #[test]
    fn parse_afd_cea608() {
        let buf = VideoAncillaryAFDParser::parse_for_cea608(&[
            0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0xa0, 0x34, 0x55, 0x94, 0x4b, 0x23, 0xb0,
        ])
        .unwrap();

        assert_eq!(buf.as_slice(), [0x15, 0x94, 0x2c]);
    }

    #[test]
    fn parse_afd_cea608_32bit_padded() {
        // Trailing zero padding (as added for NDI metadata) must be ignored.
        let buf = VideoAncillaryAFDParser::parse_for_cea608(&[
            0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0xa0, 0x34, 0x55, 0x94, 0x4b, 0x23, 0xb0, 0x00,
            0x00, 0x00,
        ])
        .unwrap();

        assert_eq!(buf.as_slice(), [0x15, 0x94, 0x2c]);
    }

    #[test]
    fn parse_afd_cea708() {
        let buf = VideoAncillaryAFDParser::parse_for_cea708(&[
            0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0x65, 0x5a, 0x5a, 0x69, 0x95, 0x63, 0xf5, 0x0e,
            0x00, 0x80, 0x27, 0x27, 0xe2, 0xfc, 0x65, 0x12, 0xcb, 0xe6, 0x00, 0x80, 0x2f, 0xa8,
            0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea, 0x00, 0x80, 0x2f,
            0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea, 0x00, 0x80,
            0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea, 0x00,
            0x80, 0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea,
            0x00, 0x80, 0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b,
            0xea, 0x00, 0x80, 0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0x74, 0x80, 0x20,
            0x08, 0x6e, 0xb7,
        ])
        .unwrap();

        assert_eq!(
            buf.as_slice(),
            [
                0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
                0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00,
                0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00,
                0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0x74, 0x00, 0x00,
                0x1b,
            ]
        );
    }

    #[test]
    fn parse_afd_cea608_not_enough_data() {
        assert_eq!(
            VideoAncillaryAFDParser::parse_for_cea608(&[0x00, 0x3f])
                .unwrap_err()
                .downcast::<VideoAncillaryAFDError>()
                .unwrap(),
            VideoAncillaryAFDError::NotEnoughData,
        );
    }

    #[test]
    fn parse_afd_cea608_unexpected_data_flags() {
        assert_eq!(
            VideoAncillaryAFDParser::parse_for_cea608(&[
                0x00, 0x3f, 0xff, 0xdd, 0x61, 0x40, 0x60, 0x09, 0x88
            ])
            .unwrap_err()
            .downcast::<VideoAncillaryAFDError>()
            .unwrap(),
            VideoAncillaryAFDError::UnexpectedDataFlags,
        );
    }

    #[test]
    fn parse_afd_cea608_unexpected_did() {
        // Packet carries the CEA-708 DID but is parsed as CEA-608.
        assert_eq!(
            VideoAncillaryAFDParser::parse_for_cea608(&[
                0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0x60, 0x09, 0x88
            ])
            .unwrap_err()
            .downcast::<VideoAncillaryAFDError>()
            .unwrap(),
            VideoAncillaryAFDError::UnexpectedDID {
                found: EIA_708_ANCILLARY_DID_16,
                expected: EIA_608_ANCILLARY_DID_16
            },
        );
    }

    #[test]
    fn parse_afd_cea708_wrong_checksum() {
        assert_eq!(
            VideoAncillaryAFDParser::parse_for_cea708(&[
                0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0x60, 0x09, 0x81
            ])
            .unwrap_err()
            .downcast::<VideoAncillaryAFDError>()
            .unwrap(),
            VideoAncillaryAFDError::WrongChecksum {
                found: 0x260,
                expected: 0x262
            },
        );
    }
}