ndi: use v210 encoding for cc and attach to video frame

The NDI closed captions specification [1] defines a variation where metadata is
attached to the video frame. This requires the AFD buffer to be v210-encoded.
This commit applies this strategy.

Another difference from the previous version is that when an error occurs while
encoding or decoding a metadata item, the remaining items are still tried
instead of failing immediately.

Receiving closed captions as standalone metadata is kept for interoperability
purposes. In this case, the metadata is also expected to be v210-encoded.

[1]: http://www.sienna-tv.com/ndi/ndiclosedcaptions.html

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1356>
This commit is contained in:
François Laignel 2023-10-11 21:25:29 +02:00
parent 5b03f7d7b0
commit 022afa6375
6 changed files with 555 additions and 842 deletions

View file

@ -26,7 +26,6 @@ mod ndisrcdemux;
mod ndisrcmeta;
mod ndi_cc_meta;
mod video_anc;
#[cfg(feature = "doc")]
use gst::prelude::*;

View file

@ -461,6 +461,7 @@ pub enum VideoFrame<'a> {
BorrowedGst(
NDIlib_video_frame_v2_t,
&'a gst_video::VideoFrameRef<&'a gst::BufferRef>,
Option<&'a std::ffi::CStr>,
),
}
@ -478,7 +479,7 @@ impl std::error::Error for TryFromVideoFrameError {}
impl<'a> VideoFrame<'a> {
pub fn xres(&self) -> i32 {
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, ..) => {
frame.xres
}
}
@ -486,7 +487,7 @@ impl<'a> VideoFrame<'a> {
pub fn yres(&self) -> i32 {
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, ..) => {
frame.yres
}
}
@ -494,7 +495,7 @@ impl<'a> VideoFrame<'a> {
pub fn fourcc(&self) -> NDIlib_FourCC_video_type_e {
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, ..) => {
frame.FourCC
}
}
@ -502,7 +503,7 @@ impl<'a> VideoFrame<'a> {
pub fn frame_rate(&self) -> (i32, i32) {
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, ..) => {
(frame.frame_rate_N, frame.frame_rate_D)
}
}
@ -510,7 +511,7 @@ impl<'a> VideoFrame<'a> {
pub fn picture_aspect_ratio(&self) -> f32 {
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, ..) => {
frame.picture_aspect_ratio
}
}
@ -518,7 +519,7 @@ impl<'a> VideoFrame<'a> {
pub fn frame_format_type(&self) -> NDIlib_frame_format_type_e {
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, ..) => {
frame.frame_format_type
}
}
@ -526,7 +527,7 @@ impl<'a> VideoFrame<'a> {
pub fn timecode(&self) -> i64 {
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, ..) => {
frame.timecode
}
}
@ -565,8 +566,8 @@ impl<'a> VideoFrame<'a> {
return unsafe {
use std::slice;
match self {
VideoFrame::BorrowedRecv(ref frame, _)
| VideoFrame::BorrowedGst(ref frame, _) => Some(slice::from_raw_parts(
VideoFrame::BorrowedRecv(ref frame, ..)
| VideoFrame::BorrowedGst(ref frame, ..) => Some(slice::from_raw_parts(
frame.p_data as *const u8,
frame_size as usize,
)),
@ -588,8 +589,8 @@ impl<'a> VideoFrame<'a> {
return unsafe {
use std::slice;
match self {
VideoFrame::BorrowedRecv(ref frame, _)
| VideoFrame::BorrowedGst(ref frame, _) => Some(slice::from_raw_parts(
VideoFrame::BorrowedRecv(ref frame, ..)
| VideoFrame::BorrowedGst(ref frame, ..) => Some(slice::from_raw_parts(
frame.p_data as *const u8,
frame.line_stride_or_data_size_in_bytes as usize,
)),
@ -625,12 +626,11 @@ impl<'a> VideoFrame<'a> {
}
let data = match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
slice::from_raw_parts(
frame.p_data as *const u8,
frame.line_stride_or_data_size_in_bytes as usize,
)
}
VideoFrame::BorrowedRecv(ref frame, ..)
| VideoFrame::BorrowedGst(ref frame, ..) => slice::from_raw_parts(
frame.p_data as *const u8,
frame.line_stride_or_data_size_in_bytes as usize,
),
};
let mut cursor = Cursor::new(data);
@ -675,7 +675,7 @@ impl<'a> VideoFrame<'a> {
pub fn line_stride_or_data_size_in_bytes(&self) -> i32 {
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, ..) => {
let stride = frame.line_stride_or_data_size_in_bytes;
if stride != 0 {
@ -705,7 +705,7 @@ impl<'a> VideoFrame<'a> {
pub fn metadata(&self) -> Option<&str> {
unsafe {
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, ..) => {
if frame.p_metadata.is_null() {
None
} else {
@ -718,7 +718,7 @@ impl<'a> VideoFrame<'a> {
pub fn timestamp(&self) -> i64 {
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, ..) => {
frame.timestamp
}
}
@ -726,12 +726,15 @@ impl<'a> VideoFrame<'a> {
pub fn as_ptr(&self) -> *const NDIlib_video_frame_v2_t {
match self {
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, _) => frame,
VideoFrame::BorrowedRecv(ref frame, _) | VideoFrame::BorrowedGst(ref frame, ..) => {
frame
}
}
}
pub fn try_from_video_frame(
frame: &'a gst_video::VideoFrameRef<&'a gst::BufferRef>,
metadata: Option<&'a std::ffi::CStr>,
timecode: i64,
) -> Result<Self, TryFromVideoFrameError> {
// Planar formats must be in contiguous memory
@ -837,11 +840,11 @@ impl<'a> VideoFrame<'a> {
timecode,
p_data: frame.plane_data(0).unwrap().as_ptr() as *const ::std::os::raw::c_char,
line_stride_or_data_size_in_bytes: frame.plane_stride()[0],
p_metadata: ptr::null(),
p_metadata: metadata.map_or(ptr::null(), |meta| meta.as_ptr()),
timestamp: 0,
};
Ok(VideoFrame::BorrowedGst(ndi_frame, frame))
Ok(VideoFrame::BorrowedGst(ndi_frame, frame, metadata))
}
}

View file

@ -5,12 +5,23 @@
//! * http://www.sienna-tv.com/ndi/ndiclosedcaptions.html
//! * http://www.sienna-tv.com/ndi/ndiclosedcaptions608.html
use anyhow::{bail, Context, Result};
use anyhow::{bail, Result};
use data_encoding::BASE64;
use smallvec::SmallVec;
use crate::video_anc;
use crate::video_anc::VideoAncillaryAFD;
use gst::glib::once_cell::sync::Lazy;
use gst::glib::translate::IntoGlib;
use gst_video::{VideoAncillary, VideoAncillaryDID16, VideoVBIEncoder, VideoVBIParser};
use std::ffi::CString;
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"ndiccmeta",
gst::DebugColorFlags::empty(),
Some("NewTek NDI CC Meta"),
)
});
const C608_TAG: &str = "C608";
const C608_TAG_BYTES: &[u8] = C608_TAG.as_bytes();
@ -19,197 +30,362 @@ const C708_TAG: &str = "C708";
const C708_TAG_BYTES: &[u8] = C708_TAG.as_bytes();
const LINE_ATTR: &str = "line";
const DEFAULT_LINE_VALUE: &str = "21";
const DEFAULT_LINE: u8 = 21;
const DEFAULT_LINE_STR: &str = "21";
const DEFAULT_LINE_C708_STR: &str = "10";
// Video anc AFD content:
// ADF + DID/SDID + DATA COUNT + PAYLOAD + checksum:
// 3 + 2 + 1 + 256 max + 1 = 263
// Those are 10bit words, so we need 329 bytes max.
pub const VIDEO_ANC_AFD_CAPACITY: usize = 329;
/// Video anc AFD content padded to 32bit alignment encoded in base64 + padding
const NDI_CC_CONTENT_CAPACITY: usize = (video_anc::VIDEO_ANC_AFD_CAPACITY + 3) * 3 / 2 + 2;
const NDI_CC_CONTENT_CAPACITY: usize = (VIDEO_ANC_AFD_CAPACITY + 3) * 3 / 2 + 2;
/// Video anc AFD padded to 32bit alignment encoded in base64
/// + XML tags with brackets and end '/' + attr
const NDI_CC_CAPACITY: usize = NDI_CC_CONTENT_CAPACITY + 13 + 10;
#[derive(thiserror::Error, Debug, Eq, PartialEq)]
/// NDI Video Caption related Errors.
pub enum NDIClosedCaptionError {
/// NDI Video Captions related Errors.
pub enum NDICCError {
#[error("Unsupported closed caption type {cc_type:?}")]
UnsupportedCC {
cc_type: gst_video::VideoCaptionType,
},
#[error("Unexpected AFD data count {found}. Expected: {expected}")]
UnexpectedAfdDataCount { found: u8, expected: u8 },
#[error("Unexpected AFD did {found}. Expected: {expected}")]
UnexpectedAfdDid { found: i32, expected: i32 },
}
impl NDIClosedCaptionError {
pub fn is_unsupported_cc(&self) -> bool {
matches!(self, Self::UnsupportedCC { .. })
impl NDICCError {
fn new_unexpected_afd_did(found: VideoAncillaryDID16, expected: VideoAncillaryDID16) -> Self {
NDICCError::UnexpectedAfdDid {
found: found.into_glib(),
expected: expected.into_glib(),
}
}
}
fn write_32bit_padded_base64<W>(writer: &mut quick_xml::writer::Writer<W>, data: &[u8])
where
W: std::io::Write,
{
use quick_xml::events::{BytesText, Event};
use std::borrow::Cow;
/// NDI Closed Captions Meta encoder.
pub struct NDICCMetaEncoder {
v210_encoder: VideoVBIEncoder,
width: u32,
line_buf: Vec<u8>,
}
let mut buf = String::with_capacity(NDI_CC_CONTENT_CAPACITY);
let mut input = Cow::from(data);
impl NDICCMetaEncoder {
pub fn new(width: u32) -> Self {
let v210_encoder = VideoVBIEncoder::try_new(gst_video::VideoFormat::V210, width).unwrap();
let alignment_rem = input.len() % 4;
if alignment_rem != 0 {
let owned = input.to_mut();
let mut padding = 4 - alignment_rem;
while padding != 0 {
owned.push(0);
padding -= 1;
NDICCMetaEncoder {
line_buf: vec![0; v210_encoder.line_buffer_len()],
v210_encoder,
width,
}
}
debug_assert_eq!(input.len() % 4, 0);
buf.clear();
BASE64.encode_append(&input, &mut buf);
writer
.write_event(Event::Text(BytesText::from_escaped(buf)))
.unwrap();
}
/// Encodes the provided VideoCaptionMeta in an NDI closed caption metadata.
pub fn encode_video_caption_meta(video_buf: &gst::BufferRef) -> Result<Option<String>> {
use crate::video_anc::VideoAncillaryAFDEncoder;
use quick_xml::events::{BytesEnd, BytesStart, Event};
use quick_xml::writer::Writer;
if video_buf.meta::<gst_video::VideoCaptionMeta>().is_none() {
return Ok(None);
pub fn set_width(&mut self, width: u32) {
if width != self.width {
*self = Self::new(width);
}
}
// Start with an initial capacity suitable to store one ndi cc metadata
let mut writer = Writer::new(Vec::<u8>::with_capacity(NDI_CC_CAPACITY));
/// Encodes the VideoCaptionMeta of the provided `gst::Buffer`
/// in an NDI closed caption metadata suitable to be attached to an NDI video frame.
pub fn encode(&mut self, video_buf: &gst::BufferRef) -> Option<CString> {
use quick_xml::events::{BytesEnd, BytesStart, Event};
use quick_xml::writer::Writer;
let cc_meta_iter = video_buf.iter_meta::<gst_video::VideoCaptionMeta>();
for cc_meta in cc_meta_iter {
if cc_meta.data().is_empty() {
video_buf.meta::<gst_video::VideoCaptionMeta>()?;
// Start with an initial capacity suitable to store one ndi cc metadata
let mut xml_writer = Writer::new(Vec::with_capacity(NDI_CC_CAPACITY));
let cc_meta_iter = video_buf.iter_meta::<gst_video::VideoCaptionMeta>();
for cc_meta in cc_meta_iter {
let cc_data = cc_meta.data();
if cc_data.is_empty() {
continue;
}
use gst_video::VideoCaptionType::*;
match cc_meta.caption_type() {
Cea608Raw => {
if cc_data.len() != 2 {
let err = NDICCError::UnexpectedAfdDataCount {
found: cc_data.len() as u8,
expected: 2,
};
gst::error!(CAT, "Failed to encode Cea608Raw metadata: {err}");
continue;
}
let res = self.add_did16_ancillary(
VideoAncillaryDID16::S334Eia608,
&[DEFAULT_LINE, cc_data[0], cc_data[1]],
);
if let Err(err) = res {
gst::error!(CAT, "Failed to add Cea608Raw metadata: {err}");
continue;
}
let mut elem = BytesStart::new(C608_TAG);
elem.push_attribute((LINE_ATTR, DEFAULT_LINE_STR));
xml_writer.write_event(Event::Start(elem)).unwrap();
self.write_v210_base64(&mut xml_writer);
xml_writer
.write_event(Event::End(BytesEnd::new(C608_TAG)))
.unwrap();
}
Cea608S3341a => {
if cc_data.len() != 3 {
let err = NDICCError::UnexpectedAfdDataCount {
found: cc_data.len() as u8,
expected: 3,
};
gst::error!(CAT, "Failed to encode Cea608Raw metadata: {err}");
continue;
}
let res = self.add_did16_ancillary(VideoAncillaryDID16::S334Eia608, cc_data);
if let Err(err) = res {
gst::error!(CAT, "Failed to add Cea608S3341a metadata: {err}");
continue;
}
let mut elem = BytesStart::new(C608_TAG);
elem.push_attribute((LINE_ATTR, format!("{}", cc_meta.data()[0]).as_str()));
xml_writer.write_event(Event::Start(elem)).unwrap();
self.write_v210_base64(&mut xml_writer);
xml_writer
.write_event(Event::End(BytesEnd::new(C608_TAG)))
.unwrap();
}
Cea708Cdp => {
let res = self.add_did16_ancillary(VideoAncillaryDID16::S334Eia708, cc_data);
if let Err(err) = res {
gst::error!(CAT, "Failed to add Cea708Cdp metadata: {err}");
continue;
}
let mut elem = BytesStart::new(C708_TAG);
elem.push_attribute((LINE_ATTR, DEFAULT_LINE_C708_STR));
xml_writer.write_event(Event::Start(elem)).unwrap();
self.write_v210_base64(&mut xml_writer);
xml_writer
.write_event(Event::End(BytesEnd::new(C708_TAG)))
.unwrap();
}
other => {
gst::info!(CAT, "{}", NDICCError::UnsupportedCC { cc_type: other });
}
}
}
// # Safety
// `writer` content is guaranteed to be a C compatible String without interior 0 since:
// * It contains ASCII XML tags, ASCII XML attributes and base64 encoded content
// * ASCII & base64 are subsets of UTF-8.
unsafe {
let cc_meta = xml_writer.into_inner();
if cc_meta.is_empty() {
return None;
}
Some(CString::from_vec_unchecked(cc_meta))
}
}
fn add_did16_ancillary(&mut self, did16: VideoAncillaryDID16, data: &[u8]) -> Result<()> {
self.v210_encoder.add_did16_ancillary(
gst_video::VideoAFDDescriptionMode::Component,
did16,
data,
)?;
Ok(())
}
/// Encodes previously added data as v210 in base64 and writes it with the XML writer.
fn write_v210_base64<W>(&mut self, writer: &mut quick_xml::writer::Writer<W>)
where
W: std::io::Write,
{
use quick_xml::events::{BytesText, Event};
let anc_len = self.v210_encoder.write_line(&mut self.line_buf).unwrap();
assert_eq!(anc_len % 4, 0);
let mut xml_buf = String::with_capacity(NDI_CC_CONTENT_CAPACITY);
BASE64.encode_append(&self.line_buf[..anc_len], &mut xml_buf);
writer
.write_event(Event::Text(BytesText::from_escaped(xml_buf)))
.unwrap();
}
}
/// NDI Closed Captions Meta decoder.
pub struct NDICCMetaDecoder {
v210_parser: VideoVBIParser,
width: u32,
line_buf: Vec<u8>,
xml_content: SmallVec<[u8; NDI_CC_CONTENT_CAPACITY]>,
xml_buf: Vec<u8>,
}
impl NDICCMetaDecoder {
pub fn new(width: u32) -> Self {
let v210_parser = VideoVBIParser::try_new(gst_video::VideoFormat::V210, width).unwrap();
NDICCMetaDecoder {
line_buf: vec![0; v210_parser.line_buffer_len()],
v210_parser,
width,
xml_content: SmallVec::<[u8; NDI_CC_CONTENT_CAPACITY]>::new(),
xml_buf: Vec::with_capacity(NDI_CC_CAPACITY),
}
}
pub fn set_width(&mut self, width: u32) {
if width != self.width {
self.v210_parser =
VideoVBIParser::try_new(gst_video::VideoFormat::V210, width).unwrap();
self.line_buf = vec![0; self.v210_parser.line_buffer_len()];
self.width = width;
}
}
/// Decodes the provided NDI metadata string, searching for NDI closed captions
/// and add them as `VideoCaptionMeta` to the provided `gst::Buffer`.
pub fn decode(&mut self, input: &str, buffer: &mut gst::Buffer) -> Result<()> {
use quick_xml::events::Event;
use quick_xml::reader::Reader;
let buffer = buffer.get_mut().unwrap();
let mut reader = Reader::from_str(input);
self.xml_buf.clear();
loop {
match reader.read_event_into(&mut self.xml_buf)? {
Event::Eof => break,
Event::Start(_) => self.xml_content.clear(),
Event::Text(e) => {
self.xml_content.extend(
e.iter().copied().filter(|&b| {
(b != b' ') && (b != b'\t') && (b != b'\n') && (b != b'\r')
}),
);
}
Event::End(e) => match e.name().as_ref() {
C608_TAG_BYTES => match BASE64.decode(self.xml_content.as_slice()) {
Ok(v210_buf) => match self.parse_for_cea608(&v210_buf) {
Ok(None) => (),
Ok(Some(anc)) => {
gst_video::VideoCaptionMeta::add(
buffer,
gst_video::VideoCaptionType::Cea608S3341a,
anc.data(),
);
}
Err(err) => {
gst::error!(CAT, "Failed to parse NDI C608 metadata: {err}");
}
},
Err(err) => {
gst::error!(CAT, "Failed to decode NDI C608 metadata: {err}");
}
},
C708_TAG_BYTES => match BASE64.decode(self.xml_content.as_slice()) {
Ok(v210_buf) => match self.parse_for_cea708(&v210_buf) {
Ok(None) => (),
Ok(Some(anc)) => {
gst_video::VideoCaptionMeta::add(
buffer,
gst_video::VideoCaptionType::Cea708Cdp,
anc.data(),
);
}
Err(err) => {
gst::error!(CAT, "Failed to parse NDI C708 metadata: {err}");
}
},
Err(err) => {
gst::error!(CAT, "Failed to decode NDI C708 metadata: {err}");
}
},
_ => (),
},
_ => {}
}
self.xml_buf.clear();
}
Ok(())
}
fn parse_for_cea608(&mut self, input: &[u8]) -> Result<Option<VideoAncillary>> {
let Some(anc) = self.parse(input)? else {
return Ok(None);
};
if anc.did16() != VideoAncillaryDID16::S334Eia608 {
bail!(NDICCError::new_unexpected_afd_did(
anc.did16(),
VideoAncillaryDID16::S334Eia608,
));
}
if anc.len() != 3 {
bail!(NDICCError::UnexpectedAfdDataCount {
found: anc.len() as u8,
expected: 3,
});
}
Ok(Some(anc))
}
fn parse_for_cea708(&mut self, input: &[u8]) -> Result<Option<VideoAncillary>> {
let Some(anc) = self.parse(input)? else {
return Ok(None);
};
if anc.did16() != VideoAncillaryDID16::S334Eia708 {
bail!(NDICCError::new_unexpected_afd_did(
anc.did16(),
VideoAncillaryDID16::S334Eia708,
));
}
Ok(Some(anc))
}
fn parse(&mut self, data: &[u8]) -> Result<Option<VideoAncillary>> {
if data.is_empty() {
return Ok(None);
}
use gst_video::VideoCaptionType::*;
match cc_meta.caption_type() {
Cea608Raw => {
let mut anc_afd = VideoAncillaryAFDEncoder::for_cea608_raw(21);
anc_afd.push_data(cc_meta.data()).context("Cea608Raw")?;
self.line_buf[0..data.len()].copy_from_slice(data);
self.line_buf[data.len()..].fill(0);
self.v210_parser.add_line(self.line_buf.as_slice())?;
let mut elem = BytesStart::new(C608_TAG);
elem.push_attribute((LINE_ATTR, DEFAULT_LINE_VALUE));
writer.write_event(Event::Start(elem)).unwrap();
let opt = self.v210_parser.next_ancillary().transpose()?;
write_32bit_padded_base64(&mut writer, anc_afd.terminate().as_slice());
writer
.write_event(Event::End(BytesEnd::new(C608_TAG)))
.unwrap();
}
Cea608S3341a => {
let mut anc_afd = VideoAncillaryAFDEncoder::for_cea608_s334_1a();
anc_afd.push_data(cc_meta.data()).context("Cea608S3341a")?;
let mut elem = BytesStart::new(C608_TAG);
elem.push_attribute((LINE_ATTR, format!("{}", cc_meta.data()[0]).as_str()));
writer.write_event(Event::Start(elem)).unwrap();
write_32bit_padded_base64(&mut writer, anc_afd.terminate().as_slice());
writer
.write_event(Event::End(BytesEnd::new(C608_TAG)))
.unwrap();
}
Cea708Cdp => {
let mut anc_afd = VideoAncillaryAFDEncoder::for_cea708_cdp();
anc_afd.push_data(cc_meta.data()).context("Cea708Cdp")?;
writer
.write_event(Event::Start(BytesStart::new(C708_TAG)))
.unwrap();
write_32bit_padded_base64(&mut writer, anc_afd.terminate().as_slice());
writer
.write_event(Event::End(BytesEnd::new(C708_TAG)))
.unwrap();
}
other => bail!(NDIClosedCaptionError::UnsupportedCC { cc_type: other }),
}
Ok(opt)
}
// # Safety
// `writer` content is guaranteed to be a valid UTF-8 string since:
// * It contains ASCII XML tags, ASCII XML attributes and base64 encoded content
// * ASCII & base64 are subsets of UTF-8.
unsafe {
let ndi_cc_meta_b = writer.into_inner();
let ndi_cc_meta = std::str::from_utf8_unchecked(&ndi_cc_meta_b);
Ok(Some(ndi_cc_meta.into()))
}
}
#[derive(Debug)]
pub struct NDIClosedCaption {
pub cc_type: gst_video::VideoCaptionType,
pub data: VideoAncillaryAFD,
}
/// Parses the provided NDI metadata string, searching for
/// an NDI closed caption metadata.
pub fn parse_ndi_cc_meta(input: &str) -> Result<Vec<NDIClosedCaption>> {
use crate::video_anc::VideoAncillaryAFDParser;
use quick_xml::events::Event;
use quick_xml::reader::Reader;
let mut ndi_cc = Vec::new();
let mut reader = Reader::from_str(input);
let mut content = SmallVec::<[u8; NDI_CC_CONTENT_CAPACITY]>::new();
let mut buf = Vec::with_capacity(NDI_CC_CAPACITY);
loop {
match reader.read_event_into(&mut buf)? {
Event::Eof => break,
Event::Start(_) => content.clear(),
Event::Text(e) => {
content.extend(
e.iter()
.copied()
.filter(|&b| (b != b' ') && (b != b'\t') && (b != b'\n') && (b != b'\r')),
);
}
Event::End(e) => match e.name().as_ref() {
C608_TAG_BYTES => {
let adf_packet = BASE64.decode(content.as_slice()).context(C608_TAG)?;
let data =
VideoAncillaryAFDParser::parse_for_cea608(&adf_packet).context(C608_TAG)?;
ndi_cc.push(NDIClosedCaption {
cc_type: gst_video::VideoCaptionType::Cea608S3341a,
data,
});
}
C708_TAG_BYTES => {
let adf_packet = BASE64.decode(content.as_slice()).context(C708_TAG)?;
let data =
VideoAncillaryAFDParser::parse_for_cea708(&adf_packet).context(C708_TAG)?;
ndi_cc.push(NDIClosedCaption {
cc_type: gst_video::VideoCaptionType::Cea708Cdp,
data,
});
}
_ => (),
},
_ => {}
}
buf.clear();
}
Ok(ndi_cc)
}
#[cfg(test)]
@ -232,9 +408,10 @@ mod tests {
);
}
let mut ndi_cc_encoder = NDICCMetaEncoder::new(1920);
assert_eq!(
encode_video_caption_meta(&buf).unwrap().unwrap(),
"<C608 line=\"128\">AD///WFAoDYBlEsqYAAAAA==</C608>",
ndi_cc_encoder.encode(&buf).unwrap().as_bytes(),
b"<C608 line=\"128\">AAAAAP8D8D8AhAUAAgEwIAAABgCUAcASAJgKAAAAAAA=</C608>",
);
}
@ -260,9 +437,10 @@ mod tests {
);
}
let mut ndi_cc_encoder = NDICCMetaEncoder::new(1920);
assert_eq!(
encode_video_caption_meta(&buf).unwrap().unwrap(),
"<C708>AD///WFAZVpaaZVj9Q4AgCcn4vxlEsvmAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAnSAIAhutwA=</C708>",
ndi_cc_encoder.encode(&buf).unwrap().as_bytes(),
b"<C708 line=\"10\">AAAAAP8D8D8AhAUAAQFQJQBYCgBpAlAlAPwIAEMBACAAAAgAcgKAHwDwCwCUAcASAOQLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADQCQAAAgAgAGwIALcCAAAAAAAAAAAAAA==</C708>",
);
}
@ -293,9 +471,10 @@ mod tests {
);
}
let mut ndi_cc_encoder = NDICCMetaEncoder::new(1920);
assert_eq!(
encode_video_caption_meta(&buf).unwrap().unwrap(),
"<C608 line=\"128\">AD///WFAoDYBlEsqYAAAAA==</C608><C708>AD///WFAZVpaaZVj9Q4AgCcn4vxlEsvmAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAnSAIAhutwA=</C708>",
ndi_cc_encoder.encode(&buf).unwrap().as_bytes(),
b"<C608 line=\"128\">AAAAAP8D8D8AhAUAAgEwIAAABgCUAcASAJgKAAAAAAA=</C608><C708 line=\"10\">AAAAAP8D8D8AhAUAAQFQJQBYCgBpAlAlAPwIAEMBACAAAAgAcgKAHwDwCwCUAcASAOQLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADQCQAAAgAgAGwIALcCAAAAAAAAAAAAAA==</C708>",
);
}
@ -314,18 +493,8 @@ mod tests {
);
}
let err = encode_video_caption_meta(&buf)
.unwrap_err()
.downcast::<NDIClosedCaptionError>()
.unwrap();
assert_eq!(
err,
NDIClosedCaptionError::UnsupportedCC {
cc_type: VideoCaptionType::Cea708Raw
}
);
assert!(err.is_unsupported_cc());
let mut ndi_cc_encoder = NDICCMetaEncoder::new(1920);
assert!(ndi_cc_encoder.encode(&buf).is_none());
}
#[test]
@ -333,32 +502,47 @@ mod tests {
gst::init().unwrap();
let buf = gst::Buffer::new();
assert!(encode_video_caption_meta(&buf).unwrap().is_none());
let mut ndi_cc_encoder = NDICCMetaEncoder::new(1920);
assert!(ndi_cc_encoder.encode(&buf).is_none());
}
#[test]
fn parse_ndi_meta_c608() {
let mut ndi_cc_list =
parse_ndi_cc_meta("<C608 line=\"128\">AD///WFAoDYBlEsqYAAAAA==</C608>").unwrap();
fn decode_ndi_meta_c608() {
gst::init().unwrap();
let ndi_cc = ndi_cc_list.pop().unwrap();
assert_eq!(ndi_cc.cc_type, VideoCaptionType::Cea608S3341a);
assert_eq!(ndi_cc.data.as_slice(), [0x80, 0x94, 0x2c]);
assert!(ndi_cc_list.is_empty());
}
#[test]
fn parse_ndi_meta_c708() {
let mut ndi_cc_list = parse_ndi_cc_meta(
"<C708>AD///WFAZVpaaZVj9Q4AgCcn4vxlEsvmAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAnSAIAhutwA=</C708>",
let mut buf = gst::Buffer::new();
let mut ndi_cc_decoder = NDICCMetaDecoder::new(1920);
ndi_cc_decoder
.decode(
"<C608 line=\"128\">AAAAAP8D8D8AhAUAAgEwIAAABgCUAcASAJgKAAAAAAA=</C608>",
&mut buf,
)
.unwrap();
let ndi_cc = ndi_cc_list.pop().unwrap();
assert_eq!(ndi_cc.cc_type, VideoCaptionType::Cea708Cdp);
let mut cc_meta_iter = buf.iter_meta::<gst_video::VideoCaptionMeta>();
let cc_meta = cc_meta_iter.next().unwrap();
assert_eq!(cc_meta.caption_type(), VideoCaptionType::Cea608S3341a);
assert_eq!(cc_meta.data(), [0x80, 0x94, 0x2c]);
assert!(cc_meta_iter.next().is_none());
}
#[test]
fn decode_ndi_meta_c708() {
gst::init().unwrap();
let mut buf = gst::Buffer::new();
let mut ndi_cc_decoder = NDICCMetaDecoder::new(1920);
ndi_cc_decoder.decode(
"<C708 line=\"10\">AAAAAP8D8D8AhAUAAQFQJQBYCgBpAlAlAPwIAEMBACAAAAgAcgKAHwDwCwCUAcASAOQLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADQCQAAAgAgAGwIALcCAAAAAAAAAAAAAA==</C708>",
&mut buf,
)
.unwrap();
let mut cc_meta_iter = buf.iter_meta::<gst_video::VideoCaptionMeta>();
let cc_meta = cc_meta_iter.next().unwrap();
assert_eq!(cc_meta.caption_type(), VideoCaptionType::Cea708Cdp);
assert_eq!(
ndi_cc.data.as_slice(),
cc_meta.data(),
[
0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
@ -369,70 +553,89 @@ mod tests {
0x1b,
]
);
assert!(ndi_cc_list.is_empty());
assert!(cc_meta_iter.next().is_none());
}
#[test]
fn parse_ndi_meta_c708_newlines_and_indent() {
let mut ndi_cc_list = parse_ndi_cc_meta(
r#"<C708>
AD///WFAZVpaaZVj9Q4AgCcn4vxlEsvmAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIA
vqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAnSAIA
hutwA=
fn decode_ndi_meta_c708_newlines_and_indent() {
gst::init().unwrap();
let mut buf = gst::Buffer::new();
let mut ndi_cc_decoder = NDICCMetaDecoder::new(1920);
ndi_cc_decoder
.decode(
r#"<C708 line=\"10\">
AAAAAP8D8D8AhAUAAQFQJQBYCgBpAlAlAPwIAEMBACAAAAgAcgKAHwDwCwCUAcASAOQ
LAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAA
ACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACA
CAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA
6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADQCQAAAgAgAGwIALcCAAAAAAA
AAAAAAA==
</C708>"#,
)
.unwrap();
let ndi_cc = ndi_cc_list.pop().unwrap();
assert_eq!(ndi_cc.cc_type, VideoCaptionType::Cea708Cdp);
assert_eq!(
ndi_cc.data.as_slice(),
[
0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00,
0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00,
0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0x74, 0x00, 0x00,
0x1b,
]
);
assert!(ndi_cc_list.is_empty());
}
#[test]
fn parse_ndi_meta_c608_newlines_spaces_inline() {
let mut ndi_cc_list = parse_ndi_cc_meta(
"<C608 line=\"128\">\n\tAD///WFAo\n\n\r DYBlEsq\r\n\tYAAAAA== \n</C608>",
)
.unwrap();
let ndi_cc = ndi_cc_list.pop().unwrap();
assert_eq!(ndi_cc.cc_type, VideoCaptionType::Cea608S3341a);
assert_eq!(ndi_cc.data.as_slice(), [0x80, 0x94, 0x2c]);
assert!(ndi_cc_list.is_empty());
}
#[test]
fn parse_ndi_meta_c608_and_c708() {
let ndi_cc_list = parse_ndi_cc_meta(
"<C608 line=\"128\">AD///WFAoDYBlEsqYAAAAA==</C608><C708>AD///WFAZVpaaZVj9Q4AgCcn4vxlEsvmAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAvqAIAnSAIAhutwA=</C708>",
&mut buf,
)
.unwrap();
let mut ndi_cc_iter = ndi_cc_list.iter();
let ndi_cc = ndi_cc_iter.next().unwrap();
assert_eq!(ndi_cc.cc_type, VideoCaptionType::Cea608S3341a);
assert_eq!(ndi_cc.data.as_slice(), [0x80, 0x94, 0x2c]);
let ndi_cc = ndi_cc_iter.next().unwrap();
assert_eq!(ndi_cc.cc_type, VideoCaptionType::Cea708Cdp);
let mut cc_meta_iter = buf.iter_meta::<gst_video::VideoCaptionMeta>();
let cc_meta = cc_meta_iter.next().unwrap();
assert_eq!(cc_meta.caption_type(), VideoCaptionType::Cea708Cdp);
assert_eq!(
ndi_cc.data.as_slice(),
cc_meta.data(),
[
0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00,
0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00,
0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0x74, 0x00, 0x00,
0x1b,
]
);
assert!(cc_meta_iter.next().is_none());
}
#[test]
fn decode_ndi_meta_c608_newlines_spaces_inline() {
gst::init().unwrap();
let mut buf = gst::Buffer::new();
let mut ndi_cc_decoder = NDICCMetaDecoder::new(1920);
ndi_cc_decoder.decode(
"<C608 line=\"128\">\n\tAAAAAP8D8\n\n\r D8AhAUA\r\n\tAgEwIAAABgCUAcASAJgKAAAAAAA= \n</C608>",
&mut buf,
)
.unwrap();
let mut cc_meta_iter = buf.iter_meta::<gst_video::VideoCaptionMeta>();
let cc_meta = cc_meta_iter.next().unwrap();
assert_eq!(cc_meta.caption_type(), VideoCaptionType::Cea608S3341a);
assert_eq!(cc_meta.data(), [0x80, 0x94, 0x2c]);
assert!(cc_meta_iter.next().is_none());
}
#[test]
fn decode_ndi_meta_c608_and_c708() {
gst::init().unwrap();
let mut buf = gst::Buffer::new();
let mut ndi_cc_decoder = NDICCMetaDecoder::new(1920);
ndi_cc_decoder.decode(
"<C608 line=\"128\">AAAAAP8D8D8AhAUAAgEwIAAABgCUAcASAJgKAAAAAAA=</C608><C708 line=\"10\">AAAAAP8D8D8AhAUAAQFQJQBYCgBpAlAlAPwIAEMBACAAAAgAcgKAHwDwCwCUAcASAOQLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADoCwAAAgAgAOgLAAACACAA6AsAAAIAIADQCQAAAgAgAGwIALcCAAAAAAAAAAAAAA==</C708>",
&mut buf,
)
.unwrap();
let mut cc_meta_iter = buf.iter_meta::<gst_video::VideoCaptionMeta>();
let cc_meta = cc_meta_iter.next().unwrap();
assert_eq!(cc_meta.caption_type(), VideoCaptionType::Cea608S3341a);
assert_eq!(cc_meta.data(), [0x80, 0x94, 0x2c]);
let cc_meta = cc_meta_iter.next().unwrap();
assert_eq!(cc_meta.caption_type(), VideoCaptionType::Cea708Cdp);
assert_eq!(
cc_meta.data(),
[
0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
@ -444,21 +647,21 @@ mod tests {
]
);
assert!(ndi_cc_iter.next().is_none());
assert!(cc_meta_iter.next().is_none());
}
#[test]
fn parse_ndi_meta_tag_mismatch() {
fn decode_ndi_meta_tag_mismatch() {
gst::init().unwrap();
// Expecting </C608> found </C708>'
let _ =
parse_ndi_cc_meta("<C608 line=\"128\">AD///WFAoDYBlEsqYAAAAA==</C708>").unwrap_err();
}
#[test]
fn parse_ndi_meta_c608_deeper_failure() {
// Caused by:
// 0: Parsing anc data flags
// 1: Not enough data'
let _ = parse_ndi_cc_meta("<C608 line=\"128\">AAA=</C608>").unwrap_err();
let mut buf = gst::Buffer::new();
let mut ndi_cc_decoder = NDICCMetaDecoder::new(1920);
ndi_cc_decoder
.decode(
"<C608 line=\"128\">AAAAAP8D8D8AhAUAAgEwIAAABgCUAcASAJgKAAAAAAA=</C708>",
&mut buf,
)
.unwrap_err();
}
}

View file

@ -11,7 +11,7 @@ use std::sync::Mutex;
use gst::glib::once_cell::sync::Lazy;
use crate::ndi::SendInstance;
use crate::ndi_cc_meta;
use crate::ndi_cc_meta::NDICCMetaEncoder;
static DEFAULT_SENDER_NDI_NAME: Lazy<String> = Lazy::new(|| {
format!(
@ -37,6 +37,7 @@ impl Default for Settings {
struct State {
send: SendInstance,
video_info: Option<gst_video::VideoInfo>,
ndi_cc_encoder: Option<NDICCMetaEncoder>,
audio_info: Option<gst_audio::AudioInfo>,
}
@ -204,6 +205,7 @@ impl BaseSinkImpl for NdiSink {
let state = State {
send,
video_info: None,
ndi_cc_encoder: None,
audio_info: None,
};
*state_storage = Some(state);
@ -243,6 +245,7 @@ impl BaseSinkImpl for NdiSink {
let info = gst_video::VideoInfo::from_caps(caps)
.map_err(|_| gst::loggable_error!(CAT, "Couldn't parse caps {}", caps))?;
state.ndi_cc_encoder = Some(NDICCMetaEncoder::new(info.width()));
state.video_info = Some(info);
state.audio_info = None;
} else {
@ -251,6 +254,7 @@ impl BaseSinkImpl for NdiSink {
state.audio_info = Some(info);
state.video_info = None;
state.ndi_cc_encoder = None;
}
Ok(())
@ -304,22 +308,11 @@ impl BaseSinkImpl for NdiSink {
.map(|time| (time.nseconds() / 100) as i64)
.unwrap_or(crate::ndisys::NDIlib_send_timecode_synthesize);
match ndi_cc_meta::encode_video_caption_meta(buffer) {
Ok(None) => (),
Ok(Some(cc_data)) => {
gst::trace!(CAT, "Sending cc meta with timecode {timecode}");
let metadata_frame =
crate::ndi::MetadataFrame::new(timecode, Some(cc_data.as_str()));
state.send.send_metadata(&metadata_frame);
}
Err(err) => match err.downcast_ref::<ndi_cc_meta::NDIClosedCaptionError>() {
Some(err) if err.is_unsupported_cc() => {
gst::info!(CAT, "{err}");
}
_ => {
gst::error!(CAT, "Failed to encode Video Caption meta: {err}");
}
},
let mut ndi_meta = None;
if let Some(ref mut ndi_cc_encoder) = state.ndi_cc_encoder {
// handle potential width change
ndi_cc_encoder.set_width(info.width());
ndi_meta = ndi_cc_encoder.encode(buffer);
}
let frame = gst_video::VideoFrameRef::from_buffer_ref_readable(buffer, info)
@ -328,11 +321,15 @@ impl BaseSinkImpl for NdiSink {
gst::FlowError::Error
})?;
let frame = crate::ndi::VideoFrame::try_from_video_frame(&frame, timecode)
.map_err(|_| {
gst::error!(CAT, imp: self, "Unsupported video frame");
gst::FlowError::NotNegotiated
})?;
let frame = crate::ndi::VideoFrame::try_from_video_frame(
&frame,
ndi_meta.as_deref(),
timecode,
)
.map_err(|_| {
gst::error!(CAT, imp: self, "Unsupported video frame");
gst::FlowError::NotNegotiated
})?;
gst::trace!(
CAT,

View file

@ -17,7 +17,7 @@ use atomic_refcell::AtomicRefCell;
use gst::glib::once_cell::sync::Lazy;
use crate::ndi::*;
use crate::ndi_cc_meta;
use crate::ndi_cc_meta::NDICCMetaDecoder;
use crate::ndisys;
use crate::ndisys::*;
use crate::TimestampMode;
@ -177,6 +177,16 @@ impl VideoInfo {
.build()),
}
}
pub fn width(&self) -> u32 {
match self {
VideoInfo::Video(ref info) => info.width(),
#[cfg(feature = "advanced-sdk")]
VideoInfo::SpeedHQInfo { xres, .. }
| VideoInfo::H264 { xres, .. }
| VideoInfo::H265 { xres, .. } => *xres as u32,
}
}
}
#[derive(Debug)]
@ -209,6 +219,7 @@ pub struct ReceiverInner {
timeout: u32,
connect_timeout: u32,
ndi_cc_decoder: AtomicRefCell<Option<NDICCMetaDecoder>>,
thread: Mutex<Option<std::thread::JoinHandle<()>>>,
}
@ -600,6 +611,7 @@ impl Receiver {
timestamp_mode,
timeout,
connect_timeout,
ndi_cc_decoder: AtomicRefCell::new(None),
thread: Mutex::new(None),
}));
@ -745,7 +757,7 @@ impl Receiver {
let mut first_audio_frame = true;
let mut first_frame = true;
let mut timer = time::Instant::now();
let mut pending_ndi_cc = VecDeque::<ndi_cc_meta::NDIClosedCaption>::new();
let mut pending_metas = VecDeque::<String>::new();
// Capture until error or shutdown
loop {
@ -815,11 +827,14 @@ impl Receiver {
}
}
if !pending_ndi_cc.is_empty() {
if !pending_metas.is_empty() {
if let Ok(Buffer::Video(ref mut buffer, _)) = buffer {
let buf = buffer.get_mut().unwrap();
for ndi_cc in pending_ndi_cc.drain(..) {
gst_video::VideoCaptionMeta::add(buf, ndi_cc.cc_type, &ndi_cc.data);
let mut ndi_cc_decoder = receiver.0.ndi_cc_decoder.borrow_mut();
for meta in pending_metas.drain(..) {
let res = ndi_cc_decoder.as_mut().unwrap().decode(&meta, buffer);
if let Err(err) = res {
gst::debug!(CAT, obj: element, "Failed to parse NDI metadata: {err}");
}
}
}
}
@ -850,12 +865,7 @@ impl Receiver {
metadata,
);
match ndi_cc_meta::parse_ndi_cc_meta(metadata) {
Ok(mut ndi_cc_list) => pending_ndi_cc.extend(ndi_cc_list.drain(..)),
Err(err) => {
gst::error!(CAT, obj: element, "Error parsing closed caption: {err}");
}
}
pending_metas.push_back(metadata.to_string());
}
continue;
@ -1026,6 +1036,24 @@ impl Receiver {
.set_flags(gst::BufferFlags::RESYNC);
}
let mut ndi_cc_decoder = self.0.ndi_cc_decoder.borrow_mut();
if ndi_cc_decoder.is_none() {
*ndi_cc_decoder = Some(NDICCMetaDecoder::new(info.width()));
}
{
let ndi_cc_decoder = ndi_cc_decoder.as_mut().unwrap();
// handle potential width change (also needed for standalone metadata)
ndi_cc_decoder.set_width(info.width());
if let Some(metadata) = video_frame.metadata() {
let res = ndi_cc_decoder.decode(metadata, &mut buffer);
if let Err(err) = res {
gst::debug!(CAT, obj: element, "Failed to parse NDI video frame metadata: {err}");
}
}
}
gst::log!(CAT, obj: element, "Produced video buffer {:?}", buffer);
Ok(Buffer::Video(buffer, info))

View file

@ -1,517 +0,0 @@
//! Video Ancillary Active Format Description (AFD) encoder and parser
//! see SMPTE-291M
use anyhow::{bail, Context, Result};
use smallvec::SmallVec;
#[derive(thiserror::Error, Debug, Eq, PartialEq)]
/// Video Ancillary AFD related Errors.
pub enum VideoAncillaryAFDError {
    /// More payload words were pushed or announced than the caller expects.
    #[error("Unexpected data count {found}. Expected: {expected}")]
    UnexpectedDataCount { found: u8, expected: u8 },

    /// Input ended before a complete 10-bit word could be read.
    #[error("Not enough data")]
    NotEnoughData,

    /// The leading Ancillary Data Flag sequence (0x000, 0x3ff, 0x3ff) was not found.
    #[error("Unexpected data flags")]
    UnexpectedDataFlags,

    /// Checksum recomputed from the parsed words differs from the one in the stream.
    #[error("Unexpected checksum {found}. Expected: {expected}")]
    WrongChecksum { found: u16, expected: u16 },

    /// DID/SDID pair in the stream differs from the one the caller asked for.
    #[error("Unexpected did {found}. Expected: {expected}")]
    UnexpectedDID { found: u16, expected: u16 },
}
// Ancillary Data Flag (ADF): the three-word preamble (0x000, 0x3ff, 0x3ff)
// that opens every component ancillary data packet.
const ANCILLARY_DATA_FLAGS: [u16; 3] = [0x000, 0x3ff, 0x3ff];

// Combined DID/SDID identifiers (DID in the high byte, SDID in the low byte)
// for EIA-708 and EIA-608 closed caption ancillary packets.
const EIA_708_ANCILLARY_DID_16: u16 = 0x6101;
const EIA_608_ANCILLARY_DID_16: u16 = 0x6102;

// Video anc AFD content:
// ADF + DID/SDID + DATA COUNT + PAYLOAD + checksum:
// 3 + 2 + 1 + 256 max + 1 = 263
// Those are 10bit words, so we need 329 bytes max.
pub const VIDEO_ANC_AFD_CAPACITY: usize = 329;

// Inline-allocated buffer sized for the worst-case packed AFD payload,
// avoiding a heap allocation in the common case.
pub type VideoAncillaryAFD = SmallVec<[u8; VIDEO_ANC_AFD_CAPACITY]>;
/// Builds the 10-bit ancillary word for `val` by prepending its parity bits.
///
/// Bit 8 carries the (odd) parity of `val`; bit 9 is the complement of bit 8,
/// as required for SMPTE-291M user data words.
fn with_afd_parity(val: u8) -> u16 {
    let parity = (val.count_ones() & 1) as u16;
    ((parity ^ 1) << 9) | (parity << 8) | u16::from(val)
}
#[derive(Debug)]
/// Video Ancillary Active Format Description (AFD) Encoder
pub struct VideoAncillaryAFDEncoder {
    // Output buffer accumulating the packed 10-bit words.
    data: VideoAncillaryAFD,
    // Bit offset (0, 2, 4 or 6) inside the last byte of `data` at which
    // the next 10-bit word will be packed.
    offset: u8,
    // Running sum of the pushed user data words; finalized in `terminate`.
    checksum: u16,
    // Number of payload words pushed so far (excludes ADF, DID/SDID, count).
    data_count: u8,
    // When set, pushing more than this many payload words is an error.
    expected_data_count: Option<u8>,
}
impl VideoAncillaryAFDEncoder {
    /// Creates an encoder for raw CEA-608 data carried on the given `line`.
    ///
    /// The payload is fixed at 3 words: the line number (pushed here),
    /// followed by the two CEA-608 bytes pushed later via [`Self::push_data`].
    pub fn for_cea608_raw(line: u8) -> Self {
        let mut this = Self::new(EIA_608_ANCILLARY_DID_16);
        this.expected_data_count = Some(3);
        // Pushing 1 word when 3 are expected cannot exceed the count.
        this.push_data(&[line]).unwrap();
        this
    }

    /// Creates an encoder for CEA-608 data in S334-1 Annex A form
    /// (3 payload words expected).
    pub fn for_cea608_s334_1a() -> Self {
        let mut this = Self::new(EIA_608_ANCILLARY_DID_16);
        this.expected_data_count = Some(3);
        this
    }

    /// Creates an encoder for a CEA-708 CDP payload (no fixed data count).
    pub fn for_cea708_cdp() -> Self {
        Self::new(EIA_708_ANCILLARY_DID_16)
    }

    /// Starts a packet for `did16`: writes the ADF preamble and the DID/SDID
    /// words, then reserves the data count word (patched in `terminate`).
    fn new(did16: u16) -> Self {
        let mut this = VideoAncillaryAFDEncoder {
            data: SmallVec::new(),
            offset: 0,
            checksum: 0,
            data_count: 0,
            expected_data_count: None,
        };

        // Ancillary Data Flag, component AFD description
        this.push_raw_10bit_word(ANCILLARY_DATA_FLAGS[0]);
        this.push_raw_10bit_word(ANCILLARY_DATA_FLAGS[1]);
        this.push_raw_10bit_word(ANCILLARY_DATA_FLAGS[2]);

        // did / sdid: not part of data count
        let did_sdid: [u8; 2] = did16.to_be_bytes();
        this.push_as_10bit_word(did_sdid[0]);
        this.push_as_10bit_word(did_sdid[1]);

        // Reserved for data count
        this.push_raw_10bit_word(0x000);

        this
    }

    /// Pushes the provided `word` as a 10 bits value.
    ///
    /// The 10bits lsb are pushed at current offset as is.
    fn push_raw_10bit_word(&mut self, word: u16) {
        debug_assert_eq!(word & 0xfc00, 0);
        let word = word & 0x3ff;

        // Words are packed MSB-first; depending on the current bit offset the
        // word straddles the last byte and one or two freshly pushed bytes.
        match self.offset {
            0 => {
                self.data.push((word >> 2) as u8);
                self.data.push((word << 6) as u8);
                self.offset = 2;
            }
            2 => {
                *self.data.last_mut().unwrap() |= (word >> 4) as u8;
                self.data.push((word << 4) as u8);
                self.offset = 4;
            }
            4 => {
                *self.data.last_mut().unwrap() |= (word >> 6) as u8;
                self.data.push((word << 2) as u8);
                self.offset = 6;
            }
            6 => {
                *self.data.last_mut().unwrap() |= (word >> 8) as u8;
                self.data.push(word as u8);
                self.offset = 0;
            }
            _ => unreachable!(),
        }
    }

    /// Pushes the provided `value` as a 10 bits value.
    ///
    /// The `value` is:
    ///
    /// - prepended with the parity bits,
    /// - pushed at current buffer offset,
    /// - pushed to the checksum.
    fn push_as_10bit_word(&mut self, value: u8) {
        let pval = with_afd_parity(value);
        self.push_raw_10bit_word(pval);
        self.checksum += pval;
    }

    /// Pushes the provided each item in `data` as a 10 bits value.
    ///
    /// The `value` is:
    ///
    /// - prepended with the parity bits,
    /// - pushed at current buffer offset,
    /// - pushed to the checksum.
    ///
    /// The data count is incremented for each pushed value.
    /// If the expected data count is defined and data count exceeds it,
    /// `VideoAncillaryAFDError::UnexpectedDataCount` is returned.
    pub fn push_data(&mut self, data: &[u8]) -> Result<()> {
        for val in data {
            self.data_count += 1;
            if let Some(expected_data_count) = self.expected_data_count {
                if self.data_count > expected_data_count {
                    bail!(VideoAncillaryAFDError::UnexpectedDataCount {
                        found: self.data_count,
                        expected: expected_data_count,
                    });
                }
            }

            self.push_as_10bit_word(*val);
        }

        Ok(())
    }

    /// Terminates and returns the Video Ancillary AFD buffer.
    pub fn terminate(mut self) -> VideoAncillaryAFD {
        // update data_count starting at idx 6, offset 2
        let data_count = with_afd_parity(self.data_count);
        self.data[6] |= (data_count >> 4) as u8;
        self.data[7] |= (data_count << 4) as u8;

        // Checksum: sum of the 9 LSBs of every counted word; bit 9 is the
        // complement of bit 8, then the whole value is masked to 10 bits.
        self.checksum = (self.checksum + data_count) & 0x1ff;
        self.checksum |= (!(self.checksum >> 8)) << 9;
        self.checksum &= 0x3ff;

        self.push_raw_10bit_word(self.checksum);

        self.data
    }
}
#[derive(Debug)]
/// Video Ancillary Active Format Description (AFD) Parser
pub struct VideoAncillaryAFDParser<'a> {
    // Packed 10-bit-word input being parsed.
    input: &'a [u8],
    // Unpacked 8-bit payload words extracted so far.
    data: VideoAncillaryAFD,
    // Combined DID/SDID read from the stream (DID high byte, SDID low byte).
    did: u16,
    // Byte index of the next read in `input`.
    idx: usize,
    // Bit offset (0, 2, 4 or 6) within `input[idx]`.
    offset: u8,
    // Running checksum over the parsed counted words.
    checksum: u16,
    // Payload word count announced by the stream.
    data_count: u8,
}
impl<'a> VideoAncillaryAFDParser<'a> {
    /// Parses `input` as a CEA-608 ancillary packet.
    ///
    /// Fails if the DID/SDID is not the EIA-608 pair or if the payload
    /// is not exactly 3 words (field/line word + two CEA-608 bytes).
    pub fn parse_for_cea608(input: &'a [u8]) -> Result<VideoAncillaryAFD> {
        let this = Self::parse(input)?;

        if this.did != EIA_608_ANCILLARY_DID_16 {
            bail!(VideoAncillaryAFDError::UnexpectedDID {
                found: this.did,
                expected: EIA_608_ANCILLARY_DID_16,
            });
        }

        if this.data_count != 3 {
            bail!(VideoAncillaryAFDError::UnexpectedDataCount {
                found: this.data_count,
                expected: 3,
            });
        }

        Ok(this.data)
    }

    /// Parses `input` as a CEA-708 ancillary packet (CDP payload,
    /// no fixed data count).
    pub fn parse_for_cea708(input: &'a [u8]) -> Result<VideoAncillaryAFD> {
        let this = Self::parse(input)?;

        if this.did != EIA_708_ANCILLARY_DID_16 {
            bail!(VideoAncillaryAFDError::UnexpectedDID {
                found: this.did,
                expected: EIA_708_ANCILLARY_DID_16,
            });
        }

        Ok(this.data)
    }

    /// Parses the common packet structure: ADF preamble, DID/SDID,
    /// data count, payload words and trailing checksum.
    fn parse(input: &'a [u8]) -> Result<Self> {
        let mut this = VideoAncillaryAFDParser {
            input,
            data: SmallVec::new(),
            did: 0,
            idx: 0,
            offset: 0,
            checksum: 0,
            data_count: 0,
        };

        let mut anc_data_flags = [0u16; 3];
        anc_data_flags[0] = this
            .pull_raw_10bit_word()
            .context("Parsing anc data flags")?;
        anc_data_flags[1] = this
            .pull_raw_10bit_word()
            .context("Parsing anc data flags")?;
        anc_data_flags[2] = this
            .pull_raw_10bit_word()
            .context("Parsing anc data flags")?;

        if anc_data_flags != ANCILLARY_DATA_FLAGS {
            bail!(VideoAncillaryAFDError::UnexpectedDataFlags);
        }

        let did = this.pull_from_10bit_word().context("Parsing did")?;
        let sdid = this.pull_from_10bit_word().context("Parsing sdid")?;
        this.did = u16::from_be_bytes([did, sdid]);

        let data_count = this.pull_from_10bit_word().context("Parsing data_count")?;
        for _ in 0..data_count {
            let val = this.pull_from_10bit_word().context("Parsing data")?;
            this.data.push(val);
        }

        this.data_count = data_count;

        let found_checksum = this.pull_raw_10bit_word().context("Parsing checksum")?;

        // Recompute the expected checksum the same way the encoder builds it:
        // keep the 9 LSBs of the running sum, set bit 9 to the complement of
        // bit 8, then mask to 10 bits.
        this.checksum &= 0x1ff;
        this.checksum |= (!(this.checksum >> 8)) << 9;
        this.checksum &= 0x3ff;

        if this.checksum != found_checksum {
            bail!(VideoAncillaryAFDError::WrongChecksum {
                found: found_checksum,
                expected: this.checksum
            });
        }

        Ok(this)
    }

    /// Pulls the next 10-bit word from the packed input, advancing the
    /// byte index and bit offset. Fails with `NotEnoughData` when fewer
    /// than two bytes remain to complete the word.
    fn pull_raw_10bit_word(&mut self) -> Result<u16> {
        if self.input.len() <= self.idx + 1 {
            bail!(VideoAncillaryAFDError::NotEnoughData);
        }

        let word;
        let msb = self.input[self.idx] as u16;
        self.idx += 1;
        let lsb = self.input[self.idx] as u16;

        // Mirror of the encoder's packing: the word straddles two bytes,
        // and only at offset 6 does the second byte get fully consumed.
        match self.offset {
            0 => {
                word = (msb << 2) | (lsb >> 6);
                self.offset = 2;
            }
            2 => {
                word = ((msb & 0x3f) << 4) | (lsb >> 4);
                self.offset = 4;
            }
            4 => {
                word = ((msb & 0x0f) << 6) | (lsb >> 2);
                self.offset = 6;
            }
            6 => {
                word = ((msb & 0x03) << 8) | lsb;
                self.idx += 1;
                self.offset = 0;
            }
            _ => unreachable!(),
        }

        Ok(word)
    }

    /// Pulls a 8bit value from next 10bit word.
    ///
    /// Also checks parity and adds to checksum.
    fn pull_from_10bit_word(&mut self) -> Result<u8> {
        let word = self.pull_raw_10bit_word()?;
        let val = (word & 0xff) as u8;

        // Don't check parity: we will rely on the checksum for integrity
        self.checksum += word;

        Ok(val)
    }
}
#[cfg(test)]
mod tests {
    //! Round-trip and error-path tests for the AFD encoder and parser.
    //! Expected byte arrays are the packed 10-bit-word representations
    //! of the corresponding payloads.

    use super::*;

    // Encode raw CEA-608 (line word + 2 data bytes) and check the packed output.
    #[test]
    fn afd_encode_cea608_raw() {
        let mut anc_afd = VideoAncillaryAFDEncoder::for_cea608_raw(21);
        anc_afd.push_data(&[0x94, 0x2c]).unwrap();
        let buf = anc_afd.terminate();
        assert_eq!(
            buf.as_slice(),
            [0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0xa0, 0x34, 0x55, 0x94, 0x4b, 0x23, 0xb0]
        );
    }

    // Encode CEA-608 in S334-1a form (field flags byte + 2 data bytes).
    #[test]
    fn afd_encode_cea608_s334_1a() {
        let mut anc_afd = VideoAncillaryAFDEncoder::for_cea608_s334_1a();
        anc_afd.push_data(&[0x80, 0x94, 0x2c]).unwrap();
        let buf = anc_afd.terminate();
        assert_eq!(
            buf.as_slice(),
            [0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0xa0, 0x36, 0x01, 0x94, 0x4b, 0x2a, 0x60]
        );
    }

    // Pushing a 4th byte when only 3 are expected must fail.
    #[test]
    fn afd_encode_cea608_s334_1a_data_count_exceeded() {
        let mut anc_afd = VideoAncillaryAFDEncoder::for_cea608_s334_1a();
        assert_eq!(
            anc_afd
                .push_data(&[0x80, 0x94, 0x2c, 0xab])
                .unwrap_err()
                .downcast::<VideoAncillaryAFDError>()
                .unwrap(),
            VideoAncillaryAFDError::UnexpectedDataCount {
                expected: 3,
                found: 4
            },
        );
    }

    // Encode a full CEA-708 CDP payload (no data count limit).
    #[test]
    fn afd_encode_cea708_cdp() {
        let mut anc_afd = VideoAncillaryAFDEncoder::for_cea708_cdp();
        anc_afd
            .push_data(&[
                0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
                0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00,
                0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00,
                0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0x74, 0x00, 0x00,
                0x1b,
            ])
            .unwrap();
        let buf = anc_afd.terminate();
        assert_eq!(
            buf.as_slice(),
            [
                0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0x65, 0x5a, 0x5a, 0x69, 0x95, 0x63, 0xf5, 0x0e,
                0x00, 0x80, 0x27, 0x27, 0xe2, 0xfc, 0x65, 0x12, 0xcb, 0xe6, 0x00, 0x80, 0x2f, 0xa8,
                0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea, 0x00, 0x80, 0x2f,
                0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea, 0x00, 0x80,
                0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea, 0x00,
                0x80, 0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea,
                0x00, 0x80, 0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b,
                0xea, 0x00, 0x80, 0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0x74, 0x80, 0x20,
                0x08, 0x6e, 0xb7,
            ]
        );
    }

    // Decode the packet produced by `afd_encode_cea608_raw`.
    #[test]
    fn parse_afd_cea608() {
        let buf = VideoAncillaryAFDParser::parse_for_cea608(&[
            0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0xa0, 0x34, 0x55, 0x94, 0x4b, 0x23, 0xb0,
        ])
        .unwrap();

        assert_eq!(buf.as_slice(), [0x15, 0x94, 0x2c]);
    }

    // Trailing zero padding (32-bit alignment) must not confuse the parser.
    #[test]
    fn parse_afd_cea608_32bit_padded() {
        let buf = VideoAncillaryAFDParser::parse_for_cea608(&[
            0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0xa0, 0x34, 0x55, 0x94, 0x4b, 0x23, 0xb0, 0x00,
            0x00, 0x00,
        ])
        .unwrap();

        assert_eq!(buf.as_slice(), [0x15, 0x94, 0x2c]);
    }

    // Decode the packet produced by `afd_encode_cea708_cdp`.
    #[test]
    fn parse_afd_cea708() {
        let buf = VideoAncillaryAFDParser::parse_for_cea708(&[
            0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0x65, 0x5a, 0x5a, 0x69, 0x95, 0x63, 0xf5, 0x0e,
            0x00, 0x80, 0x27, 0x27, 0xe2, 0xfc, 0x65, 0x12, 0xcb, 0xe6, 0x00, 0x80, 0x2f, 0xa8,
            0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea, 0x00, 0x80, 0x2f,
            0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea, 0x00, 0x80,
            0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea, 0x00,
            0x80, 0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b, 0xea,
            0x00, 0x80, 0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0xfa, 0x80, 0x20, 0x0b,
            0xea, 0x00, 0x80, 0x2f, 0xa8, 0x02, 0x00, 0xbe, 0xa0, 0x08, 0x02, 0x74, 0x80, 0x20,
            0x08, 0x6e, 0xb7,
        ])
        .unwrap();

        assert_eq!(
            buf.as_slice(),
            [
                0x96, 0x69, 0x55, 0x3f, 0x43, 0x00, 0x00, 0x72, 0xf8, 0xfc, 0x94, 0x2c, 0xf9, 0x00,
                0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00,
                0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00,
                0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa,
                0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0xfa, 0x00, 0x00, 0x74, 0x00, 0x00,
                0x1b,
            ]
        );
    }

    // Truncated input: fails while reading the ADF preamble.
    #[test]
    fn parse_afd_cea608_not_enough_data() {
        assert_eq!(
            VideoAncillaryAFDParser::parse_for_cea608(&[0x00, 0x3f])
                .unwrap_err()
                .downcast::<VideoAncillaryAFDError>()
                .unwrap(),
            VideoAncillaryAFDError::NotEnoughData,
        );
    }

    // Corrupted ADF preamble is rejected.
    #[test]
    fn parse_afd_cea608_unexpected_data_flags() {
        assert_eq!(
            VideoAncillaryAFDParser::parse_for_cea608(&[
                0x00, 0x3f, 0xff, 0xdd, 0x61, 0x40, 0x60, 0x09, 0x88
            ])
            .unwrap_err()
            .downcast::<VideoAncillaryAFDError>()
            .unwrap(),
            VideoAncillaryAFDError::UnexpectedDataFlags,
        );
    }

    // A CEA-708 packet parsed as CEA-608 triggers a DID mismatch.
    #[test]
    fn parse_afd_cea608_unexpected_did() {
        assert_eq!(
            VideoAncillaryAFDParser::parse_for_cea608(&[
                0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0x60, 0x09, 0x88
            ])
            .unwrap_err()
            .downcast::<VideoAncillaryAFDError>()
            .unwrap(),
            VideoAncillaryAFDError::UnexpectedDID {
                found: EIA_708_ANCILLARY_DID_16,
                expected: EIA_608_ANCILLARY_DID_16
            },
        );
    }

    // A corrupted trailing checksum word is detected.
    #[test]
    fn parse_afd_cea708_wrong_checksum() {
        assert_eq!(
            VideoAncillaryAFDParser::parse_for_cea708(&[
                0x00, 0x3f, 0xff, 0xfd, 0x61, 0x40, 0x60, 0x09, 0x81
            ])
            .unwrap_err()
            .downcast::<VideoAncillaryAFDError>()
            .unwrap(),
            VideoAncillaryAFDError::WrongChecksum {
                found: 0x260,
                expected: 0x262
            },
        );
    }
}