rtp: Add linear audio (L8, L16, L20, L24) RTP payloaders / depayloaders

Tim-Philipp Müller 2023-10-24 19:47:25 +01:00
parent 66030f36ad
commit b27c15028a
11 changed files with 2073 additions and 0 deletions

Cargo.lock (generated, 2 additions)
View file

@@ -2717,9 +2717,11 @@ dependencies = [
"anyhow",
"atomic_refcell",
"bitstream-io",
"byte-slice-cast",
"gst-plugin-version-helper",
"gstreamer",
"gstreamer-app",
"gstreamer-audio",
"gstreamer-check",
"gstreamer-rtp",
"gstreamer-video",

View file

@@ -6643,6 +6643,226 @@
"rsrtp": {
"description": "GStreamer Rust RTP Plugin",
"elements": {
"rtpL16depay2": {
"author": "Tim-Philipp Müller <tim centricular com>",
"description": "Depayload 16-bit raw audio (L16) from RTP packets",
"hierarchy": [
"GstRtpL16Depay2",
"GstRtpLinearAudioDepay",
"GstRtpBaseDepay2",
"GstElement",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"klass": "Codec/Depayloader/Network/RTP",
"pad-templates": {
"sink": {
"caps": "application/x-rtp:\n media: audio\n clock-rate: [ 1, 2147483647 ]\n encoding-name: L16\napplication/x-rtp:\n media: audio\n clock-rate: [ 1, 2147483647 ]\n payload: { (int)10, (int)11 }\n",
"direction": "sink",
"presence": "always"
},
"src": {
"caps": "audio/x-raw:\n rate: [ 1, 2147483647 ]\n channels: [ 1, 2147483647 ]\n layout: interleaved\n format: S16BE\n",
"direction": "src",
"presence": "always"
}
},
"rank": "marginal"
},
"rtpL16pay2": {
"author": "Tim-Philipp Müller <tim centricular com>",
"description": "Payload 16-bit raw audio (L16) into RTP packets (RFC 3551)",
"hierarchy": [
"GstRtpL16Pay2",
"GstRtpLinearAudioPay",
"GstRtpBaseAudioPay2",
"GstRtpBasePay2",
"GstElement",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"klass": "Codec/Payloader/Network/RTP",
"pad-templates": {
"sink": {
"caps": "audio/x-raw:\n rate: [ 1, 2147483647 ]\n channels: [ 1, 2147483647 ]\n layout: interleaved\n format: S16BE\n",
"direction": "sink",
"presence": "always"
},
"src": {
"caps": "application/x-rtp:\n media: audio\n clock-rate: [ 1, 2147483647 ]\n encoding-name: L16\napplication/x-rtp:\n media: audio\n clock-rate: 44100\n payload: { (int)10, (int)11 }\n",
"direction": "src",
"presence": "always"
}
},
"rank": "marginal"
},
"rtpL20depay": {
"author": "Tim-Philipp Müller <tim centricular com>",
"description": "Depayload 20-bit raw audio (L20) from RTP packets",
"hierarchy": [
"GstRtpL20Depay2",
"GstRtpLinearAudioDepay",
"GstRtpBaseDepay2",
"GstElement",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"klass": "Codec/Depayloader/Network/RTP",
"pad-templates": {
"sink": {
"caps": "application/x-rtp:\n media: audio\n clock-rate: [ 1, 2147483647 ]\n encoding-name: L20\n",
"direction": "sink",
"presence": "always"
},
"src": {
"caps": "audio/x-raw:\n rate: [ 1, 2147483647 ]\n channels: [ 1, 2147483647 ]\n layout: interleaved\n format: S20BE\n",
"direction": "src",
"presence": "always"
}
},
"rank": "marginal"
},
"rtpL20pay": {
"author": "Tim-Philipp Müller <tim centricular com>",
"description": "Payload 20-bit raw audio (L20) into RTP packets (RFC 3551)",
"hierarchy": [
"GstRtpL20Pay",
"GstRtpLinearAudioPay",
"GstRtpBaseAudioPay2",
"GstRtpBasePay2",
"GstElement",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"klass": "Codec/Payloader/Network/RTP",
"pad-templates": {
"sink": {
"caps": "audio/x-raw:\n rate: [ 1, 2147483647 ]\n channels: [ 1, 2147483647 ]\n layout: interleaved\n format: S20BE\n",
"direction": "sink",
"presence": "always"
},
"src": {
"caps": "application/x-rtp:\n media: audio\n clock-rate: [ 1, 2147483647 ]\n encoding-name: L20\n",
"direction": "src",
"presence": "always"
}
},
"rank": "marginal"
},
"rtpL24depay2": {
"author": "Tim-Philipp Müller <tim centricular com>",
"description": "Depayload 24-bit raw audio (L24) from RTP packets",
"hierarchy": [
"GstRtpL24Depay2",
"GstRtpLinearAudioDepay",
"GstRtpBaseDepay2",
"GstElement",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"klass": "Codec/Depayloader/Network/RTP",
"pad-templates": {
"sink": {
"caps": "application/x-rtp:\n media: audio\n clock-rate: [ 1, 2147483647 ]\n encoding-name: L24\n",
"direction": "sink",
"presence": "always"
},
"src": {
"caps": "audio/x-raw:\n rate: [ 1, 2147483647 ]\n channels: [ 1, 2147483647 ]\n layout: interleaved\n format: S24BE\n",
"direction": "src",
"presence": "always"
}
},
"rank": "marginal"
},
"rtpL24pay2": {
"author": "Tim-Philipp Müller <tim centricular com>",
"description": "Payload 24-bit raw audio (L24) into RTP packets (RFC 3551)",
"hierarchy": [
"GstRtpL24Pay2",
"GstRtpLinearAudioPay",
"GstRtpBaseAudioPay2",
"GstRtpBasePay2",
"GstElement",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"klass": "Codec/Payloader/Network/RTP",
"pad-templates": {
"sink": {
"caps": "audio/x-raw:\n rate: [ 1, 2147483647 ]\n channels: [ 1, 2147483647 ]\n layout: interleaved\n format: S24BE\n",
"direction": "sink",
"presence": "always"
},
"src": {
"caps": "application/x-rtp:\n media: audio\n clock-rate: [ 1, 2147483647 ]\n encoding-name: L24\n",
"direction": "src",
"presence": "always"
}
},
"rank": "marginal"
},
"rtpL8depay2": {
"author": "Tim-Philipp Müller <tim centricular com>",
"description": "Depayload 8-bit raw audio (L8) from RTP packets",
"hierarchy": [
"GstRtpL8Depay2",
"GstRtpLinearAudioDepay",
"GstRtpBaseDepay2",
"GstElement",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"klass": "Codec/Depayloader/Network/RTP",
"pad-templates": {
"sink": {
"caps": "application/x-rtp:\n media: audio\n clock-rate: [ 1, 2147483647 ]\n encoding-name: L8\n",
"direction": "sink",
"presence": "always"
},
"src": {
"caps": "audio/x-raw:\n rate: [ 1, 2147483647 ]\n channels: [ 1, 2147483647 ]\n layout: interleaved\n format: U8\n",
"direction": "src",
"presence": "always"
}
},
"rank": "marginal"
},
"rtpL8pay2": {
"author": "Tim-Philipp Müller <tim centricular com>",
"description": "Payload 8-bit raw audio (L8) into RTP packets (RFC 3551)",
"hierarchy": [
"GstRtpL8Pay2",
"GstRtpLinearAudioPay",
"GstRtpBaseAudioPay2",
"GstRtpBasePay2",
"GstElement",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"klass": "Codec/Payloader/Network/RTP",
"pad-templates": {
"sink": {
"caps": "audio/x-raw:\n rate: [ 1, 2147483647 ]\n channels: [ 1, 2147483647 ]\n layout: interleaved\n format: U8\n",
"direction": "sink",
"presence": "always"
},
"src": {
"caps": "application/x-rtp:\n media: audio\n encoding-name: L8\n clock-rate: [ 1, 2147483647 ]\n",
"direction": "src",
"presence": "always"
}
},
"rank": "marginal"
},
"rtpav1depay": {
"author": "Vivienne Watermeier <vwatermeier@igalia.com>",
"description": "Depayload AV1 from RTP packets",
@@ -7604,6 +7824,29 @@
}
}
},
"GstRtpLinearAudioDepay": {
"hierarchy": [
"GstRtpLinearAudioDepay",
"GstRtpBaseDepay2",
"GstElement",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"kind": "object"
},
"GstRtpLinearAudioPay": {
"hierarchy": [
"GstRtpLinearAudioPay",
"GstRtpBaseAudioPay2",
"GstRtpBasePay2",
"GstElement",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"kind": "object"
},
"GstRtpPcmauDepay2": {
"hierarchy": [
"GstRtpPcmauDepay2",

View file

@@ -12,7 +12,9 @@ rust-version.workspace = true
anyhow = "1"
atomic_refcell = "0.1"
bitstream-io = "2.1"
byte-slice-cast = "1"
gst = { workspace = true, features = ["v1_20"] }
gst-audio = { workspace = true, features = ["v1_20"] }
gst-rtp = { workspace = true, features = ["v1_20"] }
gst-video = { workspace = true, features = ["v1_20"] }
once_cell.workspace = true

View file

@@ -24,6 +24,7 @@ mod basedepay;
mod basepay;
mod av1;
mod linear_audio;
mod mp2t;
mod pcmau;
mod vp8;
@@ -50,6 +51,9 @@ fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
av1::depay::register(plugin)?;
av1::pay::register(plugin)?;
linear_audio::depay::register(plugin)?;
linear_audio::pay::register(plugin)?;
mp2t::depay::register(plugin)?;
mp2t::pay::register(plugin)?;

View file

@@ -0,0 +1,283 @@
// GStreamer RTP audio channel positions
//
// Copyright (C) 2023-2024 Tim-Philipp Müller <tim centricular com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use byte_slice_cast::*;
use gst_audio::AudioChannelPosition;
pub(crate) const MAX_REORDER_CHANNELS: usize = 8;
// https://www.rfc-editor.org/rfc/rfc3551.html#section-4.1
const DEFAULT_1CH: &[gst_audio::AudioChannelPosition] = [
AudioChannelPosition::Mono, // Mono
]
.as_slice();
const DEFAULT_2CH: &[gst_audio::AudioChannelPosition] = [
AudioChannelPosition::FrontLeft, // Stereo
AudioChannelPosition::FrontRight,
]
.as_slice();
const DEFAULT_3CH: &[gst_audio::AudioChannelPosition] = [
AudioChannelPosition::FrontLeft, // 3ch
AudioChannelPosition::FrontRight,
AudioChannelPosition::FrontCenter,
]
.as_slice();
const DEFAULT_4CH: &[gst_audio::AudioChannelPosition] = [
AudioChannelPosition::FrontLeft, // 4ch
AudioChannelPosition::FrontRight,
AudioChannelPosition::FrontCenter,
AudioChannelPosition::Lfe1,
]
.as_slice();
const DEFAULT_5CH: &[gst_audio::AudioChannelPosition] = [
AudioChannelPosition::FrontLeft, // 5ch
AudioChannelPosition::FrontRight,
AudioChannelPosition::FrontCenter,
AudioChannelPosition::SideLeft,
AudioChannelPosition::SideRight,
]
.as_slice();
const DEFAULT_6CH: &[gst_audio::AudioChannelPosition] = [
AudioChannelPosition::SideLeft, // 6ch
AudioChannelPosition::FrontLeft,
AudioChannelPosition::FrontCenter,
AudioChannelPosition::SideRight,
AudioChannelPosition::FrontRight,
AudioChannelPosition::Lfe1,
]
.as_slice();
const DV_4CH_1: &[gst_audio::AudioChannelPosition] = [
AudioChannelPosition::FrontLeft,
AudioChannelPosition::FrontRight,
AudioChannelPosition::SideLeft,
AudioChannelPosition::SideRight,
]
.as_slice();
const DV_4CH_2: &[gst_audio::AudioChannelPosition] = [
AudioChannelPosition::FrontLeft,
AudioChannelPosition::FrontRight,
AudioChannelPosition::FrontCenter,
AudioChannelPosition::Lfe1,
]
.as_slice();
const DV_4CH_3: &[gst_audio::AudioChannelPosition] = [
// Same as DV_4CH_2 it seems?
AudioChannelPosition::FrontLeft,
AudioChannelPosition::FrontRight,
AudioChannelPosition::FrontCenter,
AudioChannelPosition::Lfe1,
]
.as_slice();
const DV_5CH_1: &[gst_audio::AudioChannelPosition] = [
AudioChannelPosition::FrontLeft,
AudioChannelPosition::FrontRight,
AudioChannelPosition::SideLeft,
AudioChannelPosition::SideRight,
AudioChannelPosition::FrontCenter,
]
.as_slice();
const DV_6CH_1: &[gst_audio::AudioChannelPosition] = [
AudioChannelPosition::FrontLeft,
AudioChannelPosition::FrontRight,
AudioChannelPosition::SideLeft,
AudioChannelPosition::SideRight,
AudioChannelPosition::FrontCenter,
AudioChannelPosition::Lfe1,
]
.as_slice();
const DV_6CH_2: &[gst_audio::AudioChannelPosition] = [
AudioChannelPosition::FrontLeft,
AudioChannelPosition::FrontRight,
AudioChannelPosition::FrontCenter,
AudioChannelPosition::Lfe1,
AudioChannelPosition::SideLeft,
AudioChannelPosition::SideRight,
]
.as_slice();
const DV_8CH_1: &[gst_audio::AudioChannelPosition] = [
AudioChannelPosition::FrontLeft,
AudioChannelPosition::FrontRight,
AudioChannelPosition::FrontCenter,
AudioChannelPosition::Lfe1,
AudioChannelPosition::SideLeft,
AudioChannelPosition::SideRight,
AudioChannelPosition::RearLeft,
AudioChannelPosition::RearRight,
]
.as_slice();
const DV_8CH_2: &[gst_audio::AudioChannelPosition] = [
// Same as DV_8CH_1 it seems?
AudioChannelPosition::FrontLeft,
AudioChannelPosition::FrontRight,
AudioChannelPosition::FrontCenter,
AudioChannelPosition::Lfe1,
AudioChannelPosition::SideLeft,
AudioChannelPosition::SideRight,
AudioChannelPosition::RearLeft,
AudioChannelPosition::RearRight,
]
.as_slice();
const DV_8CH_3: &[gst_audio::AudioChannelPosition] = [
// Same as DV_8CH_1 it seems?
AudioChannelPosition::FrontLeft,
AudioChannelPosition::FrontRight,
AudioChannelPosition::FrontCenter,
AudioChannelPosition::Lfe1,
AudioChannelPosition::SideLeft,
AudioChannelPosition::SideRight,
AudioChannelPosition::RearLeft,
AudioChannelPosition::RearRight,
]
.as_slice();
pub(crate) fn get_channel_order(
name: Option<&str>,
n_channels: i32,
) -> Option<&'static [gst_audio::AudioChannelPosition]> {
assert!(n_channels > 0);
let name = name.unwrap_or("default");
// https://www.rfc-editor.org/rfc/rfc3551.html#section-4.1
// https://www.rfc-editor.org/rfc/rfc3555.html#section-4.1.15
match (n_channels, name) {
// mono
(1, _) => Some(DEFAULT_1CH),
// stereo
(2, _) => Some(DEFAULT_2CH),
// 3ch
(3, _) => Some(DEFAULT_3CH),
// 4ch
(4, "DV.LRLsRs") => Some(DV_4CH_1),
(4, "DV.LRCS") => Some(DV_4CH_2),
(4, "DV.LRCWo") => Some(DV_4CH_3),
(4, _) => Some(DEFAULT_4CH),
// 5ch
(5, "DV.LRLsRsC") => Some(DV_5CH_1),
(5, _) => Some(DEFAULT_5CH),
// 6ch
(6, "DV.LRLsRsCS") => Some(DV_6CH_1),
(6, "DV.LmixRmixTWoQ1Q2") => Some(DV_6CH_2),
(6, _) => Some(DEFAULT_6CH),
// 7ch
(7, _) => None,
// 8ch
(8, "DV.LRCWoLsRsLmixRmix") => Some(DV_8CH_1),
(8, "DV.LRCWoLs1Rs1Ls2Rs2") => Some(DV_8CH_2),
(8, "DV.LRCWoLsRsLcRc") => Some(DV_8CH_3),
(8, _) => None,
// >8ch
(9.., _) => None,
(..=0, _) => unreachable!(),
}
}
fn positions_are_compatible(
pos1: &[gst_audio::AudioChannelPosition],
pos2: &[gst_audio::AudioChannelPosition],
) -> bool {
if pos1.len() != pos2.len() {
return false;
}
let Ok(mask1) = AudioChannelPosition::positions_to_mask(pos1, false) else {
return false;
};
let Ok(mask2) = AudioChannelPosition::positions_to_mask(pos2, false) else {
return false;
};
mask1 == mask2
}
const CHANNEL_MAPPINGS: &[(&[gst_audio::AudioChannelPosition], &str)] = &[
// 1ch
(DEFAULT_1CH, "default"),
// 2ch
(DEFAULT_2CH, "default"),
// 3ch
(DEFAULT_3CH, "default"),
// 4ch
(DV_4CH_1, "DV.LRLsRs"),
(DV_4CH_2, "DV.LRCS"),
(DV_4CH_3, "DV.LRCWo"),
(DEFAULT_4CH, "default"),
// 5ch
(DV_5CH_1, "DV.LRLsRsC"),
(DEFAULT_5CH, "default"),
// 6ch
(DV_6CH_1, "DV.LRLsRsCS"),
(DV_6CH_2, "DV.LmixRmixTWoQ1Q2"),
(DEFAULT_6CH, "default"),
// 8ch
(DV_8CH_1, "DV.LRCWoLsRsLmixRmix"),
(DV_8CH_2, "DV.LRCWoLs1Rs1Ls2Rs2"),
(DV_8CH_3, "DV.LRCWoLsRsLcRc"),
];
// Returns either one of the "DV.*" names, or "default" or None
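// e.g. positions covering {FrontLeft, FrontRight, SideLeft, SideRight, FrontCenter, Lfe1}
// (matched via the channel mask, so order-insensitive) map to "DV.LRLsRsCS".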
pub(crate) fn find_channel_order_from_positions(
pos: &[gst_audio::AudioChannelPosition],
) -> Option<&'static str> {
let n_channels = pos.len();
for (map, name) in CHANNEL_MAPPINGS {
if map.len() == n_channels && positions_are_compatible(map, pos) {
return Some(name);
}
}
None
}
#[allow(clippy::manual_range_contains)]
pub(crate) fn reorder_channels<T: Default + Clone + Copy + FromByteSlice>(
buffer_ref: &mut gst::BufferRef,
reorder_map: &[usize],
) -> Result<(), gst::FlowError> {
let mut map = buffer_ref
.map_writable()
.map_err(|_| gst::FlowError::Error)?;
let n_channels = reorder_map.len();
assert!(n_channels >= 1 && n_channels <= MAX_REORDER_CHANNELS);
let mut scratch: [T; MAX_REORDER_CHANNELS] = Default::default();
let in_frame = &mut scratch[0..n_channels];
let data = map.as_mut_slice_of::<T>().unwrap();
for out_frame in data.chunks_exact_mut(n_channels) {
in_frame.copy_from_slice(out_frame);
// "The reorder_map can be used for reordering by assigning
// channel i of the input to channel reorder_map[i] of the output."
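// e.g. going from RTP order (FL, FR, SL, SR, FC, LFE) to GStreamer order
// (FL, FR, FC, LFE, SL, SR) the map is [0, 1, 4, 5, 2, 3]: input channels 2/3
// land in output slots 4/5, and input channels 4/5 in output slots 2/3.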
for (i, &out_idx) in reorder_map.iter().enumerate() {
out_frame[out_idx] = in_frame[i];
}
}
Ok(())
}
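// Illustrative sketch (not part of the change above): a minimal check of how
// get_channel_order(), positions_to_valid_order(), channel_reorder_map() and
// reorder_channels() combine for a 6-channel "DV.LRLsRsCS" stream. The test name
// and the sample values are made up for illustration.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn reorder_map_round_trip_6ch() {
        gst::init().unwrap();

        // RTP channel order for "DV.LRLsRsCS" is FL, FR, SL, SR, FC, LFE
        let rtp_order = get_channel_order(Some("DV.LRLsRsCS"), 6).unwrap();
        let mut gst_order = rtp_order.to_vec();
        gst_audio::AudioChannelPosition::positions_to_valid_order(&mut gst_order).unwrap();

        // Map input channel i (RTP order) to output slot reorder_map[i] (GStreamer order)
        let mut reorder_map = vec![0usize; 6];
        gst_audio::channel_reorder_map(rtp_order, &gst_order, &mut reorder_map).unwrap();
        assert_eq!(reorder_map, [0, 1, 4, 5, 2, 3]);

        // One interleaved u8 frame in RTP order ...
        let mut buffer = gst::Buffer::from_mut_slice(vec![1u8, 2, 3, 4, 5, 6]);
        reorder_channels::<u8>(buffer.get_mut().unwrap(), &reorder_map).unwrap();

        // ... comes out in GStreamer order (FL, FR, FC, LFE, SL, SR)
        let map = buffer.map_readable().unwrap();
        assert_eq!(map.as_slice(), [1, 2, 5, 6, 3, 4]);
    }
}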

View file

@@ -0,0 +1,3 @@
// SPDX-License-Identifier: MPL-2.0
pub mod channel_positions;

View file

@@ -0,0 +1,655 @@
// GStreamer RTP L8 / L16 / L20 / L24 linear raw audio depayloader
//
// Copyright (C) 2023-2024 Tim-Philipp Müller <tim centricular com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use atomic_refcell::AtomicRefCell;
use gst::{glib, prelude::*, subclass::prelude::*};
use gst_audio::{AudioCapsBuilder, AudioChannelPosition, AudioFormat, AudioInfo, AudioLayout};
use once_cell::sync::Lazy;
use std::num::NonZeroU32;
use crate::basedepay::{RtpBaseDepay2Ext, RtpBaseDepay2ImplExt};
use crate::linear_audio::common::channel_positions;
#[derive(Default)]
pub struct RtpLinearAudioDepay {
state: AtomicRefCell<State>,
}
#[derive(Default)]
struct State {
clock_rate: Option<NonZeroU32>,
bpf: Option<NonZeroU32>,
width: Option<NonZeroU32>,
channel_reorder_map: Option<Vec<usize>>,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"rtplinearaudiodepay",
gst::DebugColorFlags::empty(),
Some("RTP L8/L16/L20/L24 Raw Audio Depayloader"),
)
});
#[glib::object_subclass]
impl ObjectSubclass for RtpLinearAudioDepay {
const NAME: &'static str = "GstRtpLinearAudioDepay";
type Type = super::RtpLinearAudioDepay;
type ParentType = crate::basedepay::RtpBaseDepay2;
}
impl ObjectImpl for RtpLinearAudioDepay {}
impl GstObjectImpl for RtpLinearAudioDepay {}
impl ElementImpl for RtpLinearAudioDepay {}
impl crate::basedepay::RtpBaseDepay2Impl for RtpLinearAudioDepay {
const ALLOWED_META_TAGS: &'static [&'static str] = &["audio"];
fn set_sink_caps(&self, caps: &gst::Caps) -> bool {
let s = caps.structure(0).unwrap();
let pt = s.get::<i32>("payload").ok().filter(|&r| r > 0);
let encoding_name = s.get::<&str>("encoding-name").ok();
// pt 10 = L16 stereo, pt 11 = L16 mono
let (implied_clock_rate, implied_channels) = match pt {
Some(10) => (Some(44100), Some(2)),
Some(11) => (Some(44100), Some(1)),
_ => (None, None),
};
if (pt == Some(10) || pt == Some(11))
&& encoding_name.is_some()
&& encoding_name != Some("L16")
{
self.post_error_message(gst::error_msg!(
gst::StreamError::Format,
[
"pt 10-11 require encoding-name=L16 but found {}",
encoding_name.unwrap()
]
));
return false;
}
let mut state = self.state.borrow_mut();
let clock_rate = s
.get::<i32>("clock-rate")
.ok()
.filter(|&r| r > 0)
.or(implied_clock_rate)
.unwrap();
state.clock_rate = NonZeroU32::new(clock_rate as u32);
let audio_format = match encoding_name {
Some("L8") => AudioFormat::U8,
Some("L16") => AudioFormat::S16be,
Some("L20") => AudioFormat::S20be,
Some("L24") => AudioFormat::S24be,
None => AudioFormat::S16be, // pt 10/11
_ => unreachable!(), // Input caps will have been checked against template caps
};
let n_channels = {
let encoding_params = s
.get::<&str>("encoding-params")
.ok()
.and_then(|params| params.parse::<i32>().ok())
.filter(|&v| v > 0);
let channels = s
.get::<&str>("channels")
.ok()
.and_then(|chans| chans.parse::<i32>().ok())
.filter(|&v| v > 0);
let channels = channels.or(s.get::<i32>("channels").ok().filter(|&v| v > 0));
encoding_params
.or(channels)
.or(implied_channels)
.unwrap_or(1i32)
};
if pt == Some(10) && n_channels != 2 {
self.post_error_message(gst::error_msg!(
gst::StreamError::Format,
["pt 10 implies stereo but found {n_channels} channels specified"]
));
return false;
}
if pt == Some(11) && n_channels != 1 {
self.post_error_message(gst::error_msg!(
gst::StreamError::Format,
["pt 11 implies mono but found {n_channels} channels specified"]
));
return false;
}
let channel_order_name = s.get::<&str>("channel-order").ok();
let order = channel_positions::get_channel_order(channel_order_name, n_channels);
let gst_positions = if let Some(rtp_positions) = order {
let mut channel_positions = rtp_positions.to_vec();
// Re-order channel positions according to GStreamer conventions. This should always
// succeed because the input channel positioning comes from internal tables.
AudioChannelPosition::positions_to_valid_order(&mut channel_positions).unwrap();
// Is channel re-ordering actually required?
if rtp_positions != channel_positions {
let mut reorder_map = vec![0usize; n_channels as usize];
gst_audio::channel_reorder_map(rtp_positions, &channel_positions, &mut reorder_map)
.unwrap();
gst::info!(CAT, imp: self, "Channel positions (RTP) : {rtp_positions:?}");
gst::info!(CAT, imp: self, "Channel positions (GStreamer) : {channel_positions:?}");
gst::info!(CAT, imp: self, "Channel reorder map : {reorder_map:?}");
state.channel_reorder_map = Some(reorder_map);
}
channel_positions
} else {
vec![AudioChannelPosition::None; n_channels as usize]
};
let audio_info = AudioInfo::builder(audio_format, clock_rate as u32, n_channels as u32)
.layout(AudioLayout::Interleaved)
.positions(&gst_positions)
.build()
.unwrap();
state.bpf = NonZeroU32::new(audio_info.bpf());
state.width = NonZeroU32::new(audio_info.width());
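// e.g. for L24 5.1 audio: width = 24 bits, bpf = 3 bytes * 6 channels = 18 bytes per frame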
let src_caps = audio_info.to_caps().unwrap();
self.obj().set_src_caps(&src_caps);
true
}
// https://www.rfc-editor.org/rfc/rfc3551.html#section-4.5.10
fn handle_packet(
&self,
packet: &crate::basedepay::Packet,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let state = self.state.borrow();
let clock_rate = state.clock_rate.expect("clock-rate").get();
let bpf = state.bpf.expect("bpf").get();
if packet.payload().is_empty() {
gst::warning!(CAT, imp: self, "Empty packet {packet:?}, dropping");
self.obj().drop_packet(packet);
return Ok(gst::FlowSuccess::Ok);
}
if packet.payload().len() % (bpf as usize) != 0 {
gst::warning!(CAT, imp: self, "Wrong payload size: expected multiples of {bpf}, but have {}", packet.payload().len());
self.obj().drop_packet(packet);
return Ok(gst::FlowSuccess::Ok);
}
let mut buffer = packet.payload_buffer();
let buffer_ref = buffer.get_mut().unwrap();
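// Duration = payload size / (bytes per frame * clock rate).
// e.g. a 480 byte L16 stereo payload at 48 kHz lasts 480 / (4 * 48000) s = 2.5 ms.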
buffer_ref.set_duration(
(buffer_ref.size() as u64)
.mul_div_floor(*gst::ClockTime::SECOND, bpf as u64 * clock_rate as u64)
.map(gst::ClockTime::from_nseconds),
);
// Re-order channels from RTP layout to GStreamer layout if needed
if let Some(reorder_map) = &state.channel_reorder_map {
let width = state.width.expect("width").get();
type I24 = [u8; 3];
match width {
8 => channel_positions::reorder_channels::<u8>(buffer_ref, reorder_map)?,
16 => channel_positions::reorder_channels::<i16>(buffer_ref, reorder_map)?,
24 => channel_positions::reorder_channels::<I24>(buffer_ref, reorder_map)?,
_ => unreachable!(),
}
}
// Mark start of talkspurt with RESYNC flag
if packet.marker_bit() {
buffer_ref.set_flags(gst::BufferFlags::RESYNC);
}
gst::trace!(CAT, imp: self, "Finishing buffer {buffer:?} for packet {packet:?}");
self.obj().queue_buffer(packet.into(), buffer)
}
}
impl RtpLinearAudioDepay {}
trait RtpLinearAudioDepayImpl: RtpBaseDepay2ImplExt {}
unsafe impl<T: RtpLinearAudioDepayImpl> IsSubclassable<T> for super::RtpLinearAudioDepay {}
/**
* SECTION:element-rtpL8depay2
* @see_also: rtpL8pay2, rtpL16depay2, rtpL24depay2, rtpL8pay
*
* Extracts raw 8-bit audio from RTP packets as per [RFC 3551][rfc-3551].
*
* [rfc-3551]: https://www.rfc-editor.org/rfc/rfc3551.html#section-4.5.10
*
* ## Example pipeline
*
* |[
* gst-launch-1.0 udpsrc caps='application/x-rtp, media=audio, clock-rate=48000, encoding-name=L8, encoding-params=(string)1, channels=1, payload=96' ! rtpjitterbuffer latency=50 ! rtpL8depay2 ! audioconvert ! audioresample ! autoaudiosink
* ]| This will depayload an incoming RTP 8-bit raw audio stream. You can use the #rtpL8pay2
* element to create such an RTP stream.
*
* Since: plugins-rs-0.13.0
*/
#[derive(Default)]
pub(crate) struct RtpL8Depay;
#[glib::object_subclass]
impl ObjectSubclass for RtpL8Depay {
const NAME: &'static str = "GstRtpL8Depay2";
type Type = super::RtpL8Depay;
type ParentType = super::RtpLinearAudioDepay;
}
impl ObjectImpl for RtpL8Depay {}
impl GstObjectImpl for RtpL8Depay {}
impl ElementImpl for RtpL8Depay {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"RTP 8-bit Raw Audio Depayloader",
"Codec/Depayloader/Network/RTP",
"Depayload 8-bit raw audio (L8) from RTP packets",
"Tim-Philipp Müller <tim centricular com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&gst::Caps::builder_full()
.structure(
gst::Structure::builder("application/x-rtp")
.field("media", "audio")
.field("clock-rate", gst::IntRange::new(1i32, i32::MAX))
.field("encoding-name", "L8")
.build(),
)
.build(),
)
.unwrap();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&AudioCapsBuilder::new_interleaved()
.format(AudioFormat::U8)
.build(),
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
}
impl crate::basedepay::RtpBaseDepay2Impl for RtpL8Depay {}
impl RtpLinearAudioDepayImpl for RtpL8Depay {}
/**
* SECTION:element-rtpL16depay2
* @see_also: rtpL16pay2, rtpL8depay2, rtpL24depay2, rtpL16pay
*
* Extracts raw 16-bit audio from RTP packets as per [RFC 3551][rfc-3551].
*
* [rfc-3551]: https://www.rfc-editor.org/rfc/rfc3551.html#section-4.5.11
*
* ## Example pipeline
*
* |[
* gst-launch-1.0 udpsrc caps='application/x-rtp, media=audio, clock-rate=48000, encoding-name=L16, encoding-params=(string)1, channels=1, payload=96' ! rtpjitterbuffer latency=50 ! rtpL16depay2 ! audioconvert ! audioresample ! autoaudiosink
* ]| This will depayload an incoming RTP 16-bit raw audio stream. You can use the #rtpL16pay2
* element to create such an RTP stream.
*
* Since: plugins-rs-0.13.0
*/
#[derive(Default)]
pub(crate) struct RtpL16Depay;
#[glib::object_subclass]
impl ObjectSubclass for RtpL16Depay {
const NAME: &'static str = "GstRtpL16Depay2";
type Type = super::RtpL16Depay;
type ParentType = super::RtpLinearAudioDepay;
}
impl ObjectImpl for RtpL16Depay {}
impl GstObjectImpl for RtpL16Depay {}
impl ElementImpl for RtpL16Depay {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"RTP 16-bit Raw Audio Depayloader",
"Codec/Depayloader/Network/RTP",
"Depayload 16-bit raw audio (L16) from RTP packets",
"Tim-Philipp Müller <tim centricular com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&gst::Caps::builder_full()
.structure(
gst::Structure::builder("application/x-rtp")
.field("media", "audio")
.field("clock-rate", gst::IntRange::new(1i32, i32::MAX))
.field("encoding-name", "L16")
.build(),
)
.structure(
gst::Structure::builder("application/x-rtp")
.field("media", "audio")
.field("clock-rate", gst::IntRange::new(1i32, i32::MAX))
.field("payload", gst::List::new([10i32, 11]))
.build(),
)
.build(),
)
.unwrap();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&AudioCapsBuilder::new_interleaved()
.format(AudioFormat::S16be)
.build(),
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
}
impl crate::basedepay::RtpBaseDepay2Impl for RtpL16Depay {}
impl RtpLinearAudioDepayImpl for RtpL16Depay {}
/**
* SECTION:element-rtpL20depay
* @see_also: rtpL20pay, rtpL8depay2, rtpL16depay2
*
* Extracts raw 20-bit audio from RTP packets as per [RFC 3551][rfc-3551] and
* [RFC 3190][rfc-3190].
*
* [rfc-3551]: https://www.rfc-editor.org/rfc/rfc3551.html#section-4.5.11
* [rfc-3190]: https://www.rfc-editor.org/rfc/rfc3190.html#section-4
*
* ## Example pipeline
*
* |[
* gst-launch-1.0 udpsrc caps='application/x-rtp, media=audio, clock-rate=48000, encoding-name=L20, encoding-params=(string)1, channels=1, payload=96' ! rtpjitterbuffer latency=50 ! rtpL20depay ! audioconvert ! audioresample ! autoaudiosink
* ]| This will depayload an incoming RTP 20-bit raw audio stream. You can use the #rtpL20pay
* element to create such an RTP stream.
*
* Since: plugins-rs-0.13.0
*/
#[derive(Default)]
pub(crate) struct RtpL20Depay;
#[glib::object_subclass]
impl ObjectSubclass for RtpL20Depay {
const NAME: &'static str = "GstRtpL20Depay2";
type Type = super::RtpL20Depay;
type ParentType = super::RtpLinearAudioDepay;
}
impl ObjectImpl for RtpL20Depay {}
impl GstObjectImpl for RtpL20Depay {}
impl ElementImpl for RtpL20Depay {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"RTP 20-bit Raw Audio Depayloader",
"Codec/Depayloader/Network/RTP",
"Depayload 20-bit raw audio (L20) from RTP packets",
"Tim-Philipp Müller <tim centricular com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&gst::Caps::builder_full()
.structure(
gst::Structure::builder("application/x-rtp")
.field("media", "audio")
.field("clock-rate", gst::IntRange::new(1i32, i32::MAX))
.field("encoding-name", "L20")
.build(),
)
.build(),
)
.unwrap();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&AudioCapsBuilder::new_interleaved()
.format(AudioFormat::S20be)
.build(),
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
}
impl crate::basedepay::RtpBaseDepay2Impl for RtpL20Depay {}
impl RtpLinearAudioDepayImpl for RtpL20Depay {}
/**
* SECTION:element-rtpL24depay2
* @see_also: rtpL24pay2, rtpL8depay2, rtpL16depay2, rtpL24pay
*
* Extracts raw 24-bit audio from RTP packets as per [RFC 3551][rfc-3551] and
* [RFC 3190][rfc-3190].
*
* [rfc-3551]: https://www.rfc-editor.org/rfc/rfc3551.html#section-4.5.11
* [rfc-3190]: https://www.rfc-editor.org/rfc/rfc3190.html#section-4
*
* ## Example pipeline
*
* |[
* gst-launch-1.0 udpsrc caps='application/x-rtp, media=audio, clock-rate=48000, encoding-name=L24, encoding-params=(string)1, channels=1, payload=96' ! rtpjitterbuffer latency=50 ! rtpL24depay2 ! audioconvert ! audioresample ! autoaudiosink
* ]| This will depayload an incoming RTP 24-bit raw audio stream. You can use the #rtpL24pay2
* element to create such an RTP stream.
*
* Since: plugins-rs-0.13.0
*/
#[derive(Default)]
pub(crate) struct RtpL24Depay;
#[glib::object_subclass]
impl ObjectSubclass for RtpL24Depay {
const NAME: &'static str = "GstRtpL24Depay2";
type Type = super::RtpL24Depay;
type ParentType = super::RtpLinearAudioDepay;
}
impl ObjectImpl for RtpL24Depay {}
impl GstObjectImpl for RtpL24Depay {}
impl ElementImpl for RtpL24Depay {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"RTP 24-bit Raw Audio Depayloader",
"Codec/Depayloader/Network/RTP",
"Depayload 24-bit raw audio (L24) from RTP packets",
"Tim-Philipp Müller <tim centricular com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&gst::Caps::builder_full()
.structure(
gst::Structure::builder("application/x-rtp")
.field("media", "audio")
.field("clock-rate", gst::IntRange::new(1i32, i32::MAX))
.field("encoding-name", "L24")
.build(),
)
.build(),
)
.unwrap();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&AudioCapsBuilder::new_interleaved()
.format(AudioFormat::S24be)
.build(),
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
}
impl crate::basedepay::RtpBaseDepay2Impl for RtpL24Depay {}
impl RtpLinearAudioDepayImpl for RtpL24Depay {}
#[cfg(test)]
mod tests {
use byte_slice_cast::*;
use gst_check::Harness;
#[test]
fn test_channel_reorder_l8() {
gst::init().unwrap();
crate::plugin_register_static().expect("rtp plugin");
let mut h = Harness::new("rtpL8depay2");
h.play();
let caps = gst::Caps::builder("application/x-rtp")
.field("media", "audio")
.field("payload", 96)
.field("clock-rate", 48000)
.field("encoding-name", "L8")
.field("channels", "6") // can be string or int
.field("channel-order", "DV.LRLsRsCS")
.build();
h.set_src_caps(caps);
let input_data = [1u8, 2, 3, 4, 5, 6, 11, 12, 13, 14, 15, 16];
let builder = rtp_types::RtpPacketBuilder::new()
.marker_bit(false)
.timestamp(48000)
.payload_type(96)
.sequence_number(456)
.payload(input_data.as_slice());
let buf = builder.write_vec().unwrap();
let buf = gst::Buffer::from_mut_slice(buf);
h.push(buf).unwrap();
h.push_event(gst::event::Eos::new());
let outbuf = h.pull().unwrap();
let out_map = outbuf.map_readable().unwrap();
let out_data = out_map.as_slice_of::<u8>().unwrap();
// input: [ 1, 2, 3, 4, 5, 6 | 11, 12, 13, 14, 15, 16]
// @ FrontLeft, FrontRight, SideLeft, SideRight, FrontCenter, Lfe1
//
// output: [ 1, 2, 5, 6, 3, 4 | 11, 12, 15, 16, 13, 14]
// @ FrontLeft, FrontRight, FrontCenter, Lfe1, SideLeft, SideRight
assert_eq!(out_data, [1, 2, 5, 6, 3, 4, 11, 12, 15, 16, 13, 14]);
}
}

View file

@@ -0,0 +1,80 @@
// GStreamer RTP L8 / L16 / L20 / L24 linear raw audio depayloader
//
// Copyright (C) 2023-2024 Tim-Philipp Müller <tim centricular com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::prelude::*;
pub mod imp;
glib::wrapper! {
pub struct RtpLinearAudioDepay(ObjectSubclass<imp::RtpLinearAudioDepay>)
@extends crate::basedepay::RtpBaseDepay2, gst::Element, gst::Object;
}
glib::wrapper! {
pub(crate) struct RtpL8Depay(ObjectSubclass<imp::RtpL8Depay>)
@extends RtpLinearAudioDepay, crate::basedepay::RtpBaseDepay2, gst::Element, gst::Object;
}
glib::wrapper! {
pub(crate) struct RtpL16Depay(ObjectSubclass<imp::RtpL16Depay>)
@extends RtpLinearAudioDepay, crate::basedepay::RtpBaseDepay2, gst::Element, gst::Object;
}
glib::wrapper! {
pub(crate) struct RtpL20Depay(ObjectSubclass<imp::RtpL20Depay>)
@extends RtpLinearAudioDepay, crate::basedepay::RtpBaseDepay2, gst::Element, gst::Object;
}
glib::wrapper! {
pub(crate) struct RtpL24Depay(ObjectSubclass<imp::RtpL24Depay>)
@extends RtpLinearAudioDepay, crate::basedepay::RtpBaseDepay2, gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
#[cfg(feature = "doc")]
{
use gst::prelude::*;
// Make internal base class available in docs
crate::linear_audio::depay::RtpLinearAudioDepay::static_type()
.mark_as_plugin_api(gst::PluginAPIFlags::empty());
}
gst::Element::register(
Some(plugin),
"rtpL8depay2",
gst::Rank::MARGINAL,
RtpL8Depay::static_type(),
)?;
gst::Element::register(
Some(plugin),
"rtpL16depay2",
gst::Rank::MARGINAL,
RtpL16Depay::static_type(),
)?;
gst::Element::register(
Some(plugin),
"rtpL20depay",
gst::Rank::MARGINAL,
RtpL20Depay::static_type(),
)?;
gst::Element::register(
Some(plugin),
"rtpL24depay2",
gst::Rank::MARGINAL,
RtpL24Depay::static_type(),
)?;
Ok(())
}

View file

@@ -0,0 +1,6 @@
// SPDX-License-Identifier: MPL-2.0
pub mod common;
pub mod depay;
pub mod pay;

View file

@@ -0,0 +1,717 @@
// GStreamer RTP L8 / L16 / L20 / L24 linear raw audio payloader
//
// Copyright (C) 2023-2024 Tim-Philipp Müller <tim centricular com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use atomic_refcell::AtomicRefCell;
use gst::{glib, prelude::*, subclass::prelude::*};
use gst_audio::{AudioCapsBuilder, AudioChannelPosition, AudioFormat};
use once_cell::sync::Lazy;
use std::num::NonZeroU32;
use crate::{
baseaudiopay::{RtpBaseAudioPay2Ext, RtpBaseAudioPay2Impl},
basepay::{RtpBasePay2Ext, RtpBasePay2ImplExt},
};
use crate::linear_audio::common::channel_positions;
#[derive(Default)]
pub struct RtpLinearAudioPay {
state: AtomicRefCell<State>,
}
#[derive(Default)]
struct State {
width: Option<NonZeroU32>,
channel_reorder_map: Option<Vec<usize>>,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"rtplinearaudiopay",
gst::DebugColorFlags::empty(),
Some("RTP L8/L16/L20/L24 Raw Audio Payloader"),
)
});
#[glib::object_subclass]
impl ObjectSubclass for RtpLinearAudioPay {
const NAME: &'static str = "GstRtpLinearAudioPay";
type Type = super::RtpLinearAudioPay;
type ParentType = crate::baseaudiopay::RtpBaseAudioPay2;
}
impl ObjectImpl for RtpLinearAudioPay {}
impl GstObjectImpl for RtpLinearAudioPay {}
impl ElementImpl for RtpLinearAudioPay {}
impl crate::basepay::RtpBasePay2Impl for RtpLinearAudioPay {
fn set_sink_caps(&self, caps: &gst::Caps) -> bool {
let Ok(info) = gst_audio::AudioInfo::from_caps(caps) else {
gst::error!(CAT, imp: self, "Can't parse input caps {caps} into audio info");
return false;
};
gst::info!(CAT, imp: self, "Got caps, audio info: {info:?}");
let encoding_name = match info.format() {
AudioFormat::U8 => "L8",
AudioFormat::S16be => "L16", // and/or pt 10/11
AudioFormat::S20be => "L20",
AudioFormat::S24be => "L24",
_ => unreachable!(), // Input caps will have been checked against template caps
};
let n_channels = info.channels();
let rate = info.rate();
// pt 10 = L16 stereo @ 44.1kHz, pt 11 = L16 mono @ 44.1kHz
let prop_pt = self.obj().property::<u32>("pt");
if prop_pt == 10 && (n_channels != 2 || rate != 44100 || encoding_name != "L16") {
gst::element_imp_error!(
self,
gst::StreamError::Format,
["Static payload type 10 is reserved for stereo 16-bit audio @ 44100 Hz"]
);
return false;
}
if prop_pt == 11 && (n_channels != 1 || rate != 44100 || encoding_name != "L16") {
gst::element_imp_error!(
self,
gst::StreamError::Format,
["Static payload type 11 is reserved for mono 16-bit audio @ 44100 Hz"]
);
return false;
}
let mut src_caps = gst::Caps::builder("application/x-rtp")
.field("media", "audio")
.field("encoding-name", encoding_name)
.field("clock-rate", rate as i32)
.field("channels", n_channels as i32)
.field("encoding-params", info.channels().to_string());
let mut reorder_map = None;
// Figure out channel order for multi-channel audio and if channel reordering is required
if n_channels > 2 {
if let Some(positions) = info.positions() {
match channel_positions::find_channel_order_from_positions(positions) {
Some(name) => {
gst::info!(CAT, imp: self,
"Using {name} channel order mapping for {n_channels} channels"
);
if name != "default" {
src_caps = src_caps.field("channel-order", name);
}
let rtp_positions =
channel_positions::get_channel_order(Some(name), n_channels as i32)
.unwrap();
let mut gst_positions = rtp_positions.to_vec();
// Re-order channel positions according to GStreamer conventions. This should always
// succeed because the input channel positioning comes from internal tables.
AudioChannelPosition::positions_to_valid_order(&mut gst_positions).unwrap();
// Is channel re-ordering actually required?
if rtp_positions != gst_positions {
let mut map = vec![0usize; n_channels as usize];
gst_audio::channel_reorder_map(&gst_positions, rtp_positions, &mut map)
.unwrap();
gst::info!(CAT, imp: self, "Channel positions (GStreamer) : {gst_positions:?}");
gst::info!(CAT, imp: self, "Channel positions (RTP) : {rtp_positions:?}");
gst::info!(CAT, imp: self, "Channel reorder map : {map:?}");
reorder_map = Some(map);
}
}
_ => {
gst::element_imp_warning!(
self,
gst::StreamError::Encode,
["Couldn't find canonical channel order mapping for {positions:?}"]
);
}
}
}
}
self.obj().set_src_caps(&src_caps.build());
let mut state = self.state.borrow_mut();
state.width = NonZeroU32::new(info.width());
state.channel_reorder_map = reorder_map;
self.obj().set_bpf(info.bpf() as usize);
true
}
// https://www.rfc-editor.org/rfc/rfc3551.html#section-4.5.10
//
fn handle_buffer(
&self,
buffer: &gst::Buffer,
id: u64,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let mut buffer = buffer.clone();
let state = self.state.borrow_mut();
// Re-order channels from GStreamer layout to RTP layout if needed
if let Some(reorder_map) = &state.channel_reorder_map {
let buffer_ref = buffer.make_mut();
let width = state.width.expect("width").get();
type I24 = [u8; 3];
match width {
8 => channel_positions::reorder_channels::<u8>(buffer_ref, reorder_map)?,
16 => channel_positions::reorder_channels::<i16>(buffer_ref, reorder_map)?,
24 => channel_positions::reorder_channels::<I24>(buffer_ref, reorder_map)?,
_ => unreachable!(),
}
}
self.parent_handle_buffer(&buffer, id)
}
#[allow(clippy::single_match)]
fn sink_query(&self, query: &mut gst::QueryRef) -> bool {
match query.view_mut() {
gst::QueryViewMut::Caps(query) => {
let src_tmpl_caps = self.obj().src_pad().pad_template_caps();
let peer_caps = self.obj().src_pad().peer_query_caps(Some(&src_tmpl_caps));
if peer_caps.is_empty() {
query.set_result(&peer_caps);
return true;
}
// Baseline: sink pad template caps
let mut ret_caps = self.obj().sink_pad().pad_template_caps();
let format = ret_caps
.structure(0)
.unwrap()
.get::<&str>("format")
.unwrap();
// If downstream has restrictions re. sample rate or number of channels,
// proxy that upstream (we assume the restriction is a single fixed value
// and not something fancy like a list or array of values).
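// e.g. for rtpL16pay2 with downstream caps (or the pt property) fixed to payload
// type 11, this returns audio/x-raw, format=S16BE, rate=44100, channels=1 upstream.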
let peer_s = peer_caps.structure(0).unwrap();
let (implied_channels, implied_rate): (Option<i32>, Option<i32>) = {
let peer_pt = peer_s.get::<i32>("payload").ok().filter(|&v| v > 0);
let prop_pt = self.obj().property::<u32>("pt");
// pt 10 = L16 stereo @ 44.1kHz, pt 11 = L16 mono @ 44.1kHz
match (peer_pt, prop_pt) {
(Some(10), _) | (_, 10) => {
if format == "S16BE" {
(Some(2), Some(44100))
} else {
gst::warning!(CAT, imp: self, "pt 10 only supported for S16BE/L16!");
query.set_result(&gst::Caps::new_empty());
return true;
}
}
(Some(11), _) | (_, 11) => {
if format == "S16BE" {
(Some(1), Some(44100))
} else {
gst::warning!(CAT, imp: self, "pt 10 only supported for S16BE/L16!");
query.set_result(&gst::Caps::new_empty());
return true;
}
}
_ => (None, None),
}
};
let peer_rate = peer_s.get::<i32>("clock-rate").ok().filter(|&r| r > 0);
// We're strict and enforce the implied 44100Hz requirement for pt=10/11
if let Some(pref_rate) = implied_rate.or(peer_rate) {
let caps = ret_caps.make_mut();
caps.set("rate", pref_rate);
}
let peer_chans = {
let encoding_params = peer_s
.get::<&str>("encoding-params")
.ok()
.and_then(|params| params.parse::<i32>().ok())
.filter(|&v| v > 0);
let channels = peer_s.get::<i32>("channels").ok().filter(|&v| v > 0);
encoding_params.or(channels)
};
// We're strict and enforce the stereo/mono channel requirement for pt=10/11
if let Some(pref_chans) = implied_channels.or(peer_chans) {
let caps = ret_caps.make_mut();
caps.set("channels", pref_chans);
}
if let Some(filter) = query.filter() {
ret_caps = ret_caps.intersect_with_mode(filter, gst::CapsIntersectMode::First);
}
query.set_result(&ret_caps);
return true;
}
_ => (),
}
self.parent_sink_query(query)
}
}
impl RtpBaseAudioPay2Impl for RtpLinearAudioPay {}
impl RtpLinearAudioPay {}
trait RtpLinearAudioPayImpl: RtpBaseAudioPay2Impl {}
unsafe impl<T: RtpLinearAudioPayImpl> IsSubclassable<T> for super::RtpLinearAudioPay {}
/**
* SECTION:element-rtpL8pay2
* @see_also: rtpL8depay2, rtpL16pay2, rtpL24pay2, rtpL8pay
*
* Payloads raw 8-bit audio into RTP packets as per [RFC 3551][rfc-3551].
*
* [rfc-3551]: https://www.rfc-editor.org/rfc/rfc3551.html#section-4.5.10
*
* ## Example pipeline
*
* |[
* gst-launch-1.0 audiotestsrc wave=ticks ! rtpL8pay2 ! udpsink host=127.0.0.1 port=5004
* ]| This will generate an 8-bit raw audio test signal and payload it as RTP and send it out
* as UDP to localhost port 5004.
*
* Since: plugins-rs-0.13.0
*/
#[derive(Default)]
pub(crate) struct RtpL8Pay;
#[glib::object_subclass]
impl ObjectSubclass for RtpL8Pay {
const NAME: &'static str = "GstRtpL8Pay2";
type Type = super::RtpL8Pay;
type ParentType = super::RtpLinearAudioPay;
}
impl ObjectImpl for RtpL8Pay {}
impl GstObjectImpl for RtpL8Pay {}
impl ElementImpl for RtpL8Pay {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"RTP 8-bit Raw Audio Payloader",
"Codec/Payloader/Network/RTP",
"Payload 8-bit raw audio (L8) into RTP packets (RFC 3551)",
"Tim-Philipp Müller <tim centricular com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&AudioCapsBuilder::new_interleaved()
.format(AudioFormat::U8)
.build(),
)
.unwrap();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&gst::Caps::builder_full()
.structure(
gst::Structure::builder("application/x-rtp")
.field("media", "audio")
.field("encoding-name", "L8")
.field("clock-rate", gst::IntRange::new(1i32, i32::MAX))
.build(),
)
.build(),
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
}
impl crate::basepay::RtpBasePay2Impl for RtpL8Pay {}
impl RtpLinearAudioPayImpl for RtpL8Pay {}
impl RtpBaseAudioPay2Impl for RtpL8Pay {}
/**
* SECTION:element-rtpL16pay2
* @see_also: rtpL16depay2, rtpL8pay2, rtpL24pay2, rtpL16pay
*
* Payloads raw 16-bit audio into RTP packets as per [RFC 3551][rfc-3551].
*
* [rfc-3551]: https://www.rfc-editor.org/rfc/rfc3551.html#section-4.5.11
*
* ## Example pipeline
*
* |[
* gst-launch-1.0 audiotestsrc wave=ticks ! rtpL16pay2 ! udpsink host=127.0.0.1 port=5004
* ]| This will generate a 16-bit raw audio test signal and payload it as RTP and send it out
* as UDP to localhost port 5004.
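*
* An illustrative variant: stereo S16BE audio at 44100 Hz can also be sent using the
* static payload type 10 from RFC 3551, e.g.
* |[
* gst-launch-1.0 audiotestsrc wave=ticks ! audio/x-raw,format=S16BE,rate=44100,channels=2 ! rtpL16pay2 pt=10 ! udpsink host=127.0.0.1 port=5004
* ]|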
*
* Since: plugins-rs-0.13.0
*/
#[derive(Default)]
pub(crate) struct RtpL16Pay;
#[glib::object_subclass]
impl ObjectSubclass for RtpL16Pay {
const NAME: &'static str = "GstRtpL16Pay2";
type Type = super::RtpL16Pay;
type ParentType = super::RtpLinearAudioPay;
}
impl ObjectImpl for RtpL16Pay {}
impl GstObjectImpl for RtpL16Pay {}
impl ElementImpl for RtpL16Pay {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"RTP 16-bit Raw Audio Payloader",
"Codec/Payloader/Network/RTP",
"Payload 16-bit raw audio (L16) into RTP packets (RFC 3551)",
"Tim-Philipp Müller <tim centricular com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&AudioCapsBuilder::new_interleaved()
.format(AudioFormat::S16be)
.build(),
)
.unwrap();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&gst::Caps::builder_full()
.structure(
gst::Structure::builder("application/x-rtp")
.field("media", "audio")
.field("clock-rate", gst::IntRange::new(1i32, i32::MAX))
.field("encoding-name", "L16")
.build(),
)
.structure(
gst::Structure::builder("application/x-rtp")
.field("media", "audio")
.field("clock-rate", 44100i32)
.field("payload", gst::List::new([10i32, 11]))
.build(),
)
.build(),
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
}
impl crate::basepay::RtpBasePay2Impl for RtpL16Pay {}
impl RtpLinearAudioPayImpl for RtpL16Pay {}
impl RtpBaseAudioPay2Impl for RtpL16Pay {}
/**
* SECTION:element-rtpL20pay
* @see_also: rtpL20depay, rtpL8pay2, rtpL16pay2
*
* Payloads raw 20-bit audio into RTP packets as per [RFC 3551][rfc-3551] and
* [RFC 3190][rfc-3190].
*
* [rfc-3551]: https://www.rfc-editor.org/rfc/rfc3551.html#section-4.5.11
* [rfc-3190]: https://www.rfc-editor.org/rfc/rfc3190.html#section-4
*
* ## Example pipeline
*
* |[
* gst-launch-1.0 audiotestsrc wave=ticks ! rtpL20pay ! udpsink host=127.0.0.1 port=5004
* ]| This will generate a 20-bit raw audio test signal and payload it as RTP and send it out
* as UDP to localhost port 5004.
*
* Since: plugins-rs-0.13.0
*/
#[derive(Default)]
pub(crate) struct RtpL20Pay;
#[glib::object_subclass]
impl ObjectSubclass for RtpL20Pay {
const NAME: &'static str = "GstRtpL20Pay";
type Type = super::RtpL20Pay;
type ParentType = super::RtpLinearAudioPay;
}
impl ObjectImpl for RtpL20Pay {}
impl GstObjectImpl for RtpL20Pay {}
impl ElementImpl for RtpL20Pay {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"RTP 20-bit Raw Audio Payloader",
"Codec/Payloader/Network/RTP",
"Payload 20-bit raw audio (L20) into RTP packets (RFC 3551)",
"Tim-Philipp Müller <tim centricular com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&AudioCapsBuilder::new_interleaved()
.format(AudioFormat::S20be)
.build(),
)
.unwrap();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&gst::Caps::builder_full()
.structure(
gst::Structure::builder("application/x-rtp")
.field("media", "audio")
.field("clock-rate", gst::IntRange::new(1i32, i32::MAX))
.field("encoding-name", "L20")
.build(),
)
.build(),
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
}
impl crate::basepay::RtpBasePay2Impl for RtpL20Pay {}
impl RtpLinearAudioPayImpl for RtpL20Pay {}
impl RtpBaseAudioPay2Impl for RtpL20Pay {}
/**
* SECTION:element-rtpL24pay2
* @see_also: rtpL24depay2, rtpL8pay2, rtpL16pay2, rtpL24pay
*
* Payloads raw 24-bit audio into RTP packets as per [RFC 3551][rfc-3551] and
* [RFC 3190][rfc-3190].
*
* [rfc-3551]: https://www.rfc-editor.org/rfc/rfc3551.html#section-4.5.11
* [rfc-3190]: https://www.rfc-editor.org/rfc/rfc3190.html#section-4
*
* ## Example pipeline
*
* |[
* gst-launch-1.0 audiotestsrc wave=ticks ! audioconvert ! rtpL24pay2 ! udpsink host=127.0.0.1 port=5004
* ]| This will generate a 24-bit raw audio test signal and payload it as RTP and send it out
* as UDP to localhost port 5004.
*
* Since: plugins-rs-0.13.0
*/
#[derive(Default)]
pub(crate) struct RtpL24Pay;
#[glib::object_subclass]
impl ObjectSubclass for RtpL24Pay {
const NAME: &'static str = "GstRtpL24Pay2";
type Type = super::RtpL24Pay;
type ParentType = super::RtpLinearAudioPay;
}
impl ObjectImpl for RtpL24Pay {}
impl GstObjectImpl for RtpL24Pay {}
impl ElementImpl for RtpL24Pay {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"RTP 24-bit Raw Audio Payloader",
"Codec/Payloader/Network/RTP",
"Payload 24-bit raw audio (L24) into RTP packets (RFC 3551)",
"Tim-Philipp Müller <tim centricular com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&AudioCapsBuilder::new_interleaved()
.format(AudioFormat::S24be)
.build(),
)
.unwrap();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&gst::Caps::builder_full()
.structure(
gst::Structure::builder("application/x-rtp")
.field("media", "audio")
.field("clock-rate", gst::IntRange::new(1i32, i32::MAX))
.field("encoding-name", "L24")
.build(),
)
.build(),
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
}
impl crate::basepay::RtpBasePay2Impl for RtpL24Pay {}
impl RtpLinearAudioPayImpl for RtpL24Pay {}
impl RtpBaseAudioPay2Impl for RtpL24Pay {}
#[cfg(test)]
mod tests {
use byte_slice_cast::*;
use gst_check::Harness;
// Same test as in the depayloader, just in reverse for the payloader
#[test]
fn test_channel_reorder_l8() {
gst::init().unwrap();
crate::plugin_register_static().expect("rtp plugin");
let mut h = Harness::new("rtpL8pay2");
h.play();
use gst_audio::AudioChannelPosition::*;
let pos = &[
FrontLeft,
FrontRight,
FrontCenter,
Lfe1,
SideLeft,
SideRight,
];
let mask = gst_audio::AudioChannelPosition::positions_to_mask(pos, true).unwrap();
let input_caps = gst_audio::AudioCapsBuilder::new_interleaved()
.format(gst_audio::AudioFormat::U8)
.rate(48000)
.channels(6)
.channel_mask(mask)
.build();
h.set_src_caps(input_caps);
let input_data = [1u8, 2, 3, 4, 5, 6, 11, 12, 13, 14, 15, 16];
let mut buf = gst::Buffer::from_slice(input_data);
buf.get_mut().unwrap().set_pts(gst::ClockTime::ZERO);
h.push(buf).unwrap();
h.push_event(gst::event::Eos::new());
let outbuf = h.pull().unwrap();
let out_map = outbuf.map_readable().unwrap();
let out_data = out_map.as_slice_of::<u8>().unwrap();
let packet = rtp_types::RtpPacket::parse(out_data).unwrap();
let out_data = packet.payload();
// input: [ 1, 2, 3, 4, 5, 6 | 11, 12, 13, 14, 15, 16]
// @ FrontLeft, FrontRight, FrontCenter, Lfe1, SideLeft, SideRight
//
// output: [ 1, 2, 5, 6, 3, 4 | 11, 12, 15, 16, 13, 14]
// @ FrontLeft, FrontRight, SideLeft, SideRight, FrontCenter, Lfe1
assert_eq!(out_data, [1, 2, 5, 6, 3, 4, 11, 12, 15, 16, 13, 14]);
}
}

View file

@@ -0,0 +1,78 @@
// GStreamer RTP L8 / L16 / L20 / L24 linear raw audio payloader
//
// Copyright (C) 2023-2024 Tim-Philipp Müller <tim centricular com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::prelude::*;
pub mod imp;
glib::wrapper! {
pub struct RtpLinearAudioPay(ObjectSubclass<imp::RtpLinearAudioPay>)
@extends crate::baseaudiopay::RtpBaseAudioPay2, crate::basepay::RtpBasePay2, gst::Element, gst::Object;
}
glib::wrapper! {
pub(crate) struct RtpL8Pay(ObjectSubclass<imp::RtpL8Pay>)
@extends RtpLinearAudioPay, crate::baseaudiopay::RtpBaseAudioPay2, crate::basepay::RtpBasePay2, gst::Element, gst::Object;
}
glib::wrapper! {
pub(crate) struct RtpL16Pay(ObjectSubclass<imp::RtpL16Pay>)
@extends RtpLinearAudioPay, crate::baseaudiopay::RtpBaseAudioPay2, crate::basepay::RtpBasePay2, gst::Element, gst::Object;
}
glib::wrapper! {
pub(crate) struct RtpL20Pay(ObjectSubclass<imp::RtpL20Pay>)
@extends RtpLinearAudioPay, crate::baseaudiopay::RtpBaseAudioPay2, crate::basepay::RtpBasePay2, gst::Element, gst::Object;
}
glib::wrapper! {
pub(crate) struct RtpL24Pay(ObjectSubclass<imp::RtpL24Pay>)
@extends RtpLinearAudioPay, crate::baseaudiopay::RtpBaseAudioPay2, crate::basepay::RtpBasePay2, gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
#[cfg(feature = "doc")]
{
use gst::prelude::*;
// Make internal base class available in docs
crate::linear_audio::pay::RtpLinearAudioPay::static_type()
.mark_as_plugin_api(gst::PluginAPIFlags::empty());
}
gst::Element::register(
Some(plugin),
"rtpL8pay2",
gst::Rank::MARGINAL,
RtpL8Pay::static_type(),
)?;
gst::Element::register(
Some(plugin),
"rtpL16pay2",
gst::Rank::MARGINAL,
RtpL16Pay::static_type(),
)?;
gst::Element::register(
Some(plugin),
"rtpL20pay",
gst::Rank::MARGINAL,
RtpL20Pay::static_type(),
)?;
gst::Element::register(
Some(plugin),
"rtpL24pay2",
gst::Rank::MARGINAL,
RtpL24Pay::static_type(),
)?;
Ok(())
}