Compare commits


13 commits

Author SHA1 Message Date
Rubén Gonzalez 0ea8200cfb Merge branch 'xdpscreencastbin' into 'main'
Video source bin wrapping pipewiresrc using xdg-desktop-portal

See merge request gstreamer/gst-plugins-rs!1405
2024-04-27 23:13:28 +00:00
Maksym Khomenko a87eaa4b79 hrtfrender: use bitmask, not int, to prevent a capsnego failure
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1549>
2024-04-26 20:24:19 +00:00
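For context, `channel-mask` is a bitmask-typed field in raw audio caps, so it has to be written as a `gst::Bitmask` rather than a plain integer; otherwise the field ends up with the wrong type and caps negotiation with downstream elements fails. A minimal sketch of the corrected pattern (not the element's exact code):

```rust
// Build stereo caps with a correctly typed channel-mask field.
// 0x3 = front-left | front-right.
fn stereo_caps() -> gst::Caps {
    gst::Caps::builder("audio/x-raw")
        .field("channels", 2i32)
        .field("channel-mask", gst::Bitmask(0x3))
        .build()
}
```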
Philippe Normand 88cbc93338 dav1ddec: Negotiate bt709 colorimetry when values from seq header are unspecified
With unknown range, colorimetry validation would fail in video-info. As our
decoder outputs only YUV formats, BT709 should be a reasonable default.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1548>
2024-04-26 19:35:41 +00:00
Sebastian Dröge 927c3fcdb6 gtk4paintablesink: Update README.md with all the new features
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1547>
2024-04-26 12:29:10 +03:00
Sebastian Dröge 5803904deb gtk4paintablesink: meson: Add auto-detection of GTK4 versions and dmabuf feature
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1547>
2024-04-26 12:29:10 +03:00
Sebastian Dröge c95e07a897 gtk4paintablesink: Improve scaling logic
If force-aspect-ratio=false then make sure to fully fill the given
width/height with the video frame and avoid rounding errors. This makes
sure that the video is rendered in the exact position selected by the
caller and that graphics offloading is more likely to work.

In other cases and for all overlays, make sure that the calculated
positions are staying inside (0, 0, width, height) as rendering outside
is not allowed by GTK.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1547>
2024-04-26 12:29:10 +03:00
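The overlay handling boils down to clamping every computed rectangle into the widget's bounds; a minimal sketch of that clamping, not the sink's actual code:

```rust
// Clamp a rectangle at (x, y) with size (w, h) into (0, 0, width, height),
// since GTK does not allow rendering outside the allocated area.
fn clamp_rect(x: f64, y: f64, w: f64, h: f64, width: f64, height: f64) -> (f64, f64, f64, f64) {
    let x1 = x.clamp(0.0, width);
    let y1 = y.clamp(0.0, height);
    let x2 = (x + w).clamp(0.0, width);
    let y2 = (y + h).clamp(0.0, height);
    (x1, y1, x2 - x1, y2 - y1)
}
```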
Sebastian Dröge b42bd3d026 gtk4paintablesink: Add force-aspect-ratio property like in other video sinks
Unlike in other sinks, this defaults to false because generally every user of
GDK paintables already ensures that the aspect ratio is kept and the
paintable is laid out in the most optimal way based on the context.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1547>
2024-04-26 12:29:10 +03:00
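The new property can be toggled like any other sink property; a short sketch, assuming the gtk4 plugin is installed:

```rust
fn main() {
    gst::init().unwrap();
    // force-aspect-ratio defaults to false for this sink, unlike most other video sinks.
    let _sink = gst::ElementFactory::make("gtk4paintablesink")
        .property("force-aspect-ratio", true)
        .build()
        .expect("gtk4 plugin not found");
}
```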
Sebastian Dröge 3dd800ac77 gtk4paintablesink: Implement child proxy interface
This allows setting properties on the paintable from gst-launch-1.0.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1547>
2024-04-26 12:29:10 +03:00
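Since the paintable is exposed as a child named "paintable" (see `child_by_name` in the sink changes further down), gst-launch-1.0 can reach its properties with the usual child-proxy syntax; `prop-name` below is only a placeholder for a real paintable property:

$ gst-launch-1.0 videotestsrc ! gtk4paintablesink paintable::prop-name=value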
Sebastian Dröge c92462b240 gtk4: Implement support for directly importing dmabufs
Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/441

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1547>
2024-04-26 12:29:10 +03:00
Sebastian Dröge 7573caa8e9 rtpgccbwe: Move away from deprecated time::Instant to std::time::Instant
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1554>
2024-04-25 15:37:28 +03:00
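For reference, the pattern used throughout the element: subtracting two `std::time::Instant` values yields a `std::time::Duration`, which is then converted into the `time` crate's `Duration` for the existing millisecond arithmetic. A small sketch, assuming the `time` crate is a dependency as in this element:

```rust
use std::time::Instant;

fn elapsed_ms(earlier: Instant) -> f64 {
    // std::time::Instant subtraction gives a std::time::Duration...
    let std_delta = Instant::now() - earlier;
    // ...which converts to time::Duration via TryFrom for whole_milliseconds().
    let delta = time::Duration::try_from(std_delta).unwrap();
    delta.whole_milliseconds() as f64
}
```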
Sebastian Dröge c12585377c Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1554>
2024-04-25 14:46:45 +03:00
Mathieu Duponchelle 17d7997137 transcriberbin: add support for consuming secondary audio streams
In some situations, a translated alternate audio stream for a piece of content
might be available.

Instead of going through transcription and translation of the original
audio stream, it may be preferable for accuracy purposes to simply
transcribe the secondary audio stream.

This MR adds support for doing just that:

* Secondary audio sink pads can be requested as "sink_audio_%u"

* "Sometimes" audio source pads are added at that point to pass through
  the audio, as "src_audio_%u"

* The main transcription bin now contains per-input stream transcription
  bins. Those can be individually controlled through properties on the
  sink pads, for instance translation-languages can be dynamically set
  per audio stream

* Some properties that originally existed on the main element still
  remain, but are now simply mapped to the always audio sink pad ("sink_audio")

* Releasing of secondary sink pads is nominally implemented, but not
  tested in states other than NULL

An example launch line for this would be:

```
$ gst-launch-1.0 transcriberbin name=transcriberbin latency=8000 accumulate-time=0 \
      cc-caps="closedcaption/x-cea-708, format=cc_data" sink_audio_0::language-code="es-US" \
      sink_audio_0::translation-languages="languages, transcript=cc3" \
    uridecodebin uri=file:///home/meh/Music/chaplin.mkv name=d \
      d. ! videoconvert ! transcriberbin.sink_video \
      d. ! clocksync ! audioconvert ! transcriberbin.sink_audio \
      transcriberbin.src_video ! cea608overlay field=1 ! videoconvert ! autovideosink \
      transcriberbin.src_audio ! audioconvert ! fakesink \
    uridecodebin uri=file:///home/meh/Music/chaplin-spanish.webm name=d2 \
      d2. ! audioconvert ! transcriberbin.sink_audio_0 \
      transcriberbin.src_audio_0 ! fakesink
```

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1546>
2024-04-25 11:56:01 +02:00
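The same per-stream configuration can also be done programmatically by requesting a secondary sink pad and setting its properties; a sketch, assuming the plugin is installed (property names match the pad documentation further down):

```rust
use gst::prelude::*;

fn main() {
    gst::init().unwrap();
    let transcriberbin = gst::ElementFactory::make("transcriberbin")
        .build()
        .expect("transcriberbin not found");
    // Request a secondary audio sink pad ("sink_audio_%u")...
    let pad = transcriberbin
        .request_pad_simple("sink_audio_%u")
        .expect("failed to request secondary audio pad");
    // ...and configure its transcription/translation properties.
    pad.set_property("language-code", "es-US");
    let languages = gst::Structure::builder("languages")
        .field("transcript", "cc3")
        .build();
    pad.set_property("translation-languages", languages.to_value());
}
```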
Ruben Gonzalez c7e1525b36 xdgscreencapsrc: new xdg-desktop-portal screen capture plugin
Source element wrapping pipewiresrc using xdg-desktop-portal to start
a screencast session. Useful for testing pipelines with gst-launch:

$ gst-launch-1.0 xdgscreencapsrc ! videoconvert ! gtkwaylandsink

Based on https://gitlab.gnome.org/-/snippets/19 using ashpd crate.
2024-04-01 22:45:20 +02:00
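The `cursor-mode` and `source-type` properties defined by the element (see the implementation further down) can be set directly on the launch line, for example:

$ gst-launch-1.0 xdgscreencapsrc cursor-mode=embedded source-type=monitor ! videoconvert ! autovideosink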
24 changed files with 2561 additions and 958 deletions

Cargo.lock (generated, 552 changed lines): diff suppressed because it is too large.


@ -45,6 +45,8 @@ members = [
"utils/togglerecord",
"utils/tracers",
"utils/uriplaylistbin",
"utils/xdgscreencapsrc",
"video/cdg",
"video/closedcaption",
@ -134,6 +136,7 @@ gdk-wayland = { package = "gdk4-wayland", git = "https://github.com/gtk-rs/gtk4-
gdk-x11 = { package = "gdk4-x11", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master"}
gdk-win32 = { package = "gdk4-win32", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master"}
gst = { package = "gstreamer", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-allocators = { package = "gstreamer-allocators", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-app = { package = "gstreamer-app", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-audio = { package = "gstreamer-audio", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-base = { package = "gstreamer-base", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }


@ -145,6 +145,8 @@ You will find the following plugins in this repository:
- `uriplaylistbin`: Helper bin to gaplessly play a list of URIs.
- `xdgscreencapsrc`: GStreamer xdg-desktop-portal screen capture plugin.
## Building
gst-plugins-rs relies on [cargo-c](https://github.com/lu-zero/cargo-c/) to


@ -649,7 +649,7 @@ impl BaseTransformImpl for HrtfRender {
if direction == gst::PadDirection::Sink {
s.set("channels", 2);
s.set("channel-mask", 0x3);
s.set("channel-mask", gst::Bitmask(0x3));
} else {
let settings = self.settings.lock().unwrap();
if let Some(objs) = &settings.spatial_objects {


@ -2472,11 +2472,14 @@
"GInitiallyUnowned",
"GObject"
],
"interfaces": [
"GstChildProxy"
],
"klass": "Sink/Video",
"long-name": "GTK 4 Paintable Sink",
"pad-templates": {
"sink": {
"caps": "video/x-raw:\n format: { BGRA, ARGB, RGBA, ABGR, RGB, BGR }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(memory:GLMemory, meta:GstVideoOverlayComposition):\n format: { RGBA, RGB }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n texture-target: 2D\n\nvideo/x-raw(memory:GLMemory):\n format: { RGBA, RGB }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n texture-target: 2D\n\nvideo/x-raw(memory:SystemMemory, meta:GstVideoOverlayComposition):\n format: { BGRA, ARGB, RGBA, ABGR, RGB, BGR }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(meta:GstVideoOverlayComposition):\n format: { BGRA, ARGB, RGBA, ABGR, RGB, BGR }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n",
"caps": "video/x-raw(memory:GLMemory, meta:GstVideoOverlayComposition):\n format: { RGBA, RGB }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n texture-target: 2D\n\nvideo/x-raw(memory:GLMemory):\n format: { RGBA, RGB }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n texture-target: 2D\n\nvideo/x-raw(memory:SystemMemory, meta:GstVideoOverlayComposition):\n format: { BGRA, ARGB, RGBA, ABGR, RGB, BGR }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(meta:GstVideoOverlayComposition):\n format: { BGRA, ARGB, RGBA, ABGR, RGB, BGR }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\nvideo/x-raw:\n format: { BGRA, ARGB, RGBA, ABGR, RGB, BGR }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n",
"direction": "sink",
"presence": "always"
}
@ -5585,7 +5588,14 @@
"sink_audio": {
"caps": "audio/x-raw:\n",
"direction": "sink",
"presence": "always"
"presence": "always",
"type": "GstTranscriberSinkPad"
},
"sink_audio_%%u": {
"caps": "audio/x-raw:\n",
"direction": "sink",
"presence": "request",
"type": "GstTranscriberSinkPad"
},
"sink_video": {
"caps": "video/x-raw(ANY):\n",
@ -5597,6 +5607,12 @@
"direction": "src",
"presence": "always"
},
"src_audio_%%u": {
"caps": "audio/x-raw:\n",
"direction": "src",
"presence": "sometimes",
"type": "GstTranscriberSrcPad"
},
"src_video": {
"caps": "video/x-raw(ANY):\n",
"direction": "src",
@ -5735,6 +5751,7 @@
"construct": false,
"construct-only": false,
"controllable": false,
"default": "languages, transcript=(string)cc1;",
"mutable": "playing",
"readable": true,
"type": "GstStructure",
@ -6038,6 +6055,79 @@
}
]
},
"GstTranscriberSinkPad": {
"hierarchy": [
"GstTranscriberSinkPad",
"GstGhostPad",
"GstProxyPad",
"GstPad",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"kind": "object",
"properties": {
"language-code": {
"blurb": "The language of the input stream",
"conditionally-available": false,
"construct": false,
"construct-only": false,
"controllable": false,
"default": "en-US",
"mutable": "playing",
"readable": true,
"type": "gchararray",
"writable": true
},
"mode": {
"blurb": "Which closed caption mode to operate in",
"conditionally-available": false,
"construct": false,
"construct-only": false,
"controllable": false,
"default": "roll-up2 (2)",
"mutable": "playing",
"readable": true,
"type": "GstTtToCea608Mode",
"writable": true
},
"transcriber": {
"blurb": "The transcriber element to use",
"conditionally-available": false,
"construct": false,
"construct-only": false,
"controllable": false,
"mutable": "ready",
"readable": true,
"type": "GstElement",
"writable": true
},
"translation-languages": {
"blurb": "A map of language codes to caption channels, e.g. translation-languages=\"languages, transcript={CC1, 708_1}, fr={708_2, CC3}\" will map the French translation to CC1/service 1 and the original transcript to CC3/service 2",
"conditionally-available": false,
"construct": false,
"construct-only": false,
"controllable": false,
"default": "languages, transcript=(string)cc1;",
"mutable": "playing",
"readable": true,
"type": "GstStructure",
"writable": true
}
}
},
"GstTranscriberSrcPad": {
"hierarchy": [
"GstTranscriberSrcPad",
"GstGhostPad",
"GstProxyPad",
"GstPad",
"GstObject",
"GInitiallyUnowned",
"GObject"
],
"kind": "object"
},
"GstTtToCea608Mode": {
"kind": "enum",
"values": [


@ -205,6 +205,7 @@ plugins = {
'extra-deps': {'cairo-gobject': []},
},
'gopbuffer': {'library': 'libgstgopbuffer'},
'xdgscreencapsrc': {'library': 'libgstxdgscreencapsrc'},
}
if get_option('examples').allowed()
@ -304,6 +305,23 @@ if get_option('gtk4').allowed()
gtk4_features += 'winegl'
endif
endif
gst_allocators_dep = dependency('gstreamer-allocators-1.0', version: '>=1.24', required: false)
gtk_dep = dependency('gtk4', version: '>=4.6', required: get_option('gtk4'))
if gtk_dep.found()
if host_system == 'linux' and gtk_dep.version().version_compare('>=4.14') and gst_allocators_dep.found()
gtk4_features += 'dmabuf'
endif
if gtk_dep.version().version_compare('>=4.14')
gtk4_features += 'gtk_v4_14'
elif gtk_dep.version().version_compare('>=4.12')
gtk4_features += 'gtk_v4_12'
elif gtk_dep.version().version_compare('>=4.10')
gtk4_features += 'gtk_v4_10'
endif
endif
plugins += {
'gtk4': {
'library': 'libgstgtk4',


@ -47,6 +47,7 @@ option('livesync', type: 'feature', value: 'auto', description: 'Build livesync
option('togglerecord', type: 'feature', value: 'auto', description: 'Build togglerecord plugin')
option('tracers', type: 'feature', value: 'auto', description: 'Build tracers plugin')
option('uriplaylistbin', type: 'feature', value: 'auto', description: 'Build uriplaylistbin plugin')
option('xdgscreencapsrc', type: 'feature', value: 'auto', description: 'Build xdgscreencapsrc plugin')
# video
option('cdg', type: 'feature', value: 'auto', description: 'Build cdg plugin')


@ -26,6 +26,7 @@ use std::{
fmt::Debug,
mem,
sync::Mutex,
time::Instant,
};
use time::Duration;
@ -268,7 +269,7 @@ struct Detector {
last_received_packets: BTreeMap<u64, Packet>, // Order by seqnums, front is the newest, back is the oldest
// Last loss update
last_loss_update: Option<time::Instant>,
last_loss_update: Option<Instant>,
// Moving average of the packet loss
loss_average: f64,
@ -280,13 +281,13 @@ struct Detector {
// Threshold fields
threshold: Duration,
last_threshold_update: Option<time::Instant>,
last_threshold_update: Option<Instant>,
num_deltas: i64,
// Overuse related fields
increasing_counter: u32,
last_overuse_estimate: Duration,
last_use_detector_update: time::Instant,
last_use_detector_update: Instant,
increasing_duration: Duration,
// round-trip-time estimations
@ -337,7 +338,7 @@ impl Detector {
last_threshold_update: None,
num_deltas: 0,
last_use_detector_update: time::Instant::now(),
last_use_detector_update: Instant::now(),
increasing_counter: 0,
last_overuse_estimate: Duration::ZERO,
increasing_duration: Duration::ZERO,
@ -519,11 +520,14 @@ impl Detector {
}
fn compute_loss_average(&mut self, loss_fraction: f64) {
let now = time::Instant::now();
let now = Instant::now();
if let Some(ref last_update) = self.last_loss_update {
self.loss_average = loss_fraction
+ (-(now - *last_update).whole_milliseconds() as f64).exp()
+ (-Duration::try_from(now - *last_update)
.unwrap()
.whole_milliseconds() as f64)
.exp()
* (self.loss_average - loss_fraction);
}
@ -588,7 +592,7 @@ impl Detector {
const K_D: f64 = 0.00018; // Table1. Coefficient for the adaptive threshold
const MAX_TIME_DELTA: Duration = Duration::milliseconds(100);
let now = time::Instant::now();
let now = Instant::now();
if self.last_threshold_update.is_none() {
self.last_threshold_update = Some(now);
}
@ -604,7 +608,9 @@ impl Detector {
} else {
K_U
};
let time_delta = (now - self.last_threshold_update.unwrap()).min(MAX_TIME_DELTA);
let time_delta = Duration::try_from(now - self.last_threshold_update.unwrap())
.unwrap()
.min(MAX_TIME_DELTA);
let d = abs_estimate - self.threshold;
let add = k * d.whole_milliseconds() as f64 * time_delta.whole_milliseconds() as f64;
@ -616,7 +622,7 @@ impl Detector {
fn overuse_filter(&mut self) {
let (th_usage, estimate) = self.compare_threshold();
let now = time::Instant::now();
let now = Instant::now();
let delta = now - self.last_use_detector_update;
self.last_use_detector_update = now;
match th_usage {
@ -695,14 +701,14 @@ struct State {
/// Used in additive mode to track last control time, influences
/// calculation of added value according to gcc section 5.5
last_increase_on_delay: Option<time::Instant>,
last_decrease_on_delay: time::Instant,
last_increase_on_delay: Option<Instant>,
last_decrease_on_delay: Instant,
/// Bitrate target based on loss for all video streams.
target_bitrate_on_loss: Bitrate,
last_increase_on_loss: time::Instant,
last_decrease_on_loss: time::Instant,
last_increase_on_loss: Instant,
last_decrease_on_loss: Instant,
/// Exponential moving average, updated when bitrate is
/// decreased
@ -723,7 +729,7 @@ struct State {
budget_offset: i64,
flow_return: Result<gst::FlowSuccess, gst::FlowError>,
last_push: time::Instant,
last_push: Instant,
}
impl Default for State {
@ -731,11 +737,11 @@ impl Default for State {
Self {
target_bitrate_on_delay: DEFAULT_ESTIMATED_BITRATE,
target_bitrate_on_loss: DEFAULT_ESTIMATED_BITRATE,
last_increase_on_loss: time::Instant::now(),
last_decrease_on_loss: time::Instant::now(),
last_increase_on_loss: Instant::now(),
last_decrease_on_loss: Instant::now(),
ema: Default::default(),
last_increase_on_delay: None,
last_decrease_on_delay: time::Instant::now(),
last_decrease_on_delay: Instant::now(),
min_bitrate: DEFAULT_MIN_BITRATE,
max_bitrate: DEFAULT_MAX_BITRATE,
detector: Detector::new(),
@ -744,7 +750,7 @@ impl Default for State {
last_control_op: BandwidthEstimationOp::Increase("Initial increase".into()),
flow_return: Err(gst::FlowError::Flushing),
clock_entry: None,
last_push: time::Instant::now(),
last_push: Instant::now(),
budget_offset: 0,
}
}
@ -753,8 +759,8 @@ impl Default for State {
impl State {
// 4. sending engine implementing a "leaky bucket"
fn create_buffer_list(&mut self, bwe: &super::BandwidthEstimator) -> BufferList {
let now = time::Instant::now();
let elapsed = now - self.last_push;
let now = Instant::now();
let elapsed = Duration::try_from(now - self.last_push).unwrap();
let mut budget = (elapsed.whole_nanoseconds() as i64)
.mul_div_round(
self.estimated_bitrate as i64,
@ -803,7 +809,7 @@ impl State {
}
fn compute_increased_rate(&mut self, bwe: &super::BandwidthEstimator) -> Option<Bitrate> {
let now = time::Instant::now();
let now = Instant::now();
let target_bitrate = self.target_bitrate_on_delay as f64;
let effective_bitrate = self.detector.effective_bitrate();
let time_since_last_update_ms = match self.last_increase_on_delay {
@ -813,7 +819,7 @@ impl State {
return None;
}
(now - prev).whole_milliseconds() as f64
Duration::try_from(now - prev).unwrap().whole_milliseconds() as f64
}
};
@ -950,7 +956,7 @@ impl State {
fn loss_control(&mut self, bwe: &super::BandwidthEstimator) -> bool {
let loss_ratio = self.detector.loss_ratio();
let now = time::Instant::now();
let now = Instant::now();
if loss_ratio > LOSS_DECREASE_THRESHOLD
&& (now - self.last_decrease_on_loss) > LOSS_UPDATE_INTERVAL
@ -993,7 +999,7 @@ impl State {
_ => (),
},
NetworkUsage::Over => {
let now = time::Instant::now();
let now = Instant::now();
if now - self.last_decrease_on_delay > DELAY_UPDATE_INTERVAL {
let effective_bitrate = self.detector.effective_bitrate();
let target =


@ -0,0 +1,47 @@
[package]
name = "gst-plugin-xdgscreencapsrc"
version = "0.12.0-alpha.1"
authors = ["Ruben Gonzalez <rgonzalez@fluendo.com"]
license = "MPL-2.0"
description = "GStreamer xdg-desktop-portal screen capture plugin"
repository = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs"
edition = "2021"
rust-version = "1.70"
[dependencies]
gst.workspace = true
parking_lot = "0.12"
ashpd = {version = "0.6", default-features = false, features = ["tokio"]}
tokio = { version = "1", features = ["fs", "macros", "rt-multi-thread", "time"] }
once_cell.workspace = true
[dev-dependencies]
either = "1.0"
gst-check.workspace = true
[lib]
name = "gstxdgscreencapsrc"
crate-type = ["cdylib", "rlib"]
path = "src/lib.rs"
[build-dependencies]
gst-plugin-version-helper.workspace = true
[features]
static = []
capi = []
doc = ["gst/v1_18"]
[package.metadata.capi]
min_version = "0.9.21"
[package.metadata.capi.header]
enabled = false
[package.metadata.capi.library]
install_subdir = "gstreamer-1.0"
versioning = false
import_library = false
[package.metadata.capi.pkg_config]
requires_private = "gstreamer-1.0, gstreamer-audio-1.0, gstreamer-video-1.0, gobject-2.0, glib-2.0, gmodule-2.0"


@ -0,0 +1,3 @@
fn main() {
gst_plugin_version_helper::info()
}


@ -0,0 +1,34 @@
// Copyright (C) 2023 Ruben Gonzalez <rgonzalez@fluendo.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(clippy::non_send_fields_in_send_ty, unused_doc_comments)]
/**
* plugin-xdgscreencapsrc:
*
* Since: plugins-rs-0.12.0
*/
use gst::glib;
mod xdgscreencapsrc;
fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
xdgscreencapsrc::register(plugin)
}
gst::plugin_define!(
xdgscreencapsrc,
env!("CARGO_PKG_DESCRIPTION"),
plugin_init,
concat!(env!("CARGO_PKG_VERSION"), "-", "COMMIT_ID"),
// FIXME: MPL-2.0 is only allowed since 1.18.3 (as unknown) and 1.20 (as known)
"MPL",
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_REPOSITORY"),
env!("BUILD_REL_DATE")
);


@ -0,0 +1,333 @@
// Copyright (C) 2023 Ruben Gonzalez <rgonzalez@fluendo.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
/**
* element-xdgscreencapsrc:
* @short_description: Source element wrapping pipewiresrc using xdg-desktop-portal to start a screencast session.
*
* Based on https://gitlab.gnome.org/-/snippets/19 using https://crates.io/crates/ashpd
*
* ## Example pipeline
* ```bash
* gst-launch-1.0 -v xdgscreencapsrc ! videoconvert ! identity silent=false ! gtkwaylandsink
* ```
*
* Since: plugins-rs-0.12.0
*/
use gst::glib;
use gst::prelude::*;
use gst::subclass::prelude::*;
use ashpd::{
desktop::screencast::{PersistMode, Screencast},
WindowIdentifier,
};
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Copy, glib::Enum)]
#[repr(u32)]
#[enum_type(name = "SourceType")]
pub enum SourceType {
#[enum_value(name = "A monitor", nick = "monitor")]
Monitor,
#[enum_value(name = "A specific window", nick = "window")]
Window,
#[enum_value(name = "Virtual", nick = "virtual")]
Virtual,
#[enum_value(name = "monitor+window+virtual", nick = "all")]
All,
}
impl From<SourceType> for ashpd::enumflags2::BitFlags<ashpd::desktop::screencast::SourceType, u32> {
fn from(v: SourceType) -> Self {
use ashpd::desktop::screencast;
match v {
SourceType::Monitor => screencast::SourceType::Monitor.into(),
SourceType::Window => screencast::SourceType::Window.into(),
SourceType::Virtual => screencast::SourceType::Virtual.into(),
SourceType::All => {
screencast::SourceType::Monitor
| screencast::SourceType::Window
| screencast::SourceType::Virtual
}
}
}
}
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Copy, glib::Enum)]
#[repr(u32)]
#[enum_type(name = "CursorMode")]
pub enum CursorMode {
#[enum_value(
name = "The cursor is not part of the screen cast stream",
nick = "hidden"
)]
Hidden,
#[enum_value(
name = "The cursor is embedded as part of the stream buffers",
nick = "embedded"
)]
Embedded,
#[enum_value(
name = "The cursor is not part of the screen cast stream, but sent as PipeWire stream metadata. Not implemented",
nick = "metadata"
)]
Metadata,
}
impl From<CursorMode> for ashpd::desktop::screencast::CursorMode {
fn from(v: CursorMode) -> Self {
use ashpd::desktop::screencast;
match v {
CursorMode::Hidden => screencast::CursorMode::Hidden,
CursorMode::Embedded => screencast::CursorMode::Embedded,
CursorMode::Metadata => unimplemented!(),
}
}
}
use once_cell::sync::Lazy;
use parking_lot::Mutex;
pub fn block_on<F: std::future::Future>(future: F) -> F::Output {
static TOKIO_RT: once_cell::sync::Lazy<tokio::runtime::Runtime> =
once_cell::sync::Lazy::new(|| {
tokio::runtime::Builder::new_current_thread()
.enable_io()
.enable_time()
.build()
.expect("launch of single-threaded tokio runtime")
});
TOKIO_RT.block_on(future)
}
async fn portal_main(cursor_mode: CursorMode, source_type: SourceType) -> ashpd::Result<u32> {
let proxy = Screencast::new().await?;
let session = proxy.create_session().await?;
proxy
.select_sources(
&session,
cursor_mode.into(),
source_type.into(),
false,
None,
PersistMode::DoNot,
)
.await?;
let response = proxy
.start(&session, &WindowIdentifier::default())
.await?
.response()?;
if let Some(first_value) = response.streams().iter().next() {
let id = first_value.pipe_wire_node_id();
Ok(id)
} else {
Err(ashpd::Error::NoResponse)
}
}
const DEFAULT_CURSOR_MODE: CursorMode = CursorMode::Hidden;
const DEFAULT_SOURCE_TYPE: SourceType = SourceType::All;
#[derive(Debug, Clone, Copy)]
struct Settings {
cursor_mode: CursorMode,
source_type: SourceType,
}
impl Default for Settings {
fn default() -> Self {
Settings {
cursor_mode: DEFAULT_CURSOR_MODE,
source_type: DEFAULT_SOURCE_TYPE,
}
}
}
pub struct XdpScreenCast {
settings: Mutex<Settings>,
src: gst::Element,
srcpad: gst::GhostPad,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"xdgscreencapsrc",
gst::DebugColorFlags::empty(),
Some("XDP Screen Cast Bin"),
)
});
impl XdpScreenCast {}
#[glib::object_subclass]
impl ObjectSubclass for XdpScreenCast {
const NAME: &'static str = "GstXdpScreenCast";
type Type = super::XdpScreenCast;
type ParentType = gst::Bin;
fn with_class(klass: &Self::Class) -> Self {
let settings = Mutex::new(Settings::default());
let src = gst::ElementFactory::make("pipewiresrc").build().unwrap();
let templ = klass.pad_template("src").unwrap();
let srcpad = gst::GhostPad::from_template(&templ);
Self {
settings,
src,
srcpad,
}
}
}
impl ObjectImpl for XdpScreenCast {
// based on https://flatpak.github.io/xdg-desktop-portal/docs/doc-org.freedesktop.portal.ScreenCast.html#org-freedesktop-portal-screencast-selectsources
fn properties() -> &'static [glib::ParamSpec] {
static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| {
vec![
glib::ParamSpecEnum::builder_with_default::<CursorMode>(
"cursor-mode",
DEFAULT_CURSOR_MODE,
)
.nick("cursor mode")
.blurb("Determines how the cursor will be drawn in the screen cast stream")
.mutable_ready()
.build(),
glib::ParamSpecEnum::builder_with_default::<SourceType>(
"source-type",
DEFAULT_SOURCE_TYPE,
)
.nick("source type")
.blurb("Sets the types of content to record")
.mutable_ready()
.build(),
]
});
PROPERTIES.as_ref()
}
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
match pspec.name() {
"cursor-mode" => {
let mut settings = self.settings.lock();
let cursor_mode = value.get().expect("type checked upstream");
gst::debug!(
CAT,
imp: self,
"Setting cursor-mode from {:?} to {:?}",
settings.cursor_mode,
cursor_mode
);
settings.cursor_mode = cursor_mode;
}
"source-type" => {
let mut settings = self.settings.lock();
let source_type = value.get().expect("type checked upstream");
gst::debug!(
CAT,
imp: self,
"Setting source-type from {:?} to {:?}",
settings.source_type,
source_type
);
settings.source_type = source_type;
}
_ => unimplemented!(),
}
}
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
match pspec.name() {
"cursor-mode" => {
let settings = self.settings.lock();
settings.cursor_mode.to_value()
}
"source-type" => {
let settings = self.settings.lock();
settings.source_type.to_value()
}
_ => unimplemented!(),
}
}
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
obj.add(&self.src).unwrap();
self.srcpad
.set_target(Some(&self.src.static_pad("src").unwrap()))
.unwrap();
obj.add_pad(&self.srcpad).unwrap();
}
}
impl GstObjectImpl for XdpScreenCast {}
impl ElementImpl for XdpScreenCast {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"xdg-desktop-portal screen capture",
"Generic",
"Source element wrapping pipewiresrc using \
xdg-desktop-portal to start a screencast session.",
"Ruben Gonzalez <rgonzalez@fluendo.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let caps = gst::Caps::new_any();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
)
.unwrap();
vec![src_pad_template]
});
PAD_TEMPLATES.as_ref()
}
fn change_state(
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::debug!(CAT, imp: self, "Changing state {:?}", transition);
let settings = self.settings.lock();
let success = self.parent_change_state(transition)?;
if transition == gst::StateChange::NullToReady {
if let Ok(fd) = block_on(portal_main(settings.cursor_mode, settings.source_type)) {
self.src.set_property("fd", fd as i32);
} else {
return Err(gst::StateChangeError);
}
}
Ok(success)
}
}
impl BinImpl for XdpScreenCast {}


@ -0,0 +1,25 @@
// Copyright (C) 2023 Ruben Gonzalez <rgonzalez@fluendo.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub struct XdpScreenCast(ObjectSubclass<imp::XdpScreenCast>) @extends gst::Bin, gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"xdgscreencapsrc",
gst::Rank::NONE,
XdpScreenCast::static_type(),
)
}

File diff suppressed because it is too large.


@ -30,7 +30,15 @@ enum MuxMethod {
}
glib::wrapper! {
pub struct TranscriberBin(ObjectSubclass<imp::TranscriberBin>) @extends gst::Bin, gst::Element, gst::Object;
pub struct TranscriberBin(ObjectSubclass<imp::TranscriberBin>) @extends gst::Bin, gst::Element, gst::Object, @implements gst::ChildProxy;
}
glib::wrapper! {
pub struct TranscriberSinkPad(ObjectSubclass<imp::TranscriberSinkPad>) @extends gst::GhostPad, gst::ProxyPad, gst::Pad, gst::Object;
}
glib::wrapper! {
pub struct TranscriberSrcPad(ObjectSubclass<imp::TranscriberSrcPad>) @extends gst::GhostPad, gst::ProxyPad, gst::Pad, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
@ -38,6 +46,8 @@ pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
{
CaptionSource::static_type().mark_as_plugin_api(gst::PluginAPIFlags::empty());
MuxMethod::static_type().mark_as_plugin_api(gst::PluginAPIFlags::empty());
TranscriberSinkPad::static_type().mark_as_plugin_api(gst::PluginAPIFlags::empty());
TranscriberSrcPad::static_type().mark_as_plugin_api(gst::PluginAPIFlags::empty());
}
gst::Element::register(


@ -134,7 +134,7 @@ impl Dav1dDec {
let matrix = match pic.matrix_coefficients() {
pixel::MatrixCoefficients::Identity => gst_video::VideoColorMatrix::Rgb,
pixel::MatrixCoefficients::BT709 => gst_video::VideoColorMatrix::Bt709,
pixel::MatrixCoefficients::Unspecified => gst_video::VideoColorMatrix::Unknown,
pixel::MatrixCoefficients::Unspecified => gst_video::VideoColorMatrix::Bt709,
pixel::MatrixCoefficients::BT470M => gst_video::VideoColorMatrix::Fcc,
pixel::MatrixCoefficients::BT470BG => gst_video::VideoColorMatrix::Bt601,
pixel::MatrixCoefficients::ST240M => gst_video::VideoColorMatrix::Smpte240m,
@ -149,7 +149,7 @@ impl Dav1dDec {
let transfer = match pic.transfer_characteristic() {
pixel::TransferCharacteristic::BT1886 => gst_video::VideoTransferFunction::Bt709,
pixel::TransferCharacteristic::Unspecified => gst_video::VideoTransferFunction::Unknown,
pixel::TransferCharacteristic::Unspecified => gst_video::VideoTransferFunction::Bt709,
pixel::TransferCharacteristic::BT470M => gst_video::VideoTransferFunction::Bt709,
pixel::TransferCharacteristic::BT470BG => gst_video::VideoTransferFunction::Gamma28,
pixel::TransferCharacteristic::ST170M => gst_video::VideoTransferFunction::Bt601,
@ -180,7 +180,7 @@ impl Dav1dDec {
let primaries = match pic.color_primaries() {
pixel::ColorPrimaries::BT709 => gst_video::VideoColorPrimaries::Bt709,
pixel::ColorPrimaries::Unspecified => gst_video::VideoColorPrimaries::Unknown,
pixel::ColorPrimaries::Unspecified => gst_video::VideoColorPrimaries::Bt709,
pixel::ColorPrimaries::BT470M => gst_video::VideoColorPrimaries::Bt470m,
pixel::ColorPrimaries::BT470BG => gst_video::VideoColorPrimaries::Bt470bg,
pixel::ColorPrimaries::ST240M => gst_video::VideoColorPrimaries::Smpte240m,


@ -17,6 +17,7 @@ gst = { workspace = true, features = ["v1_16"] }
gst-base.workspace = true
gst-video.workspace = true
gst-gl = { workspace = true, features = ["v1_16"], optional = true }
gst-allocators = { workspace = true, features = ["v1_24"], optional = true }
gst-gl-wayland = { workspace = true, features = ["v1_16"], optional = true }
gst-gl-x11 = { workspace = true, features = ["v1_16"], optional = true }
@ -50,6 +51,7 @@ wayland = ["gtk/v4_6", "gdk-wayland", "gst-gl", "gst-gl-wayland"]
x11glx = ["gtk/v4_6", "gdk-x11", "gst-gl", "gst-gl-x11"]
x11egl = ["gtk/v4_6", "gdk-x11", "gst-gl", "gst-gl-egl"]
winegl = ["gdk-win32/egl", "gst-gl-egl"]
dmabuf = ["gst-allocators", "wayland", "gtk_v4_14", "gst-video/v1_24"]
capi = []
doc = ["gst/v1_18"]
gtk_v4_10 = ["gtk/v4_10"]


@ -1,10 +1,20 @@
# Gtk 4 Sink & Paintable
# GTK 4 Sink & Paintable
GTK 4 provides `gtk::Video` & `gtk::Picture` for rendering media such as videos. As the default `gtk::Video` widget doesn't
offer the possibility to use a custom `gst::Pipeline`, the plugin provides a `gst_video::VideoSink` along with a `gdk::Paintable` that's capable of rendering the sink's frames.
The Sink can generate GL Textures if the system is capable of it, but it needs to be compiled
with either `wayland`, `x11glx` or `x11egl` cargo features.
The sink can generate GL Textures if the system is capable of it, but it needs
to be compiled with either `wayland`, `x11glx` or `x11egl` cargo features. On
Windows and macOS this is enabled by default.
Additionally, the sink can render DMABufs directly on Linux if GTK 4.14 or
newer is used. For this, the `dmabuf` feature needs to be enabled.

Depending on the GTK version that is used and that should be supported as a minimum,
new features or more efficient processing can be opted into with the `gtk_v4_10`,
`gtk_v4_12` and `gtk_v4_14` features. The minimum GTK version required by the
sink is GTK 4.4 on Linux without GL support, and GTK 4.6 on Windows, on macOS, and
on Linux with GL support.
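Outside of the meson build, the optional features can be enabled manually when
building with cargo, e.g. something like `cargo build -p gst-plugin-gtk4 --features dmabuf`
on a Linux host with GTK 4.14 available (the `dmabuf` feature pulls in `wayland` and
`gtk_v4_14`); the meson build detects and enables these automatically.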
# Flatpak Integration
@ -44,7 +54,7 @@ To build and include the plugin in a Flatpak manifest, you can add the following
{
"type": "git",
"url": "https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs",
"branch": "0.10"
"branch": "0.12"
}
],
"build-options": {


@ -6,13 +6,6 @@ use gtk::{gdk, gio, glib};
use std::cell::RefCell;
fn create_ui(app: &gtk::Application) {
let window = gtk::ApplicationWindow::new(app);
window.set_default_size(640, 480);
let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
let picture = gtk::Picture::new();
let label = gtk::Label::new(Some("Position: 00:00:00"));
let pipeline = gst::Pipeline::new();
let overlay = gst::ElementFactory::make("clockoverlay")
@ -64,8 +57,26 @@ fn create_ui(app: &gtk::Application) {
src.link_filtered(&overlay, &caps).unwrap();
overlay.link(&sink).unwrap();
let window = gtk::ApplicationWindow::new(app);
window.set_default_size(640, 480);
let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
let picture = gtk::Picture::new();
picture.set_paintable(Some(&paintable));
vbox.append(&picture);
#[cfg(feature = "gtk_v4_14")]
{
let offload = gtk::GraphicsOffload::new(Some(&picture));
offload.set_enabled(gtk::GraphicsOffloadEnabled::Enabled);
vbox.append(&offload);
}
#[cfg(not(feature = "gtk_v4_14"))]
{
vbox.append(&picture);
}
let label = gtk::Label::new(Some("Position: 00:00:00"));
vbox.append(&label);
window.set_child(Some(&vbox));


@ -14,7 +14,61 @@ use gst_video::prelude::*;
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
use gst_gl::prelude::*;
use gtk::{gdk, glib};
use std::collections::{HashMap, HashSet};
use std::{
collections::{HashMap, HashSet},
ops,
};
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum VideoInfo {
VideoInfo(gst_video::VideoInfo),
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
DmaDrm(gst_video::VideoInfoDmaDrm),
}
impl From<gst_video::VideoInfo> for VideoInfo {
fn from(v: gst_video::VideoInfo) -> Self {
VideoInfo::VideoInfo(v)
}
}
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
impl From<gst_video::VideoInfoDmaDrm> for VideoInfo {
fn from(v: gst_video::VideoInfoDmaDrm) -> Self {
VideoInfo::DmaDrm(v)
}
}
impl ops::Deref for VideoInfo {
type Target = gst_video::VideoInfo;
fn deref(&self) -> &Self::Target {
match self {
VideoInfo::VideoInfo(info) => info,
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
VideoInfo::DmaDrm(info) => info,
}
}
}
impl VideoInfo {
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
fn dma_drm(&self) -> Option<&gst_video::VideoInfoDmaDrm> {
match self {
VideoInfo::VideoInfo(..) => None,
VideoInfo::DmaDrm(info) => Some(info),
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum TextureCacheId {
Memory(usize),
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
GL(usize),
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
DmaBuf([i32; 4]),
}
#[derive(Debug)]
enum MappedFrame {
@ -24,6 +78,17 @@ enum MappedFrame {
frame: gst_gl::GLVideoFrame<gst_gl::gl_video_frame::Readable>,
wrapped_context: gst_gl::GLContext,
},
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
DmaBuf {
buffer: gst::Buffer,
info: gst_video::VideoInfoDmaDrm,
n_planes: u32,
fds: [i32; 4],
offsets: [usize; 4],
strides: [usize; 4],
width: u32,
height: u32,
},
}
impl MappedFrame {
@ -32,6 +97,8 @@ impl MappedFrame {
MappedFrame::SysMem(frame) => frame.buffer(),
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
MappedFrame::GL { frame, .. } => frame.buffer(),
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
MappedFrame::DmaBuf { buffer, .. } => buffer,
}
}
@ -40,6 +107,8 @@ impl MappedFrame {
MappedFrame::SysMem(frame) => frame.width(),
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
MappedFrame::GL { frame, .. } => frame.width(),
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
MappedFrame::DmaBuf { info, .. } => info.width(),
}
}
@ -48,6 +117,8 @@ impl MappedFrame {
MappedFrame::SysMem(frame) => frame.height(),
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
MappedFrame::GL { frame, .. } => frame.height(),
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
MappedFrame::DmaBuf { info, .. } => info.height(),
}
}
@ -56,6 +127,8 @@ impl MappedFrame {
MappedFrame::SysMem(frame) => frame.format_info(),
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
MappedFrame::GL { frame, .. } => frame.format_info(),
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
MappedFrame::DmaBuf { info, .. } => info.format_info(),
}
}
}
@ -108,16 +181,16 @@ fn video_format_to_memory_format(f: gst_video::VideoFormat) -> gdk::MemoryFormat
fn video_frame_to_memory_texture(
frame: gst_video::VideoFrame<gst_video::video_frame::Readable>,
cached_textures: &mut HashMap<usize, gdk::Texture>,
used_textures: &mut HashSet<usize>,
cached_textures: &mut HashMap<TextureCacheId, gdk::Texture>,
used_textures: &mut HashSet<TextureCacheId>,
) -> (gdk::Texture, f64) {
let texture_id = frame.plane_data(0).unwrap().as_ptr() as usize;
let ptr = frame.plane_data(0).unwrap().as_ptr() as usize;
let pixel_aspect_ratio =
(frame.info().par().numer() as f64) / (frame.info().par().denom() as f64);
if let Some(texture) = cached_textures.get(&texture_id) {
used_textures.insert(texture_id);
if let Some(texture) = cached_textures.get(&TextureCacheId::Memory(ptr)) {
used_textures.insert(TextureCacheId::Memory(ptr));
return (texture.clone(), pixel_aspect_ratio);
}
@ -135,8 +208,8 @@ fn video_frame_to_memory_texture(
)
.upcast::<gdk::Texture>();
cached_textures.insert(texture_id, texture.clone());
used_textures.insert(texture_id);
cached_textures.insert(TextureCacheId::Memory(ptr), texture.clone());
used_textures.insert(TextureCacheId::Memory(ptr));
(texture, pixel_aspect_ratio)
}
@ -144,8 +217,8 @@ fn video_frame_to_memory_texture(
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
fn video_frame_to_gl_texture(
frame: gst_gl::GLVideoFrame<gst_gl::gl_video_frame::Readable>,
cached_textures: &mut HashMap<usize, gdk::Texture>,
used_textures: &mut HashSet<usize>,
cached_textures: &mut HashMap<TextureCacheId, gdk::Texture>,
used_textures: &mut HashSet<TextureCacheId>,
gdk_context: &gdk::GLContext,
#[allow(unused)] wrapped_context: &gst_gl::GLContext,
) -> (gdk::Texture, f64) {
@ -154,8 +227,8 @@ fn video_frame_to_gl_texture(
let pixel_aspect_ratio =
(frame.info().par().numer() as f64) / (frame.info().par().denom() as f64);
if let Some(texture) = cached_textures.get(&(texture_id)) {
used_textures.insert(texture_id);
if let Some(texture) = cached_textures.get(&TextureCacheId::GL(texture_id)) {
used_textures.insert(TextureCacheId::GL(texture_id));
return (texture.clone(), pixel_aspect_ratio);
}
@ -237,18 +310,64 @@ fn video_frame_to_gl_texture(
.upcast::<gdk::Texture>()
};
cached_textures.insert(texture_id, texture.clone());
used_textures.insert(texture_id);
cached_textures.insert(TextureCacheId::GL(texture_id), texture.clone());
used_textures.insert(TextureCacheId::GL(texture_id));
(texture, pixel_aspect_ratio)
}
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
#[allow(clippy::too_many_arguments)]
fn video_frame_to_dmabuf_texture(
buffer: gst::Buffer,
cached_textures: &mut HashMap<TextureCacheId, gdk::Texture>,
used_textures: &mut HashSet<TextureCacheId>,
info: &gst_video::VideoInfoDmaDrm,
n_planes: u32,
fds: &[i32; 4],
offsets: &[usize; 4],
strides: &[usize; 4],
width: u32,
height: u32,
) -> Result<(gdk::Texture, f64), glib::Error> {
let pixel_aspect_ratio = (info.par().numer() as f64) / (info.par().denom() as f64);
if let Some(texture) = cached_textures.get(&TextureCacheId::DmaBuf(*fds)) {
used_textures.insert(TextureCacheId::DmaBuf(*fds));
return Ok((texture.clone(), pixel_aspect_ratio));
}
let builder = gdk::DmabufTextureBuilder::new();
builder.set_display(&gdk::Display::default().unwrap());
builder.set_fourcc(info.fourcc());
builder.set_modifier(info.modifier());
builder.set_width(width);
builder.set_height(height);
builder.set_n_planes(n_planes);
for plane in 0..(n_planes as usize) {
builder.set_fd(plane as u32, fds[plane]);
builder.set_offset(plane as u32, offsets[plane] as u32);
builder.set_stride(plane as u32, strides[plane] as u32);
}
let texture = unsafe {
builder.build_with_release_func(move || {
drop(buffer);
})?
};
cached_textures.insert(TextureCacheId::DmaBuf(*fds), texture.clone());
used_textures.insert(TextureCacheId::DmaBuf(*fds));
Ok((texture, pixel_aspect_ratio))
}
impl Frame {
pub(crate) fn into_textures(
self,
#[allow(unused_variables)] gdk_context: Option<&gdk::GLContext>,
cached_textures: &mut HashMap<usize, gdk::Texture>,
) -> Vec<Texture> {
cached_textures: &mut HashMap<TextureCacheId, gdk::Texture>,
) -> Result<Vec<Texture>, glib::Error> {
let mut textures = Vec::with_capacity(1 + self.overlays.len());
let mut used_textures = HashSet::with_capacity(1 + self.overlays.len());
@ -278,6 +397,28 @@ impl Frame {
&wrapped_context,
)
}
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
MappedFrame::DmaBuf {
buffer,
info,
n_planes,
fds,
offsets,
strides,
width,
height,
} => video_frame_to_dmabuf_texture(
buffer,
cached_textures,
&mut used_textures,
&info,
n_planes,
&fds,
&offsets,
&strides,
width,
height,
)?,
};
textures.push(Texture {
@ -309,14 +450,14 @@ impl Frame {
// Remove textures that were not used this time
cached_textures.retain(|id, _| used_textures.contains(id));
textures
Ok(textures)
}
}
impl Frame {
pub(crate) fn new(
buffer: &gst::Buffer,
info: &gst_video::VideoInfo,
info: &VideoInfo,
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))] wrapped_context: Option<
&gst_gl::GLContext,
>,
@ -327,77 +468,125 @@ impl Frame {
// Empty buffers get filtered out in show_frame
debug_assert!(buffer.n_memory() > 0);
let mut frame;
#[allow(unused_mut)]
let mut frame = None;
#[cfg(not(any(target_os = "macos", target_os = "windows", feature = "gst-gl")))]
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
{
frame = Self {
frame: MappedFrame::SysMem(
// Check we received a buffer with dmabuf memory and if so do some checks before
// passing it onwards
if frame.is_none()
&& buffer
.peek_memory(0)
.is_memory_type::<gst_allocators::DmaBufMemory>()
{
if let Some((vmeta, info)) =
Option::zip(buffer.meta::<gst_video::VideoMeta>(), info.dma_drm())
{
let mut fds = [-1i32; 4];
let mut offsets = [0; 4];
let mut strides = [0; 4];
let n_planes = vmeta.n_planes() as usize;
let vmeta_offsets = vmeta.offset();
let vmeta_strides = vmeta.stride();
for plane in 0..n_planes {
let Some((range, skip)) =
buffer.find_memory(vmeta_offsets[plane]..(vmeta_offsets[plane] + 1))
else {
break;
};
let mem = buffer.peek_memory(range.start);
let Some(mem) = mem.downcast_memory_ref::<gst_allocators::DmaBufMemory>()
else {
break;
};
let fd = mem.fd();
fds[plane] = fd;
offsets[plane] = mem.offset() + skip;
strides[plane] = vmeta_strides[plane] as usize;
}
// All fds valid?
if fds[0..n_planes].iter().all(|fd| *fd != -1) {
frame = Some(MappedFrame::DmaBuf {
buffer: buffer.clone(),
info: info.clone(),
n_planes: n_planes as u32,
fds,
offsets,
strides,
width: vmeta.width(),
height: vmeta.height(),
});
}
}
}
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
{
if frame.is_none() {
// Check we received a buffer with GL memory and if the context of that memory
// can share with the wrapped context around the GDK GL context.
//
// If not it has to be uploaded to the GPU.
let memory_ctx = buffer
.peek_memory(0)
.downcast_memory_ref::<gst_gl::GLBaseMemory>()
.and_then(|m| {
let ctx = m.context();
if wrapped_context
.map_or(false, |wrapped_context| wrapped_context.can_share(ctx))
{
Some(ctx)
} else {
None
}
});
if let Some(memory_ctx) = memory_ctx {
// If there is no GLSyncMeta yet then we need to add one here now, which requires
// obtaining a writable buffer.
let mapped_frame = if buffer.meta::<gst_gl::GLSyncMeta>().is_some() {
gst_gl::GLVideoFrame::from_buffer_readable(buffer.clone(), info)
.map_err(|_| gst::FlowError::Error)?
} else {
let mut buffer = buffer.clone();
{
let buffer = buffer.make_mut();
gst_gl::GLSyncMeta::add(buffer, memory_ctx);
}
gst_gl::GLVideoFrame::from_buffer_readable(buffer, info)
.map_err(|_| gst::FlowError::Error)?
};
// Now that it's guaranteed that there is a sync meta and the frame is mapped, set
// a sync point so we can ensure that the texture is ready later when making use of
// it as gdk::GLTexture.
let meta = mapped_frame.buffer().meta::<gst_gl::GLSyncMeta>().unwrap();
meta.set_sync_point(memory_ctx);
frame = Some(MappedFrame::GL {
frame: mapped_frame,
wrapped_context: wrapped_context.unwrap().clone(),
});
}
}
}
}
let mut frame = Self {
frame: match frame {
Some(frame) => frame,
None => MappedFrame::SysMem(
gst_video::VideoFrame::from_buffer_readable(buffer.clone(), info)
.map_err(|_| gst::FlowError::Error)?,
),
overlays: vec![],
};
}
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
{
// Check we received a buffer with GL memory and if the context of that memory
// can share with the wrapped context around the GDK GL context.
//
// If not it has to be uploaded to the GPU.
let memory_ctx = buffer
.peek_memory(0)
.downcast_memory_ref::<gst_gl::GLBaseMemory>()
.and_then(|m| {
let ctx = m.context();
if wrapped_context
.map_or(false, |wrapped_context| wrapped_context.can_share(ctx))
{
Some(ctx)
} else {
None
}
});
if let Some(memory_ctx) = memory_ctx {
// If there is no GLSyncMeta yet then we need to add one here now, which requires
// obtaining a writable buffer.
let mapped_frame = if buffer.meta::<gst_gl::GLSyncMeta>().is_some() {
gst_gl::GLVideoFrame::from_buffer_readable(buffer.clone(), info)
.map_err(|_| gst::FlowError::Error)?
} else {
let mut buffer = buffer.clone();
{
let buffer = buffer.make_mut();
gst_gl::GLSyncMeta::add(buffer, memory_ctx);
}
gst_gl::GLVideoFrame::from_buffer_readable(buffer, info)
.map_err(|_| gst::FlowError::Error)?
};
// Now that it's guaranteed that there is a sync meta and the frame is mapped, set
// a sync point so we can ensure that the texture is ready later when making use of
// it as gdk::GLTexture.
let meta = mapped_frame.buffer().meta::<gst_gl::GLSyncMeta>().unwrap();
meta.set_sync_point(memory_ctx);
frame = Self {
frame: MappedFrame::GL {
frame: mapped_frame,
wrapped_context: wrapped_context.unwrap().clone(),
},
overlays: vec![],
};
} else {
frame = Self {
frame: MappedFrame::SysMem(
gst_video::VideoFrame::from_buffer_readable(buffer.clone(), info)
.map_err(|_| gst::FlowError::Error)?,
),
overlays: vec![],
};
}
}
},
overlays: vec![],
};
frame.overlays = frame
.frame


@ -1,7 +1,7 @@
//
// Copyright (C) 2021 Bilal Elmoussaoui <bil.elmoussaoui@gmail.com>
// Copyright (C) 2021 Jordan Petridis <jordan@centricular.com>
// Copyright (C) 2021 Sebastian Dröge <sebastian@centricular.com>
// Copyright (C) 2021-2024 Sebastian Dröge <sebastian@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
@ -62,7 +62,7 @@ pub(crate) static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
pub struct PaintableSink {
paintable: Mutex<Option<ThreadGuard<Paintable>>>,
window: Mutex<Option<ThreadGuard<gtk::Window>>>,
info: Mutex<Option<gst_video::VideoInfo>>,
info: Mutex<Option<super::frame::VideoInfo>>,
sender: Mutex<Option<async_channel::Sender<SinkEvent>>>,
pending_frame: Mutex<Option<Frame>>,
cached_caps: Mutex<Option<gst::Caps>>,
@ -82,6 +82,7 @@ impl ObjectSubclass for PaintableSink {
const NAME: &'static str = "GstGtk4PaintableSink";
type Type = super::PaintableSink;
type ParentType = gst_video::VideoSink;
type Interfaces = (gst::ChildProxy,);
}
impl ObjectImpl for PaintableSink {
@ -110,12 +111,14 @@ impl ObjectImpl for PaintableSink {
return None::<&gdk::Paintable>.to_value();
}
let mut paintable = self.paintable.lock().unwrap();
if paintable.is_none() {
self.create_paintable(&mut paintable);
let mut paintable_guard = self.paintable.lock().unwrap();
let mut created = false;
if paintable_guard.is_none() {
created = true;
self.create_paintable(&mut paintable_guard);
}
let paintable = match &*paintable {
let paintable = match &*paintable_guard {
Some(ref paintable) => paintable,
None => {
gst::error!(CAT, imp: self, "Failed to create paintable");
@ -124,16 +127,31 @@ impl ObjectImpl for PaintableSink {
};
// Getter must be called from the main thread
if paintable.is_owner() {
paintable.get_ref().to_value()
} else {
if !paintable.is_owner() {
gst::error!(
CAT,
imp: self,
"Can't retrieve Paintable from non-main thread"
);
None::<&gdk::Paintable>.to_value()
return None::<&gdk::Paintable>.to_value();
}
let paintable = paintable.get_ref().clone();
drop(paintable_guard);
if created {
let self_ = self.to_owned();
glib::MainContext::default().invoke(move || {
let paintable_guard = self_.paintable.lock().unwrap();
if let Some(paintable) = &*paintable_guard {
let paintable_clone = paintable.get_ref().clone();
drop(paintable_guard);
self_.obj().child_added(&paintable_clone, "paintable");
}
});
}
paintable.to_value()
}
_ => unimplemented!(),
}
@ -163,53 +181,99 @@ impl ElementImpl for PaintableSink {
{
let caps = caps.get_mut().unwrap();
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
{
for features in [
[
gst_allocators::CAPS_FEATURE_MEMORY_DMABUF,
gst_video::CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
]
.as_slice(),
[gst_allocators::CAPS_FEATURE_MEMORY_DMABUF].as_slice(),
] {
let c = gst_video::VideoCapsBuilder::new()
.format(gst_video::VideoFormat::DmaDrm)
.features(features.iter().copied())
.build();
caps.append(c);
}
}
for features in [
None,
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
Some(gst::CapsFeatures::new([
"memory:GLMemory",
"meta:GstVideoOverlayComposition",
gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY,
gst_video::CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
])),
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
Some(gst::CapsFeatures::new(["memory:GLMemory"])),
Some(gst::CapsFeatures::new([
gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY,
])),
Some(gst::CapsFeatures::new([
"memory:SystemMemory",
"meta:GstVideoOverlayComposition",
gst_video::CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
])),
Some(gst::CapsFeatures::new(["meta:GstVideoOverlayComposition"])),
Some(gst::CapsFeatures::new([
gst_video::CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
])),
None,
] {
const GL_FORMATS: &[gst_video::VideoFormat] =
&[gst_video::VideoFormat::Rgba, gst_video::VideoFormat::Rgb];
const NON_GL_FORMATS: &[gst_video::VideoFormat] = &[
gst_video::VideoFormat::Bgra,
gst_video::VideoFormat::Argb,
gst_video::VideoFormat::Rgba,
gst_video::VideoFormat::Abgr,
gst_video::VideoFormat::Rgb,
gst_video::VideoFormat::Bgr,
];
let formats = if features
.as_ref()
.is_some_and(|features| features.contains("memory:GLMemory"))
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
{
GL_FORMATS
} else {
NON_GL_FORMATS
};
const GL_FORMATS: &[gst_video::VideoFormat] =
&[gst_video::VideoFormat::Rgba, gst_video::VideoFormat::Rgb];
const NON_GL_FORMATS: &[gst_video::VideoFormat] = &[
gst_video::VideoFormat::Bgra,
gst_video::VideoFormat::Argb,
gst_video::VideoFormat::Rgba,
gst_video::VideoFormat::Abgr,
gst_video::VideoFormat::Rgb,
gst_video::VideoFormat::Bgr,
];
let mut c = gst_video::video_make_raw_caps(formats).build();
let formats = if features.as_ref().is_some_and(|features| {
features.contains(gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY)
}) {
GL_FORMATS
} else {
NON_GL_FORMATS
};
if let Some(features) = features {
let c = c.get_mut().unwrap();
let mut c = gst_video::video_make_raw_caps(formats).build();
if features.contains("memory:GLMemory") {
c.set("texture-target", "2D")
if let Some(features) = features {
let c = c.get_mut().unwrap();
if features.contains(gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY) {
c.set("texture-target", "2D")
}
c.set_features_simple(Some(features));
}
c.set_features_simple(Some(features));
caps.append(c);
}
#[cfg(not(any(
target_os = "macos",
target_os = "windows",
feature = "gst-gl"
)))]
{
const FORMATS: &[gst_video::VideoFormat] = &[
gst_video::VideoFormat::Bgra,
gst_video::VideoFormat::Argb,
gst_video::VideoFormat::Rgba,
gst_video::VideoFormat::Abgr,
gst_video::VideoFormat::Rgb,
gst_video::VideoFormat::Bgr,
];
caps.append(c);
let mut c = gst_video::video_make_raw_caps(FORMATS).build();
if let Some(features) = features {
let c = c.get_mut().unwrap();
c.set_features_simple(Some(features));
}
caps.append(c);
}
}
}
@ -244,18 +308,31 @@ impl ElementImpl for PaintableSink {
}
}
let mut paintable = self.paintable.lock().unwrap();
if paintable.is_none() {
self.create_paintable(&mut paintable);
let mut paintable_guard = self.paintable.lock().unwrap();
let mut created = false;
if paintable_guard.is_none() {
created = true;
self.create_paintable(&mut paintable_guard);
}
if paintable.is_none() {
if paintable_guard.is_none() {
gst::error!(CAT, imp: self, "Failed to create paintable");
return Err(gst::StateChangeError);
}
drop(paintable);
drop(paintable_guard);
if created {
let self_ = self.to_owned();
glib::MainContext::default().invoke(move || {
let paintable_guard = self_.paintable.lock().unwrap();
if let Some(paintable) = &*paintable_guard {
let paintable_clone = paintable.get_ref().clone();
drop(paintable_guard);
self_.obj().child_added(&paintable_clone, "paintable");
}
});
}
// Notify the pipeline about the GL display and wrapped context so that any other
// elements in the pipeline ideally use the same / create GL contexts that are
@ -361,8 +438,21 @@ impl BaseSinkImpl for PaintableSink {
fn set_caps(&self, caps: &gst::Caps) -> Result<(), gst::LoggableError> {
gst::debug!(CAT, imp: self, "Setting caps {caps:?}");
let video_info = gst_video::VideoInfo::from_caps(caps)
.map_err(|_| gst::loggable_error!(CAT, "Invalid caps"))?;
#[allow(unused_mut)]
let mut video_info = None;
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
{
if let Ok(info) = gst_video::VideoInfoDmaDrm::from_caps(caps) {
video_info = Some(info.into());
}
}
let video_info = match video_info {
Some(info) => info,
None => gst_video::VideoInfo::from_caps(caps)
.map_err(|_| gst::loggable_error!(CAT, "Invalid caps"))?
.into(),
};
self.info.lock().unwrap().replace(video_info);
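For orientation, the two branches correspond to caps of roughly the following shape (field values here are purely illustrative, not taken from the change):

video/x-raw(memory:DMABuf), format=DMA_DRM, drm-format=NV12:0x0100000000000001, width=1920, height=1080
video/x-raw, format=RGBA, width=1920, height=1080

The first form is parsed by VideoInfoDmaDrm::from_caps; the second falls through to the plain VideoInfo path.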
@@ -516,10 +606,11 @@ impl PaintableSink {
match action {
SinkEvent::FrameChanged => {
let Some(frame) = self.pending_frame() else {
return glib::ControlFlow::Continue;
};
gst::trace!(CAT, imp: self, "Frame changed");
paintable
.get_ref()
.handle_frame_changed(self.pending_frame())
paintable.get_ref().handle_frame_changed(&self.obj(), frame);
}
}
@@ -530,13 +621,59 @@ impl PaintableSink {
#[allow(unused_mut)]
let mut tmp_caps = Self::pad_templates()[0].caps().clone();
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
{
let formats = utils::invoke_on_main_thread(move || {
let Some(display) = gdk::Display::default() else {
return vec![];
};
let dmabuf_formats = display.dmabuf_formats();
let mut formats = vec![];
let n_formats = dmabuf_formats.n_formats();
for i in 0..n_formats {
let (fourcc, modifier) = dmabuf_formats.format(i);
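// Entries with a zero fourcc or the DRM_FORMAT_MOD_INVALID modifier
// (u64::MAX >> 8) cannot be expressed as a drm-format string, so skip them.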
if fourcc == 0 || modifier == (u64::MAX >> 8) {
continue;
}
formats.push(gst_video::dma_drm_fourcc_to_string(fourcc, modifier));
}
formats
});
if formats.is_empty() {
// Filter out dmabuf caps from the template pads if we have no supported formats
if !matches!(&*GL_CONTEXT.lock().unwrap(), GLContext::Initialized { .. }) {
tmp_caps = tmp_caps
.iter_with_features()
.filter(|(_, features)| {
!features.contains(gst_allocators::CAPS_FEATURE_MEMORY_DMABUF)
})
.map(|(s, c)| (s.to_owned(), c.to_owned()))
.collect::<gst::Caps>();
}
} else {
let tmp_caps = tmp_caps.make_mut();
for (s, f) in tmp_caps.iter_with_features_mut() {
if f.contains(gst_allocators::CAPS_FEATURE_MEMORY_DMABUF) {
s.set("drm-format", gst::List::new(&formats));
}
}
}
}
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
{
// Filter out GL caps from the template pads if we have no context
if !matches!(&*GL_CONTEXT.lock().unwrap(), GLContext::Initialized { .. }) {
tmp_caps = tmp_caps
.iter_with_features()
.filter(|(_, features)| !features.contains("memory:GLMemory"))
.filter(|(_, features)| {
!features.contains(gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY)
})
.map(|(s, c)| (s.to_owned(), c.to_owned()))
.collect::<gst::Caps>();
}
@@ -564,7 +701,17 @@ impl PaintableSink {
let window = gtk::Window::new();
let picture = gtk::Picture::new();
picture.set_paintable(Some(&paintable));
window.set_child(Some(&picture));
#[cfg(feature = "gtk_v4_14")]
{
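// GtkGraphicsOffload (GTK 4.14+) can hand suitable content, e.g. imported
// dmabufs, directly to the compositor instead of compositing it in GTK.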
let offload = gtk::GraphicsOffload::new(Some(&picture));
offload.set_enabled(gtk::GraphicsOffloadEnabled::Enabled);
window.set_child(Some(&offload));
}
#[cfg(not(feature = "gtk_v4_14"))]
{
window.set_child(Some(&picture));
}
window.set_default_size(640, 480);
window.connect_close_request({
@@ -1073,3 +1220,33 @@ impl PaintableSink {
}
}
}
impl ChildProxyImpl for PaintableSink {
fn child_by_index(&self, index: u32) -> Option<glib::Object> {
if index != 0 {
return None;
}
let paintable = self.paintable.lock().unwrap();
paintable
.as_ref()
.filter(|p| p.is_owner())
.map(|p| p.get_ref().upcast_ref::<glib::Object>().clone())
}
fn child_by_name(&self, name: &str) -> Option<glib::Object> {
if name == "paintable" {
return self.child_by_index(0);
}
None
}
fn children_count(&self) -> u32 {
let paintable = self.paintable.lock().unwrap();
if paintable.is_some() {
1
} else {
0
}
}
}
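A minimal usage sketch for the interface above, as hypothetical application code (it assumes a gtk4paintablesink element and the force-aspect-ratio property added to the paintable further below):

use gstreamer as gst;
use gst::prelude::*;

fn configure_paintable(sink: &gst::Element) {
    // The sink implements GstChildProxy, so the paintable is reachable by name.
    if let Some(proxy) = sink.dynamic_cast_ref::<gst::ChildProxy>() {
        if let Some(paintable) = proxy.child_by_name("paintable") {
            paintable.set_property("force-aspect-ratio", true);
        }
    }
}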


@@ -22,7 +22,8 @@ enum SinkEvent {
glib::wrapper! {
pub struct PaintableSink(ObjectSubclass<imp::PaintableSink>)
@extends gst_video::VideoSink, gst_base::BaseSink, gst::Element, gst::Object;
@extends gst_video::VideoSink, gst_base::BaseSink, gst::Element, gst::Object,
@implements gst::ChildProxy;
}
impl PaintableSink {


@@ -31,12 +31,13 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
#[derive(Debug)]
pub struct Paintable {
paintables: RefCell<Vec<Texture>>,
cached_textures: RefCell<HashMap<usize, gdk::Texture>>,
cached_textures: RefCell<HashMap<super::super::frame::TextureCacheId, gdk::Texture>>,
gl_context: RefCell<Option<gdk::GLContext>>,
background_color: Cell<gdk::RGBA>,
#[cfg(feature = "gtk_v4_10")]
scaling_filter: Cell<gsk::ScalingFilter>,
use_scaling_filter: Cell<bool>,
force_aspect_ratio: Cell<bool>,
#[cfg(not(feature = "gtk_v4_10"))]
premult_shader: gsk::GLShader,
}
@@ -51,6 +52,7 @@ impl Default for Paintable {
#[cfg(feature = "gtk_v4_10")]
scaling_filter: Cell::new(gsk::ScalingFilter::Linear),
use_scaling_filter: Cell::new(false),
force_aspect_ratio: Cell::new(false),
#[cfg(not(feature = "gtk_v4_10"))]
premult_shader: gsk::GLShader::from_bytes(&glib::Bytes::from_static(include_bytes!(
"premult.glsl"
@@ -94,6 +96,11 @@ impl ObjectImpl for Paintable {
.blurb("Use selected scaling filter or GTK default for rendering")
.default_value(false)
.build(),
glib::ParamSpecBoolean::builder("force-aspect-ratio")
.nick("Force Aspect Ratio")
.blurb("When enabled, scaling will respect original aspect ratio")
.default_value(true)
.build(),
]
});
@@ -117,6 +124,7 @@ impl ObjectImpl for Paintable {
"scaling-filter" => self.scaling_filter.get().to_value(),
#[cfg(feature = "gtk_v4_10")]
"use-scaling-filter" => self.use_scaling_filter.get().to_value(),
"force-aspect-ratio" => self.force_aspect_ratio.get().to_value(),
_ => unimplemented!(),
}
}
@@ -139,6 +147,7 @@ impl ObjectImpl for Paintable {
"scaling-filter" => self.scaling_filter.set(value.get().unwrap()),
#[cfg(feature = "gtk_v4_10")]
"use-scaling-filter" => self.use_scaling_filter.set(value.get().unwrap()),
"force-aspect-ratio" => self.force_aspect_ratio.set(value.get().unwrap()),
_ => unimplemented!(),
}
}
@@ -173,40 +182,66 @@ impl PaintableImpl for Paintable {
let snapshot = snapshot.downcast_ref::<gtk::Snapshot>().unwrap();
let background_color = self.background_color.get();
let force_aspect_ratio = self.force_aspect_ratio.get();
let paintables = self.paintables.borrow();
if !paintables.is_empty() {
gst::trace!(CAT, imp: self, "Snapshotting frame");
let (frame_width, frame_height) =
paintables.first().map(|p| (p.width, p.height)).unwrap();
let mut scale_x = width / frame_width as f64;
let mut scale_y = height / frame_height as f64;
let mut trans_x = 0.0;
let mut trans_y = 0.0;
// TODO: Property for keeping aspect ratio or not
if (scale_x - scale_y).abs() > f64::EPSILON {
if scale_x > scale_y {
trans_x =
((frame_width as f64 * scale_x) - (frame_width as f64 * scale_y)) / 2.0;
scale_x = scale_y;
} else {
trans_y =
((frame_height as f64 * scale_y) - (frame_height as f64 * scale_x)) / 2.0;
scale_y = scale_x;
}
}
let Some(first_paintable) = paintables.first() else {
gst::trace!(CAT, imp: self, "Snapshotting black frame");
snapshot.append_color(
&background_color,
&graphene::Rect::new(0f32, 0f32, width as f32, height as f32),
);
snapshot.translate(&graphene::Point::new(trans_x as f32, trans_y as f32));
return;
};
for Texture {
gst::trace!(CAT, imp: self, "Snapshotting frame");
// The first paintable is the actual video frame and defines the overall size.
//
// Based on its size relative to the snapshot width/height, all other paintables are
// scaled accordingly.
let (frame_width, frame_height) = (first_paintable.width, first_paintable.height);
let mut scale_x = width / frame_width as f64;
let mut scale_y = height / frame_height as f64;
// Usually the caller makes sure that the aspect ratio is preserved. To enforce this here
// optionally, we scale the frame equally in both directions and center it. In addition the
// background color is drawn behind the frame to fill the gaps.
//
// This is not done by default for performance reasons and usually would draw a <1px
// background.
if force_aspect_ratio {
let mut trans_x = 0.0;
let mut trans_y = 0.0;
if (scale_x - scale_y).abs() > f64::EPSILON {
if scale_x > scale_y {
trans_x = (width - (frame_width as f64 * scale_y)) / 2.0;
scale_x = scale_y;
} else {
trans_y = (height - (frame_height as f64 * scale_x)) / 2.0;
scale_y = scale_x;
}
}
if !background_color.is_clear() && (trans_x > f64::EPSILON || trans_y > f64::EPSILON) {
snapshot.append_color(
&background_color,
&graphene::Rect::new(0f32, 0f32, width as f32, height as f32),
);
}
snapshot.translate(&graphene::Point::new(trans_x as f32, trans_y as f32));
}
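// Illustrative numbers: a 1920x1080 frame drawn into an 800x800 area gives
// scale_x ~ 0.417 and scale_y ~ 0.741; the smaller factor is used on both axes,
// so the frame renders as 800x450 and trans_y = (800 - 450) / 2 = 175.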
// Make immutable
let scale_x = scale_x;
let scale_y = scale_y;
for (
idx,
Texture {
texture,
x,
y,
@@ -214,151 +249,159 @@ impl PaintableImpl for Paintable {
height: paintable_height,
global_alpha,
has_alpha,
} in &*paintables
},
) in paintables.iter().enumerate()
{
snapshot.push_opacity(*global_alpha as f64);
let bounds = if !force_aspect_ratio && idx == 0 {
// While this should end up with width again, be explicit in this case to avoid
// rounding errors and fill the whole area with the video frame.
graphene::Rect::new(0.0, 0.0, width as f32, height as f32)
} else {
// Scale texture position and size with the same scale factor as the main video
// frame, and make sure to not render outside (0, 0, width, height).
let x = f32::clamp(*x * scale_x as f32, 0.0, width as f32);
let y = f32::clamp(*y * scale_y as f32, 0.0, height as f32);
let texture_width = f32::min(*paintable_width * scale_x as f32, width as f32);
let texture_height = f32::min(*paintable_height * scale_y as f32, height as f32);
graphene::Rect::new(x, y, texture_width, texture_height)
};
// Only premultiply GL textures that expect to be in premultiplied RGBA format.
//
// For GTK 4.14 or newer we use the correct format directly when building the
// texture, but only if a GLES3+ context is used. In that case the NGL renderer is
// used by GTK, which supports non-premultiplied formats correctly and fast.
//
// For GTK 4.10-4.12, or 4.14 and newer if a GLES2 context is used, we use a
// self-mask to pre-multiply the alpha.
//
// For GTK before 4.10, we use a GL shader and hope that it works.
#[cfg(feature = "gtk_v4_10")]
{
snapshot.push_opacity(*global_alpha as f64);
let texture_width = *paintable_width * scale_x as f32;
let texture_height = *paintable_height * scale_y as f32;
let x = *x * scale_x as f32;
let y = *y * scale_y as f32;
let bounds = graphene::Rect::new(x, y, texture_width, texture_height);
// Only premultiply GL textures that expect to be in premultiplied RGBA format.
//
// For GTK 4.14 or newer we use the correct format directly when building the
// texture, but only if a GLES3+ context is used. In that case the NGL renderer is
// used by GTK, which supports non-premultiplied formats correctly and fast.
//
// For GTK 4.10-4.12, or 4.14 and newer if a GLES2 context is used, we use a
// self-mask to pre-multiply the alpha.
//
// For GTK before 4.10, we use a GL shader and hope that it works.
#[cfg(feature = "gtk_v4_10")]
{
let context_requires_premult = {
#[cfg(feature = "gtk_v4_14")]
{
self.gl_context.borrow().as_ref().map_or(false, |context| {
context.api() != gdk::GLAPI::GLES || context.version().0 < 3
})
}
#[cfg(not(feature = "gtk_v4_14"))]
{
true
}
};
let do_premult =
context_requires_premult && texture.is::<gdk::GLTexture>() && *has_alpha;
if do_premult {
snapshot.push_mask(gsk::MaskMode::Alpha);
if self.use_scaling_filter.get() {
#[cfg(feature = "gtk_v4_10")]
snapshot.append_scaled_texture(
texture,
self.scaling_filter.get(),
&bounds,
);
} else {
snapshot.append_texture(texture, &bounds);
}
snapshot.pop(); // pop mask
// color matrix to set alpha of the source to 1.0 as it was
// already applied via the mask just above.
snapshot.push_color_matrix(
&graphene::Matrix::from_float({
[
1.0, 0.0, 0.0, 0.0, //
0.0, 1.0, 0.0, 0.0, //
0.0, 0.0, 1.0, 0.0, //
0.0, 0.0, 0.0, 0.0,
]
}),
&graphene::Vec4::new(0.0, 0.0, 0.0, 1.0),
);
let context_requires_premult = {
#[cfg(feature = "gtk_v4_14")]
{
self.gl_context.borrow().as_ref().map_or(false, |context| {
context.api() != gdk::GLAPI::GLES || context.version().0 < 3
})
}
#[cfg(not(feature = "gtk_v4_14"))]
{
true
}
};
let do_premult =
context_requires_premult && texture.is::<gdk::GLTexture>() && *has_alpha;
if do_premult {
snapshot.push_mask(gsk::MaskMode::Alpha);
if self.use_scaling_filter.get() {
#[cfg(feature = "gtk_v4_10")]
snapshot.append_scaled_texture(texture, self.scaling_filter.get(), &bounds);
} else {
snapshot.append_texture(texture, &bounds);
}
snapshot.pop(); // pop mask
if do_premult {
snapshot.pop(); // pop color matrix
snapshot.pop(); // pop mask 2
}
}
#[cfg(not(feature = "gtk_v4_10"))]
{
let do_premult =
texture.is::<gdk::GLTexture>() && *has_alpha && gtk::micro_version() < 13;
if do_premult {
snapshot.push_gl_shader(
&self.premult_shader,
&bounds,
gsk::ShaderArgsBuilder::new(&self.premult_shader, None).to_args(),
);
}
if self.use_scaling_filter.get() {
#[cfg(feature = "gtk_v4_10")]
snapshot.append_scaled_texture(texture, self.scaling_filter.get(), &bounds);
} else {
snapshot.append_texture(texture, &bounds);
}
if do_premult {
snapshot.gl_shader_pop_texture(); // pop texture appended above from the shader
snapshot.pop(); // pop shader
}
// color matrix to set alpha of the source to 1.0 as it was
// already applied via the mask just above.
snapshot.push_color_matrix(
&graphene::Matrix::from_float({
[
1.0, 0.0, 0.0, 0.0, //
0.0, 1.0, 0.0, 0.0, //
0.0, 0.0, 1.0, 0.0, //
0.0, 0.0, 0.0, 0.0,
]
}),
&graphene::Vec4::new(0.0, 0.0, 0.0, 1.0),
);
}
snapshot.pop(); // pop opacity
if self.use_scaling_filter.get() {
#[cfg(feature = "gtk_v4_10")]
snapshot.append_scaled_texture(texture, self.scaling_filter.get(), &bounds);
} else {
snapshot.append_texture(texture, &bounds);
}
if do_premult {
snapshot.pop(); // pop color matrix
snapshot.pop(); // pop mask 2
}
}
} else {
gst::trace!(CAT, imp: self, "Snapshotting black frame");
snapshot.append_color(
&background_color,
&graphene::Rect::new(0f32, 0f32, width as f32, height as f32),
);
#[cfg(not(feature = "gtk_v4_10"))]
{
let do_premult =
texture.is::<gdk::GLTexture>() && *has_alpha && gtk::micro_version() < 13;
if do_premult {
snapshot.push_gl_shader(
&self.premult_shader,
&bounds,
gsk::ShaderArgsBuilder::new(&self.premult_shader, None).to_args(),
);
}
if self.use_scaling_filter.get() {
#[cfg(feature = "gtk_v4_10")]
snapshot.append_scaled_texture(texture, self.scaling_filter.get(), &bounds);
} else {
snapshot.append_texture(texture, &bounds);
}
if do_premult {
snapshot.gl_shader_pop_texture(); // pop texture appended above from the shader
snapshot.pop(); // pop shader
}
}
snapshot.pop(); // pop opacity
}
}
}
impl Paintable {
pub(super) fn handle_frame_changed(&self, frame: Option<Frame>) {
pub(super) fn handle_frame_changed(&self, sink: &crate::PaintableSink, frame: Frame) {
let context = self.gl_context.borrow();
if let Some(frame) = frame {
gst::trace!(CAT, imp: self, "Received new frame");
let new_paintables =
frame.into_textures(context.as_ref(), &mut self.cached_textures.borrow_mut());
let new_size = new_paintables
.first()
.map(|p| (f32::round(p.width) as u32, f32::round(p.height) as u32))
.unwrap();
gst::trace!(CAT, imp: self, "Received new frame");
let old_paintables = self.paintables.replace(new_paintables);
let old_size = old_paintables
.first()
.map(|p| (f32::round(p.width) as u32, f32::round(p.height) as u32));
let new_paintables =
match frame.into_textures(context.as_ref(), &mut self.cached_textures.borrow_mut()) {
Ok(textures) => textures,
Err(err) => {
gst::element_error!(
sink,
gst::ResourceError::Failed,
["Failed to transform frame into textures: {err}"]
);
return;
}
};
if Some(new_size) != old_size {
gst::debug!(
CAT,
imp: self,
"Size changed from {old_size:?} to {new_size:?}",
);
self.obj().invalidate_size();
}
let new_size = new_paintables
.first()
.map(|p| (f32::round(p.width) as u32, f32::round(p.height) as u32))
.unwrap();
self.obj().invalidate_contents();
let old_paintables = self.paintables.replace(new_paintables);
let old_size = old_paintables
.first()
.map(|p| (f32::round(p.width) as u32, f32::round(p.height) as u32));
if Some(new_size) != old_size {
gst::debug!(
CAT,
imp: self,
"Size changed from {old_size:?} to {new_size:?}",
);
self.obj().invalidate_size();
}
self.obj().invalidate_contents();
}
pub(super) fn handle_flush_frames(&self) {


@@ -30,8 +30,8 @@ impl Paintable {
}
impl Paintable {
pub(crate) fn handle_frame_changed(&self, frame: Option<Frame>) {
self.imp().handle_frame_changed(frame);
pub(crate) fn handle_frame_changed(&self, sink: &crate::PaintableSink, frame: Frame) {
self.imp().handle_frame_changed(sink, frame);
}
pub(crate) fn handle_flush_frames(&self) {