Remove GTK3 examples/tutorials

GTK3 is deprecated and the GTK videooverlay example does not even work
on modern systems with Wayland anymore.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/-/merge_requests/1293>
Sebastian Dröge 2023-07-06 17:11:33 +03:00
parent f9fa7f55fc
commit 89ab9d09c8
8 changed files with 3 additions and 882 deletions
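
For reference, GTK 4 integration is provided by the gtk4paintablesink element from gst-plugins-rs, which exposes frames as a gdk::Paintable and therefore also works on Wayland. The sketch below is illustrative only and not part of this repository; it assumes the gtk4 crate (imported as gtk) and an installed gst-plugin-gtk4.

// Illustrative sketch only: render video in a GTK4 window via gtk4paintablesink.
use gst::prelude::*;
use gtk::{gdk, prelude::*};

fn build_ui(app: &gtk::Application) {
    let pipeline = gst::Pipeline::default();
    let src = gst::ElementFactory::make("videotestsrc").build().unwrap();
    // The sink exposes its frames as a gdk::Paintable instead of needing a native
    // window handle, which is what makes this approach work on Wayland as well as X11.
    let sink = gst::ElementFactory::make("gtk4paintablesink").build().unwrap();
    pipeline.add_many([&src, &sink]).unwrap();
    src.link(&sink).unwrap();

    // Display the sink's paintable in a plain gtk::Picture widget.
    let paintable = sink.property::<gdk::Paintable>("paintable");
    let picture = gtk::Picture::for_paintable(&paintable);

    let window = gtk::ApplicationWindow::new(app);
    window.set_default_size(320, 240);
    window.set_child(Some(&picture));
    window.present();

    pipeline
        .set_state(gst::State::Playing)
        .expect("Unable to set the pipeline to the `Playing` state");
    // Keep the pipeline alive until the window goes away, then shut it down.
    window.connect_destroy(move |_| {
        let _ = pipeline.set_state(gst::State::Null);
    });
}

fn main() -> gtk::glib::ExitCode {
    gst::init().unwrap();
    let app = gtk::Application::builder().build();
    app.connect_activate(build_ui);
    app.run()
}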

@@ -135,7 +135,7 @@ trigger:
variables:
FDO_DISTRIBUTION_PACKAGES: >-
build-essential curl python3-setuptools liborc-0.4-dev libglib2.0-dev
libxml2-dev libgtk-3-dev libegl1-mesa libgles2-mesa libgl1-mesa-dri
libxml2-dev libegl1-mesa libgles2-mesa libgl1-mesa-dri
libgl1-mesa-glx libwayland-egl1-mesa xz-utils libssl-dev git wget
ca-certificates ninja-build python3-pip flex bison libglib2.0-dev
libx11-dev libx11-xcb-dev libsoup2.4-dev libvorbis-dev libogg-dev

@@ -23,7 +23,7 @@ done
if [ -n "$EXAMPLES_TUTORIALS" ]; then
# Keep in sync with examples/Cargo.toml
# List all features except windows/win32
EXAMPLES_FEATURES="--features=gtksink,gtkvideooverlay,gtkvideooverlay-x11,gtkvideooverlay-quartz,rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gl,gst-gl-x11,gst-gl-wayland,gst-gl-egl,allocators,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18"
EXAMPLES_FEATURES="--features=rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gl,gst-gl-x11,gst-gl-wayland,gst-gl-egl,allocators,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18"
cargo build --locked --color=always --manifest-path examples/Cargo.toml --bins --examples "$EXAMPLES_FEATURES"
cargo build --locked --color=always --manifest-path tutorials/Cargo.toml --bins --examples --all-features

@@ -34,7 +34,7 @@ done
# Keep in sync with examples/Cargo.toml
# List all features except windows/win32
EXAMPLES_FEATURES="--features=gtksink,gtkvideooverlay,gtkvideooverlay-x11,gtkvideooverlay-quartz,rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gl,gst-gl-x11,gst-gl-wayland,gst-gl-egl,allocators,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18"
EXAMPLES_FEATURES="--features=rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gl,gst-gl-x11,gst-gl-wayland,gst-gl-egl,allocators,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18"
# And also run over all the examples/tutorials
cargo clippy --locked --color=always --manifest-path examples/Cargo.toml --all-targets "$EXAMPLES_FEATURES" -- $CLIPPY_LINTS

@@ -25,8 +25,6 @@ gst-sdp = { package = "gstreamer-sdp", path = "../gstreamer-sdp", optional = tru
gst-rtsp = { package = "gstreamer-rtsp", path = "../gstreamer-rtsp", optional = true }
gst-rtsp-server = { package = "gstreamer-rtsp-server", path = "../gstreamer-rtsp-server", optional = true }
gst-allocators = { package = "gstreamer-allocators", path = "../gstreamer-allocators", optional = true }
gtk = { git = "https://github.com/gtk-rs/gtk3-rs", optional = true }
gdk = { git = "https://github.com/gtk-rs/gtk3-rs", optional = true }
gio = { git = "https://github.com/gtk-rs/gtk-rs-core", optional = true }
anyhow = "1.0"
derive_more = "0.99.5"
@@ -56,10 +54,6 @@ gl_generator = { version = "0.14", optional = true }
[features]
default = []
gtksink = ["gtk", "gio"]
gtkvideooverlay = ["gtk", "gdk", "gio"]
gtkvideooverlay-x11 = ["gtkvideooverlay"]
gtkvideooverlay-quartz = ["gtkvideooverlay"]
rtsp-server = ["gst-rtsp-server", "gst-rtsp", "gst-sdp"]
rtsp-server-record = ["gst-rtsp-server", "gst-rtsp", "gio"]
pango-cairo = ["pango", "pangocairo", "cairo-rs"]
@@ -94,14 +88,6 @@ name = "encodebin"
[[bin]]
name = "events"
[[bin]]
name = "gtksink"
required-features = ["gtksink"]
[[bin]]
name = "gtkvideooverlay"
required-features = ["gtkvideooverlay"]
[[bin]]
name = "iterator"

@@ -1,179 +0,0 @@
// This example demonstrates how to use gstreamer in conjunction with the gtk widget toolkit.
// This example shows the video produced by a videotestsrc within a small gtk gui.
// For this, the gtkglsink is used, which creates a gtk widget one can embed into the gtk gui.
// There are multiple types of such widgets: gtkglsink uses OpenGL to render frames, and
// gtksink uses the CPU to render the frames (which is way slower).
// So the example application first tries to use OpenGL and, when that fails, falls back.
// The pipeline looks like the following:
// gtk-gui: {gtkglsink}-widget
// (|)
// {videotestsrc} - {glsinkbin}
use std::cell::RefCell;
use gio::prelude::*;
use gst::prelude::*;
use gtk::prelude::*;
fn create_ui(app: &gtk::Application) {
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("videotestsrc").build().unwrap();
// Create the gtk sink and retrieve the widget from it. The sink element will be used
// in the pipeline, and the widget will be embedded in our gui.
// Gstreamer then displays frames in the gtk widget.
// First, we try to use the OpenGL version - and if that fails, we fall back to non-OpenGL.
let (sink, widget) = if let Ok(gtkglsink) = gst::ElementFactory::make("gtkglsink").build() {
// Using the OpenGL widget succeeded, so we are in for a nice playback experience with
// low cpu usage. :)
// The gtkglsink essentially allocates an OpenGL texture on the GPU, that it will display.
// Now we create the glsinkbin element, which is responsible for conversions and for uploading
// video frames to our texture (if they are not already in the GPU). Now we tell the OpenGL-sink
// about our gtkglsink element, from where it will retrieve the OpenGL texture to fill.
let glsinkbin = gst::ElementFactory::make("glsinkbin")
.property("sink", &gtkglsink)
.build()
.unwrap();
// The gtkglsink creates the gtk widget for us. This is accessible through a property.
// So we get it and use it later to add it to our gui.
let widget = gtkglsink.property::<gtk::Widget>("widget");
(glsinkbin, widget)
} else {
// Unfortunately, using the OpenGL widget didn't work out, so we will have to render
// our frames manually, using the CPU. An example of why this may fail is when
// the PC doesn't have proper graphics drivers installed.
let sink = gst::ElementFactory::make("gtksink").build().unwrap();
// The gtksink creates the gtk widget for us. This is accessible through a property.
// So we get it and use it later to add it to our gui.
let widget = sink.property::<gtk::Widget>("widget");
(sink, widget)
};
pipeline.add_many([&src, &sink]).unwrap();
src.link(&sink).unwrap();
// Create a simple gtk gui window to place our widget into.
let window = gtk::Window::new(gtk::WindowType::Toplevel);
window.set_default_size(320, 240);
let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
// Add our widget to the gui
vbox.pack_start(&widget, true, true, 0);
let label = gtk::Label::new(Some("Position: 00:00:00"));
vbox.pack_start(&label, true, true, 5);
window.add(&vbox);
window.show_all();
app.add_window(&window);
// Need to move a new reference into the closure.
// !!ATTENTION!!:
// It might seem appealing to use pipeline.clone() here, because that greatly
// simplifies the code within the callback. What this actually does, however, is creating
// a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
// Storing this strong reference of the pipeline within the callback (we are moving it in!),
// which is in turn stored in another strong reference on the pipeline is creating a
// reference cycle.
// DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
let pipeline_weak = pipeline.downgrade();
// Add a timeout to the main loop that will periodically (every 500ms) be
// executed. This will query the current position within the stream from
// the underlying pipeline, and display it in our gui.
// Since this closure is called by the mainloop thread, we are allowed
// to modify the gui widgets here.
let timeout_id = glib::timeout_add_local(std::time::Duration::from_millis(500), move || {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return glib::ControlFlow::Continue,
};
// Query the current playing position from the underlying pipeline.
let position = pipeline.query_position::<gst::ClockTime>();
// Display the playing position in the gui.
label.set_text(&format!("Position: {:.0}", position.display()));
// Tell the callback to continue calling this closure.
glib::ControlFlow::Continue
});
let bus = pipeline.bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let app_weak = app.downgrade();
let _bus_watch = bus
.add_watch_local(move |_, msg| {
use gst::MessageView;
let app = match app_weak.upgrade() {
Some(app) => app,
None => return glib::ControlFlow::Break,
};
match msg.view() {
MessageView::Eos(..) => app.quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
app.quit();
}
_ => (),
};
glib::ControlFlow::Continue
})
.expect("Failed to add bus watch");
// Pipeline reference is owned by the closure below, so will be
// destroyed once the app is destroyed
let timeout_id = RefCell::new(Some(timeout_id));
let pipeline = RefCell::new(Some(pipeline));
app.connect_shutdown(move |_| {
// Optional, by manually destroying the window here we ensure that
// the gst element is destroyed when shutting down instead of having to wait
// for the process to terminate, allowing us to use the leaks tracer.
unsafe {
window.destroy();
}
// GTK will keep the Application alive for the whole process lifetime.
// Wrapping the pipeline in a RefCell<Option<_>> and removing it from it here
// ensures the pipeline is actually destroyed when shutting down, allowing us
// to use the leaks tracer for example.
if let Some(pipeline) = pipeline.borrow_mut().take() {
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
if let Some(timeout_id) = timeout_id.borrow_mut().take() {
timeout_id.remove();
}
});
}
fn main() -> glib::ExitCode {
// Initialize gstreamer and the gtk widget toolkit libraries.
gst::init().unwrap();
gtk::init().unwrap();
let res = {
let app = gtk::Application::new(None, gio::ApplicationFlags::FLAGS_NONE);
app.connect_activate(create_ui);
app.run()
};
// Optional, can be used to detect leaks using the leaks tracer
unsafe {
gst::deinit();
}
res
}

@@ -1,286 +0,0 @@
// This example demonstrates another type of combination of gtk and gstreamer,
// in comparison to the gtksink example.
// This example uses regions that are managed by the window system, and uses
// the window system's api to insert a video stream into these regions.
// So essentially, the window system itself overlays our gui with
// the video frames - within the region that we tell it to use.
// A disadvantage of this method is that it is highly platform specific, since
// the big platforms all have their own window system. Thus, this example
// has special code to handle differences between platforms.
// Windows could theoretically be supported by this example, but is not yet implemented.
// One of the very few platforms (if not the only one) that cannot provide the API
// needed for this is a Linux desktop using Wayland.
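// (Wayland has no global window handle comparable to an X11 XID or an NSView,
// which is what the set_window_handle() mechanism used below relies on.)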
// TODO: Add Windows support
// In this case, a testvideo is displayed within our gui, using the
// following pipeline:
// {videotestsrc} - {xvimagesink(on linux)}
// {videotestsrc} - {glimagesink(on mac)}
use std::{cell::RefCell, os::raw::c_void, process};
use gio::prelude::*;
use gst_video::prelude::*;
use gtk::prelude::*;
#[cfg(all(target_os = "linux", feature = "gtkvideooverlay-x11"))]
fn create_video_sink() -> gst::Element {
// When we are on linux with the Xorg display server, we use the
// X11 protocol's XV extension, which allows overlaying regions
// with video streams. For this, we use the xvimagesink element.
gst::ElementFactory::make("xvimagesink").build().unwrap()
}
#[cfg(all(target_os = "linux", feature = "gtkvideooverlay-x11"))]
fn set_window_handle(video_overlay: &gst_video::VideoOverlay, gdk_window: &gdk::Window) {
let display_type_name = gdk_window.display().type_().name();
// Check if we're using X11 or ...
if display_type_name == "GdkX11Display" {
extern "C" {
pub fn gdk_x11_window_get_xid(window: *mut glib::gobject_ffi::GObject) -> *mut c_void;
}
// This is unsafe because the "window handle" we pass here is basically like a raw pointer.
// If a wrong value were to be passed here (and you can pass any integer), then the window
// system will most likely cause the application to crash.
#[allow(clippy::cast_ptr_alignment)]
unsafe {
// Here we ask gdk what native window handle we got assigned for
// our video region from the window system, and then we will
// pass this unique identifier to the overlay provided by our
// sink - so the sink can then arrange the overlay.
let xid = gdk_x11_window_get_xid(gdk_window.as_ptr() as *mut _);
video_overlay.set_window_handle(xid as usize);
}
} else {
println!("Add support for display type '{display_type_name}'");
process::exit(-1);
}
}
#[cfg(all(target_os = "macos", feature = "gtkvideooverlay-quartz"))]
fn create_video_sink() -> gst::Element {
// On Mac, this is done by overlaying a window region with an
// OpenGL-texture, using the glimagesink element.
gst::ElementFactory::make("glimagesink").build().unwrap()
}
#[cfg(all(target_os = "macos", feature = "gtkvideooverlay-quartz"))]
fn set_window_handle(video_overlay: &gst_video::VideoOverlay, gdk_window: &gdk::Window) {
let display_type_name = gdk_window.display().type_().name();
if display_type_name == "GdkQuartzDisplay" {
extern "C" {
pub fn gdk_quartz_window_get_nsview(
window: *mut glib::gobject_ffi::GObject,
) -> *mut c_void;
}
// This is unsafe because the "window handle" we pass here is basically like a raw pointer.
// If a wrong value were to be passed here (and you can pass any integer), then the window
// system will most likely cause the application to crash.
#[allow(clippy::cast_ptr_alignment)]
unsafe {
// Here we ask gdk what native window handle we got assigned for
// our video region from the windowing system, and then we will
// pass this unique identifier to the overlay provided by our
// sink - so the sink can then arrange the overlay.
let window = gdk_quartz_window_get_nsview(gdk_window.as_ptr() as *mut _);
video_overlay.set_window_handle(window as usize);
}
} else {
println!("Unsupported display type '{}", display_type_name);
process::exit(-1);
}
}
fn create_ui(app: &gtk::Application) {
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("videotestsrc").build().unwrap();
// Since overlaying our gui window this way means talking directly to the
// windowing system, this part is highly platform-specific. This example
// supports Linux and Mac (using X11 and Quartz).
let sink = create_video_sink();
pipeline.add_many([&src, &sink]).unwrap();
src.link(&sink).unwrap();
// First, we create our gtk window - which will contain a region where
// our overlaid video will be displayed in.
let window = gtk::Window::new(gtk::WindowType::Toplevel);
window.set_default_size(320, 240);
let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
// This creates the widget we will display our overlay in.
// Later, we will try to tell our window system about this region, so
// it can overlay it with our video stream.
let video_window = gtk::DrawingArea::new();
video_window.set_size_request(320, 240);
// Use the platform-specific sink to create our overlay.
// Since we only use the video_overlay in the closure below, we need a weak reference.
// !!ATTENTION!!:
// It might seem appealing to use .clone() here, because that greatly
// simplifies the code within the callback. What this actually does, however, is creating
// a memory leak.
let video_overlay = sink
.dynamic_cast::<gst_video::VideoOverlay>()
.unwrap()
.downgrade();
// Connect to this widget's realize signal, which will be emitted
// after its display has been initialized. This is necessary, because
// the window system doesn't know about our region until it was initialized.
video_window.connect_realize(move |video_window| {
// Here we temporarily retrieve a strong reference on the video-overlay from the
// weak reference that we moved into the closure.
let video_overlay = match video_overlay.upgrade() {
Some(video_overlay) => video_overlay,
None => return,
};
// Gtk uses gdk under the hood, to handle its drawing. Drawing regions are
// called gdk windows. We request this underlying drawing region from the
// widget we will overlay with our video.
let gdk_window = video_window.window().unwrap();
// This is where we tell our window system about the drawing-region we
// want it to overlay. Most often, the window system would only know
// about our most outer region (or: our window).
if !gdk_window.ensure_native() {
println!("Can't create native window for widget");
process::exit(-1);
}
set_window_handle(&video_overlay, &gdk_window);
});
vbox.pack_start(&video_window, true, true, 0);
let label = gtk::Label::new(Some("Position: 00:00:00"));
vbox.pack_start(&label, true, true, 5);
window.add(&vbox);
window.show_all();
app.add_window(&window);
// Need to move a new reference into the closure.
// !!ATTENTION!!:
// It might seem appealing to use pipeline.clone() here, because that greatly
// simplifies the code within the callback. What this actually does, however, is creating
// a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
// Storing this strong reference of the pipeline within the callback (we are moving it in!),
// which is in turn stored in another strong reference on the pipeline is creating a
// reference cycle.
// DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
let pipeline_weak = pipeline.downgrade();
// Add a timeout to the main loop that will periodically (every 500ms) be
// executed. This will query the current position within the stream from
// the underlying pipeline, and display it in our gui.
// Since this closure is called by the mainloop thread, we are allowed
// to modify the gui widgets here.
let timeout_id = glib::timeout_add_local(std::time::Duration::from_millis(500), move || {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return glib::ControlFlow::Break,
};
// Query the current playing position from the underlying pipeline.
let position = pipeline.query_position::<gst::ClockTime>();
// Display the playing position in the gui.
label.set_text(&format!("Position: {:.0}", position.display()));
// Tell the timeout to continue calling this callback.
glib::ControlFlow::Continue
});
let bus = pipeline.bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let app_weak = app.downgrade();
let _bus_watch = bus
.add_watch_local(move |_, msg| {
use gst::MessageView;
let app = match app_weak.upgrade() {
Some(app) => app,
None => return glib::ControlFlow::Break,
};
match msg.view() {
MessageView::Eos(..) => app.quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
app.quit();
}
_ => (),
};
glib::ControlFlow::Continue
})
.expect("Failed to add bus watch");
// Pipeline reference is owned by the closure below, so will be
// destroyed once the app is destroyed
let timeout_id = RefCell::new(Some(timeout_id));
let pipeline = RefCell::new(Some(pipeline));
app.connect_shutdown(move |_| {
// Optional, by manually destroying the window here we ensure that
// the gst element is destroyed when shutting down instead of having to wait
// for the process to terminate, allowing us to use the leaks tracer.
unsafe {
window.destroy();
}
// GTK will keep the Application alive for the whole process lifetime.
// Wrapping the pipeline in a RefCell<Option<_>> and removing it from it here
// ensures the pipeline is actually destroyed when shutting down, allowing us
// to use the leaks tracer for example.
if let Some(pipeline) = pipeline.borrow_mut().take() {
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
if let Some(timeout_id) = timeout_id.borrow_mut().take() {
timeout_id.remove();
}
});
}
fn main() -> glib::ExitCode {
#[cfg(not(unix))]
{
println!("Add support for target platform");
process::exit(-1);
}
// Initialize gstreamer and the gtk widget toolkit libraries.
gst::init().unwrap();
gtk::init().unwrap();
let res = {
let app = gtk::Application::new(None, gio::ApplicationFlags::FLAGS_NONE);
app.connect_activate(create_ui);
app.run()
};
// Optional, can be used to detect leaks using the leaks tracer
unsafe {
gst::deinit();
}
res
}

@@ -8,8 +8,6 @@ rust-version = "1.66"
[dependencies]
glib = { git = "https://github.com/gtk-rs/gtk-rs-core" }
gdk = { git = "https://github.com/gtk-rs/gtk3-rs", optional = true }
gtk = { git = "https://github.com/gtk-rs/gtk3-rs", optional = true }
gst = { package = "gstreamer", path = "../gstreamer" }
gst-audio = { package = "gstreamer-audio", path = "../gstreamer-audio" }
gst-video = { package = "gstreamer-video", path = "../gstreamer-video", optional = true }
@@ -23,11 +21,6 @@ termion = { version = "2", optional = true }
cocoa = "0.25"
objc = "0.2.7"
[features]
tutorial5 = ["gtk", "gdk", "gst-video"]
tutorial5-x11 = ["tutorial5"]
tutorial5-quartz = ["tutorial5"]
[[bin]]
name = "basic-tutorial-13"
required-features = ["termion"]

@@ -1,393 +0,0 @@
#[cfg(feature = "tutorial5")]
mod tutorial5 {
use std::{ops, os::raw::c_void, process};
use gdk::prelude::*;
use gst_video::prelude::*;
use gtk::prelude::*;
// Custom struct to keep our window reference alive
// and to store the timeout id so that we can remove
// it from the main context again later and drop the
// references it keeps inside its closures
struct AppWindow {
main_window: gtk::Window,
timeout_id: Option<glib::SourceId>,
}
impl ops::Deref for AppWindow {
type Target = gtk::Window;
fn deref(&self) -> &gtk::Window {
&self.main_window
}
}
impl Drop for AppWindow {
fn drop(&mut self) {
if let Some(source_id) = self.timeout_id.take() {
source_id.remove();
}
}
}
// Extract tags from streams of @stype and add the info in the UI.
fn add_streams_info(playbin: &gst::Element, textbuf: &gtk::TextBuffer, stype: &str) {
let propname: &str = &format!("n-{stype}");
let signame: &str = &format!("get-{stype}-tags");
let x = playbin.property::<i32>(propname);
for i in 0..x {
let tags = playbin.emit_by_name::<Option<gst::TagList>>(signame, &[&i]);
if let Some(tags) = tags {
textbuf.insert_at_cursor(&format!("{stype} stream {i}:\n "));
if let Some(codec) = tags.get::<gst::tags::VideoCodec>() {
textbuf.insert_at_cursor(&format!(" codec: {} \n", codec.get()));
}
if let Some(codec) = tags.get::<gst::tags::AudioCodec>() {
textbuf.insert_at_cursor(&format!(" codec: {} \n", codec.get()));
}
if let Some(lang) = tags.get::<gst::tags::LanguageCode>() {
textbuf.insert_at_cursor(&format!(" language: {} \n", lang.get()));
}
if let Some(bitrate) = tags.get::<gst::tags::Bitrate>() {
textbuf.insert_at_cursor(&format!(" bitrate: {} \n", bitrate.get()));
}
}
}
}
// Extract metadata from all the streams and write it to the text widget in the GUI
fn analyze_streams(playbin: &gst::Element, textbuf: &gtk::TextBuffer) {
{
textbuf.set_text("");
}
add_streams_info(playbin, textbuf, "video");
add_streams_info(playbin, textbuf, "audio");
add_streams_info(playbin, textbuf, "text");
}
// This creates all the GTK+ widgets that compose our application, and registers the callbacks
fn create_ui(playbin: &gst::Element) -> AppWindow {
let main_window = gtk::Window::new(gtk::WindowType::Toplevel);
main_window.connect_delete_event(|_, _| {
gtk::main_quit();
Inhibit(false)
});
let play_button =
gtk::Button::from_icon_name(Some("media-playback-start"), gtk::IconSize::SmallToolbar);
let pipeline = playbin.clone();
play_button.connect_clicked(move |_| {
let pipeline = &pipeline;
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
});
let pause_button =
gtk::Button::from_icon_name(Some("media-playback-pause"), gtk::IconSize::SmallToolbar);
let pipeline = playbin.clone();
pause_button.connect_clicked(move |_| {
let pipeline = &pipeline;
pipeline
.set_state(gst::State::Paused)
.expect("Unable to set the pipeline to the `Paused` state");
});
let stop_button =
gtk::Button::from_icon_name(Some("media-playback-stop"), gtk::IconSize::SmallToolbar);
let pipeline = playbin.clone();
stop_button.connect_clicked(move |_| {
let pipeline = &pipeline;
pipeline
.set_state(gst::State::Ready)
.expect("Unable to set the pipeline to the `Ready` state");
});
let slider = gtk::Scale::with_range(gtk::Orientation::Horizontal, 0.0, 100.0, 1.0);
let pipeline = playbin.clone();
let slider_update_signal_id = slider.connect_value_changed(move |slider| {
let pipeline = &pipeline;
let value = slider.value() as u64;
if pipeline
.seek_simple(
gst::SeekFlags::FLUSH | gst::SeekFlags::KEY_UNIT,
value * gst::ClockTime::SECOND,
)
.is_err()
{
eprintln!("Seeking to {value} failed");
}
});
slider.set_draw_value(false);
let pipeline = playbin.clone();
let lslider = slider.clone();
// Update the UI (seekbar) every second
let timeout_id = glib::timeout_add_seconds_local(1, move || {
let pipeline = &pipeline;
let lslider = &lslider;
if let Some(dur) = pipeline.query_duration::<gst::ClockTime>() {
lslider.set_range(0.0, dur.seconds() as f64);
if let Some(pos) = pipeline.query_position::<gst::ClockTime>() {
lslider.block_signal(&slider_update_signal_id);
lslider.set_value(pos.seconds() as f64);
lslider.unblock_signal(&slider_update_signal_id);
}
}
glib::ControlFlow::Continue
});
let controls = gtk::Box::new(gtk::Orientation::Horizontal, 0);
controls.pack_start(&play_button, false, false, 0);
controls.pack_start(&pause_button, false, false, 0);
controls.pack_start(&stop_button, false, false, 0);
controls.pack_start(&slider, true, true, 2);
let video_window = gtk::DrawingArea::new();
let video_overlay = playbin
.clone()
.dynamic_cast::<gst_video::VideoOverlay>()
.unwrap();
video_window.connect_realize(move |video_window| {
let video_overlay = &video_overlay;
let gdk_window = video_window.window().unwrap();
if !gdk_window.ensure_native() {
println!("Can't create native window for widget");
process::exit(-1);
}
let display_type_name = gdk_window.display().type_().name();
#[cfg(all(target_os = "linux", feature = "tutorial5-x11"))]
{
// Check if we're using X11 or ...
if display_type_name == "GdkX11Display" {
extern "C" {
pub fn gdk_x11_window_get_xid(
window: *mut glib::gobject_ffi::GObject,
) -> *mut c_void;
}
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let xid = gdk_x11_window_get_xid(gdk_window.as_ptr() as *mut _);
video_overlay.set_window_handle(xid as usize);
}
} else {
println!("Add support for display type '{display_type_name}'");
process::exit(-1);
}
}
#[cfg(all(target_os = "macos", feature = "tutorial5-quartz"))]
{
if display_type_name == "GdkQuartzDisplay" {
extern "C" {
pub fn gdk_quartz_window_get_nsview(
window: *mut glib::gobject_ffi::GObject,
) -> *mut c_void;
}
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let window = gdk_quartz_window_get_nsview(gdk_window.as_ptr() as *mut _);
video_overlay.set_window_handle(window as usize);
}
} else {
println!(
"Unsupported display type '{}', compile with `--feature `",
display_type_name
);
process::exit(-1);
}
}
});
let streams_list = gtk::TextView::new();
streams_list.set_editable(false);
let pipeline_weak = playbin.downgrade();
let streams_list_weak = glib::SendWeakRef::from(streams_list.downgrade());
let bus = playbin.bus().unwrap();
#[allow(clippy::single_match)]
bus.connect_message(Some("application"), move |_, msg| match msg.view() {
gst::MessageView::Application(application) => {
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return,
};
let streams_list = match streams_list_weak.upgrade() {
Some(streams_list) => streams_list,
None => return,
};
if application.structure().map(|s| s.name().as_str()) == Some("tags-changed") {
let textbuf = streams_list
.buffer()
.expect("Couldn't get buffer from text_view");
analyze_streams(&pipeline, &textbuf);
}
}
_ => unreachable!(),
});
let vbox = gtk::Box::new(gtk::Orientation::Horizontal, 0);
vbox.pack_start(&video_window, true, true, 0);
vbox.pack_start(&streams_list, false, false, 2);
let main_box = gtk::Box::new(gtk::Orientation::Vertical, 0);
main_box.pack_start(&vbox, true, true, 0);
main_box.pack_start(&controls, false, false, 0);
main_window.add(&main_box);
main_window.set_default_size(640, 480);
main_window.show_all();
AppWindow {
main_window,
timeout_id: Some(timeout_id),
}
}
// We are possibly in a GStreamer working thread, so we notify the main
// thread of this event through a message in the bus
fn post_app_message(playbin: &gst::Element) {
let _ = playbin.post_message(gst::message::Application::new(gst::Structure::new_empty(
"tags-changed",
)));
}
pub fn run() {
// Make sure the right features were activated
#[allow(clippy::eq_op)]
{
if !cfg!(feature = "tutorial5-x11") && !cfg!(feature = "tutorial5-quartz") {
eprintln!(
"No Gdk backend selected, compile with --features tutorial5[-x11][-quartz]."
);
return;
}
}
// Initialize GTK
if let Err(err) = gtk::init() {
eprintln!("Failed to initialize GTK: {err}");
return;
}
// Initialize GStreamer
if let Err(err) = gst::init() {
eprintln!("Failed to initialize Gst: {err}");
return;
}
let uri = "https://www.freedesktop.org/software/gstreamer-sdk/\
data/media/sintel_trailer-480p.webm";
let playbin = gst::ElementFactory::make("playbin")
.property("uri", uri)
.build()
.unwrap();
playbin.connect("video-tags-changed", false, |args| {
let pipeline = args[0]
.get::<gst::Element>()
.expect("playbin \"video-tags-changed\" args[0]");
post_app_message(&pipeline);
None
});
playbin.connect("audio-tags-changed", false, |args| {
let pipeline = args[0]
.get::<gst::Element>()
.expect("playbin \"audio-tags-changed\" args[0]");
post_app_message(&pipeline);
None
});
playbin.connect("text-tags-changed", false, move |args| {
let pipeline = args[0]
.get::<gst::Element>()
.expect("playbin \"text-tags-changed\" args[0]");
post_app_message(&pipeline);
None
});
let window = create_ui(&playbin);
let bus = playbin.bus().unwrap();
bus.add_signal_watch();
let pipeline_weak = playbin.downgrade();
bus.connect_message(None, move |_, msg| {
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return,
};
match msg.view() {
// This is called when an End-Of-Stream message is posted on the bus.
// We just set the pipeline to READY (which stops playback).
gst::MessageView::Eos(..) => {
println!("End-Of-Stream reached.");
pipeline
.set_state(gst::State::Ready)
.expect("Unable to set the pipeline to the `Ready` state");
}
// This is called when an error message is posted on the bus
gst::MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
// This is called when the pipeline changes states. We use it to
// keep track of the current state.
gst::MessageView::StateChanged(state_changed) => {
if state_changed.src().map(|s| s == &pipeline).unwrap_or(false) {
println!("State set to {:?}", state_changed.current());
}
}
_ => (),
}
});
playbin
.set_state(gst::State::Playing)
.expect("Unable to set the playbin to the `Playing` state");
gtk::main();
window.hide();
playbin
.set_state(gst::State::Null)
.expect("Unable to set the playbin to the `Null` state");
bus.remove_signal_watch();
}
}
#[cfg(feature = "tutorial5")]
fn main() {
tutorial5::run();
}
#[cfg(not(feature = "tutorial5"))]
fn main() {
println!("Please compile with --features tutorial5[-x11][-quartz]");
}