Add VideoFrameRef API

This is like VideoFrame, but works on a gst::BufferRef and borrows from it.
A VideoFrame can be converted into a VideoFrameRef by borrowing.
Sebastian Dröge 2018-01-01 13:26:31 +02:00
parent a8c2815c3b
commit 0dd0442fb7
2 changed files with 307 additions and 2 deletions
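
For context, here is a minimal usage sketch of the new API (not part of the commit). It maps a gst::BufferRef readable through the VideoFrameRef constructor added below and reads back some frame metadata. The crate is referred to as gstreamer_video and the helper dump_first_plane is hypothetical; the ::VideoInfo is assumed to describe the data actually stored in the buffer.

// Hypothetical usage sketch, not part of this commit.
fn dump_first_plane(buffer: &gst::BufferRef, info: &gstreamer_video::VideoInfo) {
    // Borrow the buffer and map it readable; no copy and, because of
    // GST_VIDEO_FRAME_MAP_FLAG_NO_REF, no extra buffer reference is taken.
    let frame = gstreamer_video::VideoFrameRef::from_buffer_ref_readable(buffer, info)
        .expect("buffer does not match the video info");

    println!("{}x{}, {} planes", frame.width(), frame.height(), frame.n_planes());
    if let Some(data) = frame.plane_data(0) {
        println!("plane 0: {} bytes, stride {}", data.len(), frame.plane_stride()[0]);
    }
    // The frame is unmapped when it goes out of scope.
}

A writable mapping works the same way via from_buffer_ref_writable on a &mut gst::BufferRef; the writable ref derefs to the readable one, so all read-only accessors stay available.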

gstreamer-video/src/lib.rs

@@ -54,8 +54,8 @@ mod video_format_info;
pub use video_format_info::*;
mod video_info;
pub use video_info::*;
-mod video_frame;
-pub use video_frame::VideoFrame;
+pub mod video_frame;
+pub use video_frame::{VideoFrame, VideoFrameRef};
mod video_overlay;
pub use video_overlay::VideoOverlayExtManual;
mod video_event;
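
Since video_frame is now a public module, downstream code can reach the frame types either through the crate-root re-exports or through the module path. A small sketch (crate name assumed to be gstreamer_video):

// Via the crate-root re-exports added above...
use gstreamer_video::{VideoFrame, VideoFrameRef};
// ...or via the now-public module, e.g. for the marker types.
use gstreamer_video::video_frame::{Readable, Writable};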

gstreamer-video/src/video_frame.rs

@@ -18,6 +18,7 @@ use std::mem;
use std::ptr;
use std::marker::PhantomData;
use std::slice;
use std::ops;
pub struct Readable;
pub struct Writable;
@@ -219,6 +220,12 @@ impl VideoFrame<Readable> {
            }
        }
    }

    pub fn as_video_frame_ref(&self) -> VideoFrameRef<&gst::BufferRef> {
        // Shallow-copy the already mapped GstVideoFrame; `true` marks the
        // mapping as borrowed from `self` so it is not unmapped again on drop.
        let vframe = unsafe { ptr::read(&self.0) };
        let info = self.2.clone();
        VideoFrameRef(vframe, Some(self.buffer()), info, true)
    }
}

impl VideoFrame<Writable> {
@@ -311,6 +318,304 @@ impl VideoFrame<Writable> {
            ))
        }
    }

    pub fn as_mut_video_frame_ref(&mut self) -> VideoFrameRef<&mut gst::BufferRef> {
        // Same shallow copy as in the readable case, but borrowing the
        // underlying buffer mutably.
        let vframe = unsafe { ptr::read(&self.0) };
        let info = self.2.clone();
        VideoFrameRef(vframe, Some(self.buffer_mut()), info, true)
    }
}

// Fields: the mapped GstVideoFrame, the borrowed buffer, a copy of the video
// info, and a flag that is true when the mapping is borrowed from a VideoFrame
// and therefore must not be unmapped on drop.
pub struct VideoFrameRef<T>(ffi::GstVideoFrame, Option<T>, ::VideoInfo, bool);

impl<'a> VideoFrameRef<&'a gst::BufferRef> {
    pub fn from_buffer_ref_readable<'b>(
        buffer: &'a gst::BufferRef,
        info: &'b ::VideoInfo,
    ) -> Option<VideoFrameRef<&'a gst::BufferRef>> {
        skip_assert_initialized!();
        unsafe {
            let mut frame = mem::zeroed();
            // NO_REF: don't take an extra reference on the buffer; the
            // VideoFrameRef only borrows it for the lifetime of the mapping.
            let res: bool = from_glib(ffi::gst_video_frame_map(
                &mut frame,
                info.to_glib_none().0 as *mut _,
                buffer.as_mut_ptr(),
                mem::transmute(
                    ffi::GST_VIDEO_FRAME_MAP_FLAG_NO_REF.bits() | gst_ffi::GST_MAP_READ.bits(),
                ),
            ));

            if !res {
                None
            } else {
                let info = ::VideoInfo(ptr::read(&frame.info));
                Some(VideoFrameRef(frame, Some(buffer), info, false))
            }
        }
    }

    pub fn from_buffer_ref_id_readable<'b>(
        buffer: &'a gst::BufferRef,
        id: i32,
        info: &'b ::VideoInfo,
    ) -> Option<VideoFrameRef<&'a gst::BufferRef>> {
        skip_assert_initialized!();
        unsafe {
            let mut frame = mem::zeroed();
            let res: bool = from_glib(ffi::gst_video_frame_map_id(
                &mut frame,
                info.to_glib_none().0 as *mut _,
                buffer.as_mut_ptr(),
                id,
                mem::transmute(
                    ffi::GST_VIDEO_FRAME_MAP_FLAG_NO_REF.bits() | gst_ffi::GST_MAP_READ.bits(),
                ),
            ));

            if !res {
                None
            } else {
                let info = ::VideoInfo(ptr::read(&frame.info));
                Some(VideoFrameRef(frame, Some(buffer), info, false))
            }
        }
    }
    pub fn info(&self) -> &::VideoInfo {
        &self.2
    }

    pub fn flags(&self) -> ::VideoFrameFlags {
        from_glib(self.0.flags)
    }

    pub fn id(&self) -> i32 {
        self.0.id
    }

    pub fn copy(&self, dest: &mut VideoFrame<Writable>) -> Result<(), glib::BoolError> {
        unsafe {
            let res: bool = from_glib(ffi::gst_video_frame_copy(&mut dest.0, &self.0));
            if res {
                Ok(())
            } else {
                Err(glib::BoolError("Failed to copy video frame"))
            }
        }
    }

    pub fn copy_plane(
        &self,
        dest: &mut VideoFrame<Writable>,
        plane: u32,
    ) -> Result<(), glib::BoolError> {
        skip_assert_initialized!();
        unsafe {
            let res: bool = from_glib(ffi::gst_video_frame_copy_plane(&mut dest.0, &self.0, plane));
            if res {
                Ok(())
            } else {
                Err(glib::BoolError("Failed to copy video frame plane"))
            }
        }
    }

    pub fn format(&self) -> ::VideoFormat {
        self.info().format()
    }

    pub fn format_info(&self) -> ::VideoFormatInfo {
        self.info().format_info()
    }

    pub fn width(&self) -> u32 {
        self.info().width()
    }

    pub fn height(&self) -> u32 {
        self.info().height()
    }

    pub fn size(&self) -> usize {
        self.info().size()
    }

    pub fn is_interlaced(&self) -> bool {
        self.flags().contains(::VideoFrameFlags::INTERLACED)
    }

    pub fn is_tff(&self) -> bool {
        self.flags().contains(::VideoFrameFlags::TFF)
    }

    pub fn is_rff(&self) -> bool {
        self.flags().contains(::VideoFrameFlags::RFF)
    }

    pub fn is_onefield(&self) -> bool {
        self.flags().contains(::VideoFrameFlags::ONEFIELD)
    }

    pub fn n_planes(&self) -> u32 {
        self.info().n_planes()
    }

    pub fn n_components(&self) -> u32 {
        self.info().n_components()
    }

    pub fn plane_stride(&self) -> &[i32] {
        self.info().stride()
    }

    pub fn plane_offset(&self) -> &[usize] {
        self.info().offset()
    }

    pub fn buffer(&self) -> &gst::BufferRef {
        self.1.as_ref().unwrap()
    }

    pub fn plane_data(&self, plane: u32) -> Option<&[u8]> {
        if plane >= self.n_planes() {
            return None;
        }

        let format_info = self.format_info();

        // Just get the palette
        if format_info.has_palette() && plane == 1 {
            unsafe {
                return Some(slice::from_raw_parts(self.0.data[1] as *const u8, 256 * 4));
            }
        }

        let w = self.plane_stride()[plane as usize] as u32;
        // FIXME: This assumes that the horizontal subsampling of all
        // components in the plane is the same, which is probably safe
        let h = format_info.scale_height(plane as u8, self.height());

        unsafe {
            Some(slice::from_raw_parts(
                self.0.data[plane as usize] as *const u8,
                (w * h) as usize,
            ))
        }
    }
}

impl<'a> VideoFrameRef<&'a mut gst::BufferRef> {
    pub fn from_buffer_ref_writable<'b>(
        buffer: &'a mut gst::BufferRef,
        info: &'b ::VideoInfo,
    ) -> Option<VideoFrameRef<&'a mut gst::BufferRef>> {
        skip_assert_initialized!();
        unsafe {
            let mut frame = mem::zeroed();
            let res: bool = from_glib(ffi::gst_video_frame_map(
                &mut frame,
                info.to_glib_none().0 as *mut _,
                buffer.as_mut_ptr(),
                mem::transmute(
                    ffi::GST_VIDEO_FRAME_MAP_FLAG_NO_REF.bits() | gst_ffi::GST_MAP_READ.bits()
                        | gst_ffi::GST_MAP_WRITE.bits(),
                ),
            ));

            if !res {
                None
            } else {
                let info = ::VideoInfo(ptr::read(&frame.info));
                Some(VideoFrameRef(frame, Some(buffer), info, false))
            }
        }
    }

    pub fn from_buffer_ref_id_writable<'b>(
        buffer: &'a mut gst::BufferRef,
        id: i32,
        info: &'b ::VideoInfo,
    ) -> Option<VideoFrameRef<&'a mut gst::BufferRef>> {
        skip_assert_initialized!();
        unsafe {
            let mut frame = mem::zeroed();
            let res: bool = from_glib(ffi::gst_video_frame_map_id(
                &mut frame,
                info.to_glib_none().0 as *mut _,
                buffer.as_mut_ptr(),
                id,
                mem::transmute(
                    ffi::GST_VIDEO_FRAME_MAP_FLAG_NO_REF.bits() | gst_ffi::GST_MAP_READ.bits()
                        | gst_ffi::GST_MAP_WRITE.bits(),
                ),
            ));

            if !res {
                None
            } else {
                let info = ::VideoInfo(ptr::read(&frame.info));
                Some(VideoFrameRef(frame, Some(buffer), info, false))
            }
        }
    }

    pub fn buffer_mut(&mut self) -> &mut gst::BufferRef {
        self.1.as_mut().unwrap()
    }

    pub fn plane_data_mut(&mut self, plane: u32) -> Option<&mut [u8]> {
        if plane >= self.n_planes() {
            return None;
        }

        let format_info = self.format_info();

        // Just get the palette
        if format_info.has_palette() && plane == 1 {
            unsafe {
                return Some(slice::from_raw_parts_mut(
                    self.0.data[1] as *mut u8,
                    256 * 4,
                ));
            }
        }

        let w = self.plane_stride()[plane as usize] as u32;
        // FIXME: This assumes that the horizontal subsampling of all
        // components in the plane is the same, which is probably safe
        let h = format_info.scale_height(plane as u8, self.height());

        unsafe {
            Some(slice::from_raw_parts_mut(
                self.0.data[plane as usize] as *mut u8,
                (w * h) as usize,
            ))
        }
    }
}

impl<'a> ops::Deref for VideoFrameRef<&'a mut gst::BufferRef> {
    type Target = VideoFrameRef<&'a gst::BufferRef>;

    fn deref(&self) -> &Self::Target {
        // The writable and readable variants only differ in the mutability of
        // the borrowed buffer, so reinterpreting the shared reference is fine.
        use std::mem;
        unsafe { mem::transmute(self) }
    }
}

impl<T> Drop for VideoFrameRef<T> {
    fn drop(&mut self) {
        // Only unmap if we own the mapping, i.e. this ref was not borrowed
        // from an existing VideoFrame via as_video_frame_ref().
        if !self.3 {
            unsafe {
                ffi::gst_video_frame_unmap(&mut self.0);
            }
        }
    }
}

#[cfg(test)]