docs: Convert gtkdoc comments to markdown

Modernizing the documentation, making it simpler to read and
modify and allowing us to possibly switch to hotdoc in the
future.
This commit is contained in:
Thibault Saunier 2017-01-23 16:36:11 -03:00
parent a122135194
commit 099ac9faf2
143 changed files with 1216 additions and 1708 deletions

View file

@ -20,6 +20,7 @@
*/
/**
* SECTION:element-alsamidisrc
* @title: alsamidisrc
* @see_also: #GstPushSrc
*
* The alsamidisrc element is an element that fetches ALSA MIDI sequencer
@ -28,13 +29,13 @@
*
* It can be used to generate notes from a MIDI input device.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch -v alsamidisrc ports=129:0 ! fluiddec ! audioconvert ! autoaudiosink
* ]| This pipeline will listen for events from the sequencer device at port 129:0,
* ]|
* This pipeline will listen for events from the sequencer device at port 129:0,
* and generate notes using the fluiddec element.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -22,16 +22,18 @@
/**
* SECTION:element-alsasink
* @title: alsasink
* @see_also: alsasrc
*
* This element renders audio samples using the ALSA audio API.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v uridecodebin uri=file:///path/to/audio.ogg ! audioconvert ! audioresample ! autoaudiosink
* ]| Play an Ogg/Vorbis file and output audio via ALSA.
* </refsect2>
* ]|
*
* Play an Ogg/Vorbis file and output audio via ALSA.
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -21,16 +21,17 @@
/**
* SECTION:element-alsasrc
* @title: alsasrc
* @see_also: alsasink
*
* This element reads data from an audio card using the ALSA API.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v alsasrc ! queue ! audioconvert ! vorbisenc ! oggmux ! filesink location=alsasrc.ogg
* ]| Record from a sound card using ALSA and encode to Ogg/Vorbis.
* </refsect2>
* ]|
* Record from a sound card using ALSA and encode to Ogg/Vorbis.
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -21,16 +21,17 @@
/**
* SECTION:element-oggdemux
* @title: oggdemux
* @see_also: <link linkend="gst-plugins-base-plugins-oggmux">oggmux</link>
*
* This element demuxes ogg files into their encoded audio and video components.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v filesrc location=test.ogg ! oggdemux ! vorbisdec ! audioconvert ! audioresample ! autoaudiosink
* ]| Decodes a vorbis audio stream stored inside an ogg container and plays it.
* </refsect2>
* ]|
* Decodes a vorbis audio stream stored inside an ogg container and plays it.
*
*/

View file

@ -20,17 +20,18 @@
/**
* SECTION:element-oggmux
* @title: oggmux
* @see_also: <link linkend="gst-plugins-base-plugins-oggdemux">oggdemux</link>
*
* This element merges streams (audio and video) into ogg files.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 v4l2src num-buffers=500 ! video/x-raw,width=320,height=240 ! videoconvert ! videorate ! theoraenc ! oggmux ! filesink location=video.ogg
* ]| Encodes a video stream captured from a v4l2-compatible camera to Ogg/Theora
* ]|
* Encodes a video stream captured from a v4l2-compatible camera to Ogg/Theora
* (the encoding will stop automatically after 500 frames)
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
@ -968,14 +969,14 @@ no_granule:
/* make sure at least one buffer is queued on all pads, two if possible
*
*
* if pad->buffer == NULL, pad->next_buffer != NULL, then
* we do not know if the buffer is the last or not
* if pad->buffer != NULL, pad->next_buffer != NULL, then
* pad->buffer is not the last buffer for the pad
* if pad->buffer != NULL, pad->next_buffer == NULL, then
* pad->buffer is the last buffer for the pad
*
*
* returns a pointer to an oggpad that holds the best buffer, or
* NULL when no pad was usable. "best" means the buffer marked
* with the lowest timestamp. If best->buffer == NULL then either
@ -1409,7 +1410,7 @@ gst_ogg_mux_make_fistail (GstOggMux * mux, ogg_stream_state * os)
* page that allows decoders to identify the type of the stream.
* After that we need to write out all extra info for the decoders.
* In the case of a codec that also needs data as configuration, we can
* find that info in the streamcaps.
* find that info in the streamcaps.
* After writing the headers we must start a new page for the data.
*/
static GstFlowReturn
@ -2034,11 +2035,11 @@ gst_ogg_mux_send_start_events (GstOggMux * ogg_mux, GstCollectPads * pads)
}
/* This function is called when there is data on all pads.
*
*
* It finds a pad to pull on, this is done by looking at the buffers
* to decide which one to use, and using the 'oldest' one first. It then calls
* gst_ogg_mux_process_best_pad() to process as much data as possible.
*
*
* If all the pads have received EOS, it flushes out all data by continually
* getting the best pad and calling gst_ogg_mux_process_best_pad() until they
* are all empty, and then sends EOS.

View file

@ -26,16 +26,17 @@
/**
* SECTION:element-opusdec
* @title: opusdec
* @see_also: opusenc, oggdemux
*
* This element decodes a OPUS stream to raw integer audio.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v filesrc location=opus.ogg ! oggdemux ! opusdec ! audioconvert ! audioresample ! alsasink
* ]| Decode an Ogg/Opus file. To create an Ogg/Opus file refer to the documentation of opusenc.
* </refsect2>
* ]|
* Decode an Ogg/Opus file. To create an Ogg/Opus file refer to the documentation of opusenc.
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -25,16 +25,17 @@
/**
* SECTION:element-opusenc
* @title: opusenc
* @see_also: opusdec, oggmux
*
* This element encodes raw audio to OPUS.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v audiotestsrc wave=sine num-buffers=100 ! audioconvert ! opusenc ! oggmux ! filesink location=sine.ogg
* ]| Encode a test sine signal to Ogg/OPUS.
* </refsect2>
* ]|
* Encode a test sine signal to Ogg/OPUS.
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -20,6 +20,7 @@
/**
* SECTION:element-clockoverlay
* @title: clockoverlay
* @see_also: #GstBaseTextOverlay, #GstTimeOverlay
*
* This element overlays the current clock time on top of a video
@ -28,18 +29,19 @@
* time is displayed in the top left corner of the picture, with some
* padding to the left and to the top.
*
* <refsect2>
* <title>Example launch lines</title>
* ## Example launch lines
* |[
* gst-launch-1.0 -v videotestsrc ! clockoverlay ! autovideosink
* ]| Display the current wall clock time in the top left corner of the video picture
* ]|
* Display the current wall clock time in the top left corner of the video picture
* |[
* gst-launch-1.0 -v videotestsrc ! clockoverlay halignment=right valignment=bottom text="Edge City" shaded-background=true font-desc="Sans, 36" ! videoconvert ! autovideosink
* ]| Another pipeline that displays the current time with some leading
* ]|
* Another pipeline that displays the current time with some leading
* text in the bottom right corner of the video picture, with the background
* of the text being shaded in order to make it more legible on top of a
* bright video background.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -25,6 +25,7 @@
/**
* SECTION:element-textoverlay
* @title: textoverlay
* @see_also: #GstTextRender, #GstTextOverlay, #GstTimeOverlay, #GstSubParse
*
* This plugin renders text on top of a video stream. This can be either
@ -37,18 +38,19 @@
* The text can contain newline characters and text wrapping is enabled by
* default.
*
* <refsect2>
* <title>Example launch lines</title>
* ## Example launch lines
* |[
* gst-launch-1.0 -v gst-launch-1.0 videotestsrc ! textoverlay text="Room A" valignment=top halignment=left font-desc="Sans, 72" ! autovideosink
* ]| Here is a simple pipeline that displays a static text in the top left
* ]|
* Here is a simple pipeline that displays a static text in the top left
* corner of the video picture
* |[
* gst-launch-1.0 -v filesrc location=subtitles.srt ! subparse ! txt. videotestsrc ! timeoverlay ! textoverlay name=txt shaded-background=yes ! autovideosink
* ]| Here is another pipeline that displays subtitles from an .srt subtitle
* ]|
* Here is another pipeline that displays subtitles from an .srt subtitle
* file, centered at the bottom of the picture and with a rectangular shading
* around the text in the background:
* <para>
*
* If you do not have such a subtitle file, create one looking like this
* in a text editor:
* |[
@ -66,8 +68,7 @@
* Uh? What are you talking about?
* I don&apos;t understand (18-62s)
* ]|
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -22,21 +22,21 @@
/**
* SECTION:element-textrender
* @title: textrender
* @see_also: #GstTextOverlay
*
* This plugin renders text received on the text sink pad to a video
* buffer (retaining the alpha channel), so it can later be overlayed
* on top of video streams using other elements.
*
* The text can contain newline characters. (FIXME: What about text
* The text can contain newline characters. (FIXME: What about text
* wrapping? It does not make sense in this context)
*
* <refsect2>
* <title>Example launch lines</title>
* ## Example launch lines
* |[
* gst-launch-1.0 -v filesrc location=subtitles.srt ! subparse ! textrender ! videoconvert ! autovideosink
* ]|
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -20,6 +20,7 @@
/**
* SECTION:element-timeoverlay
* @title: timeoverlay
* @see_also: #GstBaseTextOverlay, #GstClockOverlay
*
* This element overlays the buffer time stamps of a video stream on
@ -28,17 +29,18 @@
* time stamp is displayed in the top left corner of the picture, with some
* padding to the left and to the top.
*
* <refsect2>
* |[
* gst-launch-1.0 -v videotestsrc ! timeoverlay ! autovideosink
* ]| Display the time stamps in the top left corner of the video picture.
* ]|
* Display the time stamps in the top left corner of the video picture.
* |[
* gst-launch-1.0 -v videotestsrc ! timeoverlay halignment=right valignment=bottom text="Stream time:" shaded-background=true font-desc="Sans, 24" ! autovideosink
* ]| Another pipeline that displays the time stamps with some leading
* ]|
* Another pipeline that displays the time stamps with some leading
* text in the bottom right corner of the video picture, with the background
* of the text being shaded in order to make it more legible on top of a
* bright video background.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -22,6 +22,7 @@
/**
* SECTION:element-theoradec
* @title: theoradec
* @see_also: theoraenc, oggdemux
*
* This element decodes theora streams into raw video
@ -29,13 +30,13 @@
* video codec maintained by the <ulink url="http://www.xiph.org/">Xiph.org
* Foundation</ulink>, based on the VP3 codec.
*
* <refsect2>
* <title>Example pipeline</title>
* ## Example pipeline
* |[
* gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videoconvert ! videoscale ! autovideosink
* ]| This example pipeline will decode an ogg stream and decodes the theora video in it.
* ]|
* This example pipeline will decode an ogg stream and decode the theora video in it.
* Refer to the theoraenc example to create the ogg file.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -22,6 +22,7 @@
/**
* SECTION:element-theoraenc
* @title: theoraenc
* @see_also: theoradec, oggmux
*
* This element encodes raw video into a Theora stream.
@ -45,14 +46,14 @@
* A videorate element is often required in front of theoraenc, especially
* when transcoding and when putting Theora into the Ogg container.
*
* <refsect2>
* <title>Example pipeline</title>
* ## Example pipeline
* |[
* gst-launch-1.0 -v videotestsrc num-buffers=500 ! video/x-raw,width=1280,height=720 ! queue ! progressreport ! theoraenc ! oggmux ! filesink location=videotestsrc.ogg
* ]| This example pipeline will encode a test video source to theora muxed in an
* ]|
* This example pipeline will encode a test video source to theora muxed in an
* ogg container. Refer to the theoradec documentation to decode the created
* stream.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -20,6 +20,7 @@
/**
* SECTION:element-theoraparse
* @title: theoraparse
* @see_also: theoradec, oggdemux, vorbisparse
*
* The theoraparse element will parse the header packets of the Theora
@ -40,18 +41,19 @@
* offsetting all buffers that it outputs by a specified amount, and updating
* that offset from the value array whenever a keyframe is processed.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v filesrc location=video.ogg ! oggdemux ! theoraparse ! fakesink
* ]| This pipeline shows that the streamheader is set in the caps, and that each
* ]|
* This pipeline shows that the streamheader is set in the caps, and that each
* buffer has the timestamp, duration, offset, and offset_end set.
* |[
* gst-launch-1.0 filesrc location=video.ogg ! oggdemux ! theoraparse \
* ! oggmux ! filesink location=video-remuxed.ogg
* ]| This pipeline shows remuxing. video-remuxed.ogg might not be exactly the same
* ]|
* This pipeline shows remuxing. video-remuxed.ogg might not be exactly the same
* as video.ogg, but they should produce exactly the same decoded data.
* </refsect2>
*
*/
/* FIXME 0.11: suppress warnings for deprecated API such as GValueArray

View file

@ -19,6 +19,7 @@
/**
* SECTION:element-vorbisdec
* @title: vorbisdec
* @see_also: vorbisenc, oggdemux
*
* This element decodes a Vorbis stream to raw float audio.
@ -27,13 +28,12 @@
* Foundation</ulink>. As it outputs raw float audio you will often need to
* put an audioconvert element after it.
*
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v filesrc location=sine.ogg ! oggdemux ! vorbisdec ! audioconvert ! audioresample ! autoaudiosink
* ]| Decode an Ogg/Vorbis. To create an Ogg/Vorbis file refer to the documentation of vorbisenc.
* </refsect2>
* ]|
* Decode an Ogg/Vorbis. To create an Ogg/Vorbis file refer to the documentation of vorbisenc.
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -19,6 +19,7 @@
/**
* SECTION:element-vorbisenc
* @title: vorbisenc
* @see_also: vorbisdec, oggmux
*
* This element encodes raw float audio into a Vorbis stream.
@ -26,16 +27,17 @@
* audio codec maintained by the <ulink url="http://www.xiph.org/">Xiph.org
* Foundation</ulink>.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v audiotestsrc wave=sine num-buffers=100 ! audioconvert ! vorbisenc ! oggmux ! filesink location=sine.ogg
* ]| Encode a test sine signal to Ogg/Vorbis. Note that the resulting file
* ]|
* Encode a test sine signal to Ogg/Vorbis. Note that the resulting file
* will be really small because a sine signal compresses very well.
* |[
* gst-launch-1.0 -v autoaudiosrc ! audioconvert ! vorbisenc ! oggmux ! filesink location=alsasrc.ogg
* ]| Record from a sound card and encode to Ogg/Vorbis.
* </refsect2>
* ]|
* Record from a sound card and encode to Ogg/Vorbis.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"

View file

@ -20,6 +20,7 @@
/**
* SECTION:element-vorbisparse
* @title: vorbisparse
* @see_also: vorbisdec, oggdemux, theoraparse
*
* The vorbisparse element will parse the header packets of the Vorbis
@ -33,18 +34,19 @@
* vorbisparse outputs have all of the metadata that oggmux expects to receive,
* which allows you to (for example) remux an ogg/vorbis file.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v filesrc location=sine.ogg ! oggdemux ! vorbisparse ! fakesink
* ]| This pipeline shows that the streamheader is set in the caps, and that each
* ]|
* This pipeline shows that the streamheader is set in the caps, and that each
* buffer has the timestamp, duration, offset, and offset_end set.
* |[
* gst-launch-1.0 filesrc location=sine.ogg ! oggdemux ! vorbisparse \
* ! oggmux ! filesink location=sine-remuxed.ogg
* ]| This pipeline shows remuxing. sine-remuxed.ogg might not be exactly the same
* ]|
* This pipeline shows remuxing. sine-remuxed.ogg might not be exactly the same
* as sine.ogg, but they should produce exactly the same decoded data.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -19,6 +19,7 @@
/**
* SECTION:element-vorbistag
* @title: vorbistag
* @see_also: #oggdemux, #oggmux, #vorbisparse, #GstTagSetter
*
* The vorbistags element can change the tag contained within a raw
@ -34,14 +35,14 @@
* automatically (and merged according to the merge mode set via the tag
* setter interface).
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v filesrc location=foo.ogg ! oggdemux ! vorbistag ! oggmux ! filesink location=bar.ogg
* ]| This element is not useful with gst-launch-1.0, because it does not support
* ]|
* This element is not useful with gst-launch-1.0, because it does not support
* setting the tags on a #GstTagSetter interface. Conceptually, the element
* will usually be used in this order though.
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -27,6 +27,7 @@
/**
* SECTION:gstdmabuf
* @title: GstDmaBufAllocator
* @short_description: Memory wrapper for Linux dmabuf memory
* @see_also: #GstMemory
*

View file

@ -20,6 +20,7 @@
/**
* SECTION:gstfdmemory
* @title: GstFdAllocator
* @short_description: Memory wrapper for fd backed memory
* @see_also: #GstMemory
*

View file

@ -19,6 +19,7 @@
*/
/**
* SECTION:gstappsink
* @title: GstAppSink
* @short_description: Easy way for applications to extract samples from a
* pipeline
* @see_also: #GstSample, #GstBaseSink, appsrc

View file

@ -19,6 +19,7 @@
*/
/**
* SECTION:gstappsrc
* @title: GstAppSrc
* @short_description: Easy way for applications to inject buffers into a
* pipeline
* @see_also: #GstBaseSrc, appsink

View file

@ -18,6 +18,7 @@
*/
/**
* SECTION:gstaudiochannels
* @title: Audio-channels
* @short_description: Support library for audio channel handling
*
* This library contains some helper functions for multichannel audio.

View file

@ -32,24 +32,18 @@
/**
* SECTION:audioconverter
* @title: GstAudioConverter
* @short_description: Generic audio conversion
*
* <refsect2>
* <para>
* This object is used to convert audio samples from one format to another.
* The object can perform conversion of:
* <itemizedlist>
* <listitem><para>
* audio format with optional dithering and noise shaping
* </para></listitem>
* <listitem><para>
* audio samplerate
* </para></listitem>
* <listitem><para>
* audio channels and channel layout
* </para></listitem>
* </para>
* </refsect2>
*
* * audio format with optional dithering and noise shaping
*
* * audio samplerate
*
* * audio channels and channel layout
*
*/
#ifndef GST_DISABLE_GST_DEBUG
@ -1336,7 +1330,7 @@ gst_audio_converter_samples (GstAudioConverter * convert,
}
/**
* gst_audio_converter_supports_inplace
* gst_audio_converter_supports_inplace:
* @convert: a #GstAudioConverter
*
* Returns whether the audio converter can perform the conversion in-place.

View file

@ -42,6 +42,7 @@ GST_DEBUG_CATEGORY_STATIC (audio_resampler_debug);
/**
* SECTION:gstaudioresampler
* @title: GstAudioResampler
* @short_description: Utility structure for resampler information
*
* #GstAudioResampler is a structure which holds the information

View file

@ -28,20 +28,20 @@ G_BEGIN_DECLS
typedef struct _GstAudioResampler GstAudioResampler;
/**
* GST_AUDIO_RESAMPLER_OPT_CUTOFF
* GST_AUDIO_RESAMPLER_OPT_CUTOFF:
*
* G_TYPE_DOUBLE, Cutoff parameter for the filter. 0.940 is the default.
*/
#define GST_AUDIO_RESAMPLER_OPT_CUTOFF "GstAudioResampler.cutoff"
/**
* GST_AUDIO_RESAMPLER_OPT_STOP_ATTENUTATION
* GST_AUDIO_RESAMPLER_OPT_STOP_ATTENUTATION:
*
* G_TYPE_DOUBLE, stopband attenuation in decibels. The attenuation
* after the stopband for the kaiser window. 85 dB is the default.
*/
#define GST_AUDIO_RESAMPLER_OPT_STOP_ATTENUATION "GstAudioResampler.stop-attenutation"
/**
* GST_AUDIO_RESAMPLER_OPT_TRANSITION_BANDWIDTH
* GST_AUDIO_RESAMPLER_OPT_TRANSITION_BANDWIDTH:
*
* G_TYPE_DOUBLE, transition bandwidth. The width of the
* transition band for the kaiser window. 0.087 is the default.
@ -137,7 +137,7 @@ typedef enum {
*/
#define GST_AUDIO_RESAMPLER_OPT_FILTER_INTERPOLATION "GstAudioResampler.filter-interpolation"
/**
* GST_AUDIO_RESAMPLER_OPT_FILTER_OVERSAMPLE
* GST_AUDIO_RESAMPLER_OPT_FILTER_OVERSAMPLE:
*
* G_TYPE_UINT, oversampling to use when interpolating filters
* 8 is the default.

View file

@ -18,6 +18,7 @@
*/
/**
* SECTION:gstaudio
* @title: GstAudio
* @short_description: Support library for audio elements
*
* This library contains some helper functions for audio elements.
@ -60,7 +61,7 @@ ensure_debug_category (void)
* @segment: Segment in %GST_FORMAT_TIME or %GST_FORMAT_DEFAULT to which
* the buffer should be clipped.
* @rate: sample rate.
* @bpf: size of one audio frame in bytes. This is the size of one sample *
* @bpf: size of one audio frame in bytes. This is the size of one sample *
* number of channels.
*
* Clip the buffer to the given %GstSegment.

View file

@ -22,6 +22,7 @@
/**
* SECTION:gstaudiobasesink
* @title: GstAudioBaseSink
* @short_description: Base class for audio sinks
* @see_also: #GstAudioSink, #GstAudioRingBuffer.
*

View file

@ -22,6 +22,7 @@
/**
* SECTION:gstaudiobasesrc
* @title: GstAudioBaseSrc
* @short_description: Base class for audio sources
* @see_also: #GstAudioSrc, #GstAudioRingBuffer.
*

View file

@ -36,62 +36,53 @@
/**
* SECTION:gstaudiocdsrc
* @title: GstAudioCdSrc
* @short_description: Base class for Audio CD sources
*
* <para>
* Provides a base class for CD digital audio (CDDA) sources, which handles
* things like seeking, querying, discid calculation, tags, and buffer
* timestamping.
* </para>
* <refsect2>
* <title>Using GstAudioCdSrc-based elements in applications</title>
* <para>
*
* ## Using GstAudioCdSrc-based elements in applications
*
* GstAudioCdSrc registers two #GstFormat<!-- -->s of its own, namely
* the "track" format and the "sector" format. Applications will usually
* only find the "track" format interesting. You can retrieve that #GstFormat
* for use in seek events or queries with gst_format_get_by_nick("track").
* </para>
* <para>
*
* In order to query the number of tracks, for example, an application would
* set the CDDA source element to READY or PAUSED state and then query the
* number of tracks via gst_element_query_duration() using the track
* format acquired above. Applications can query the currently playing track
* in the same way.
* </para>
* <para>
*
* Alternatively, applications may retrieve the currently playing track and
* the total number of tracks from the taglist that will posted on the bus
* whenever the CD is opened or the currently playing track changes. The
* taglist will contain GST_TAG_TRACK_NUMBER and GST_TAG_TRACK_COUNT tags.
* </para>
* <para>
*
* Applications playing back CD audio using playbin and cdda://n URIs should
* issue a seek command in track format to change between tracks, rather than
* setting a new cdda://n+1 URI on playbin (as setting a new URI on playbin
* involves closing and re-opening the CD device, which is much much slower).
* </para>
* <refsect2>
* </refsect2>
* <title>Tags and meta-information</title>
* <para>
*
* ## Tags and meta-information
*
* CDDA sources will automatically emit a number of tags, details about which
* can be found in the libgsttag documentation. Those tags are:
* #GST_TAG_CDDA_CDDB_DISCID, #GST_TAG_CDDA_CDDB_DISCID_FULL,
* #GST_TAG_CDDA_MUSICBRAINZ_DISCID, #GST_TAG_CDDA_MUSICBRAINZ_DISCID_FULL,
* among others.
* </para>
* </refsect2>
* <refsect2>
* <title>Tracks and Table of Contents (TOC)</title>
* <para>
*
* ## Tracks and Table of Contents (TOC)
*
* Applications will be informed of the available tracks via a TOC message
* on the pipeline's #GstBus. The #GstToc will contain a #GstTocEntry for
* each track, with information about each track. The duration for each
* track can be retrieved via the #GST_TAG_DURATION tag from each entry's
* tag list, or calculated via gst_toc_entry_get_start_stop_times().
* The track entries in the TOC will be sorted by track number.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -22,6 +22,7 @@
/**
* SECTION:gstaudioclock
* @title: GstAudioClock
* @short_description: Helper object for implementing audio clocks
* @see_also: #GstAudioBaseSink, #GstSystemClock
*

View file

@ -23,6 +23,7 @@
/**
* SECTION:gstaudiodecoder
* @title: GstAudioDecoder
* @short_description: Base class for audio decoders
* @see_also: #GstBaseTransform
*
@ -30,72 +31,48 @@
* raw audio samples.
*
* GstAudioDecoder and subclass should cooperate as follows.
* <orderedlist>
* <listitem>
* <itemizedlist><title>Configuration</title>
* <listitem><para>
* Initially, GstAudioDecoder calls @start when the decoder element
*
* ## Configuration
*
* * Initially, GstAudioDecoder calls @start when the decoder element
* is activated, which allows subclass to perform any global setup.
* Base class (context) parameters can already be set according to subclass
* capabilities (or possibly upon receive more information in subsequent
* @set_format).
* </para></listitem>
* <listitem><para>
* GstAudioDecoder calls @set_format to inform subclass of the format
* * GstAudioDecoder calls @set_format to inform subclass of the format
* of input audio data that it is about to receive.
* While unlikely, it might be called more than once, if changing input
* parameters require reconfiguration.
* </para></listitem>
* <listitem><para>
* GstAudioDecoder calls @stop at end of all processing.
* </para></listitem>
* </itemizedlist>
* </listitem>
* * GstAudioDecoder calls @stop at end of all processing.
*
* As of configuration stage, and throughout processing, GstAudioDecoder
* provides various (context) parameters, e.g. describing the format of
* output audio data (valid when output caps have been set) or current parsing state.
* Conversely, subclass can and should configure context to inform
* base class of its expectation w.r.t. buffer handling.
* <listitem>
* <itemizedlist>
* <title>Data processing</title>
* <listitem><para>
* Base class gathers input data, and optionally allows subclass
*
* ## Data processing
* * Base class gathers input data, and optionally allows subclass
* to parse this into subsequently manageable (as defined by subclass)
* chunks. Such chunks are subsequently referred to as 'frames',
* though they may or may not correspond to 1 (or more) audio format frame.
* </para></listitem>
* <listitem><para>
* Input frame is provided to subclass' @handle_frame.
* </para></listitem>
* <listitem><para>
* If codec processing results in decoded data, subclass should call
* * Input frame is provided to subclass' @handle_frame.
* * If codec processing results in decoded data, subclass should call
* @gst_audio_decoder_finish_frame to have decoded data pushed
* downstream.
* </para></listitem>
* <listitem><para>
* Just prior to actually pushing a buffer downstream,
* * Just prior to actually pushing a buffer downstream,
* it is passed to @pre_push. Subclass should either use this callback
* to arrange for additional downstream pushing or otherwise ensure such
* custom pushing occurs after at least a method call has finished since
* setting src pad caps.
* </para></listitem>
* <listitem><para>
* During the parsing process GstAudioDecoderClass will handle both
* * During the parsing process GstAudioDecoderClass will handle both
* srcpad and sinkpad events. Sink events will be passed to subclass
* if @event callback has been provided.
* </para></listitem>
* </itemizedlist>
* </listitem>
* <listitem>
* <itemizedlist><title>Shutdown phase</title>
* <listitem><para>
* GstAudioDecoder class calls @stop to inform the subclass that data
*
* ## Shutdown phase
*
* * GstAudioDecoder class calls @stop to inform the subclass that data
* parsing will be stopped.
* </para></listitem>
* </itemizedlist>
* </listitem>
* </orderedlist>
*
* Subclass is responsible for providing pad template caps for
* source and sink pads. The pads need to be named "sink" and "src". It also
@ -125,23 +102,18 @@
* bitrates.
*
* Things that subclass need to take care of:
* <itemizedlist>
* <listitem><para>Provide pad templates</para></listitem>
* <listitem><para>
* Set source pad caps when appropriate
* </para></listitem>
* <listitem><para>
* Set user-configurable properties to sane defaults for format and
*
* * Provide pad templates
* * Set source pad caps when appropriate
* * Set user-configurable properties to sane defaults for format and
* implementing codec at hand, and convey some subclass capabilities and
* expectations in context.
* </para></listitem>
* <listitem><para>
* Accept data in @handle_frame and provide encoded results to
*
* * Accept data in @handle_frame and provide encoded results to
* @gst_audio_decoder_finish_frame. If it is prepared to perform
* PLC, it should also accept NULL data in @handle_frame and provide for
* data for indicated duration.
* </para></listitem>
* </itemizedlist>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -21,6 +21,7 @@
/**
* SECTION:gstaudioencoder
* @title: GstAudioEncoder
* @short_description: Base class for audio encoders
* @see_also: #GstBaseTransform
*
@ -28,65 +29,46 @@
* encoded audio data.
*
* GstAudioEncoder and subclass should cooperate as follows.
* <orderedlist>
* <listitem>
* <itemizedlist><title>Configuration</title>
* <listitem><para>
* Initially, GstAudioEncoder calls @start when the encoder element
*
* ## Configuration
*
* * Initially, GstAudioEncoder calls @start when the encoder element
* is activated, which allows subclass to perform any global setup.
* </para></listitem>
* <listitem><para>
* GstAudioEncoder calls @set_format to inform subclass of the format
*
* * GstAudioEncoder calls @set_format to inform subclass of the format
* of input audio data that it is about to receive. Subclass should
* setup for encoding and configure various base class parameters
* appropriately, notably those directing desired input data handling.
* While unlikely, it might be called more than once, if changing input
* parameters require reconfiguration.
* </para></listitem>
* <listitem><para>
* GstAudioEncoder calls @stop at end of all processing.
* </para></listitem>
* </itemizedlist>
* </listitem>
*
* * GstAudioEncoder calls @stop at end of all processing.
*
* As of configuration stage, and throughout processing, GstAudioEncoder
* maintains various parameters that provide required context,
* e.g. describing the format of input audio data.
* Conversely, subclass can and should configure these context parameters
* to inform base class of its expectation w.r.t. buffer handling.
* <listitem>
* <itemizedlist>
* <title>Data processing</title>
* <listitem><para>
* Base class gathers input sample data (as directed by the context's
*
* ## Data processing
*
* * Base class gathers input sample data (as directed by the context's
* frame_samples and frame_max) and provides this to subclass' @handle_frame.
* </para></listitem>
* <listitem><para>
* If codec processing results in encoded data, subclass should call
* * If codec processing results in encoded data, subclass should call
* gst_audio_encoder_finish_frame() to have encoded data pushed
* downstream. Alternatively, it might also call
* gst_audio_encoder_finish_frame() (with a NULL buffer and some number of
* dropped samples) to indicate dropped (non-encoded) samples.
* </para></listitem>
* <listitem><para>
* Just prior to actually pushing a buffer downstream,
* * Just prior to actually pushing a buffer downstream,
* it is passed to @pre_push.
* </para></listitem>
* <listitem><para>
* During the parsing process GstAudioEncoderClass will handle both
* * During the parsing process GstAudioEncoderClass will handle both
* srcpad and sinkpad events. Sink events will be passed to subclass
* if @event callback has been provided.
* </para></listitem>
* </itemizedlist>
* </listitem>
* <listitem>
* <itemizedlist><title>Shutdown phase</title>
* <listitem><para>
* GstAudioEncoder class calls @stop to inform the subclass that data
*
* ## Shutdown phase
*
* * GstAudioEncoder class calls @stop to inform the subclass that data
* parsing will be stopped.
* </para></listitem>
* </itemizedlist>
* </listitem>
* </orderedlist>
*
* Subclass is responsible for providing pad template caps for
* source and sink pads. The pads need to be named "sink" and "src". It also
@ -125,25 +107,16 @@
* by same sample count and sample rate).
*
* Things that subclass need to take care of:
* <itemizedlist>
* <listitem><para>Provide pad templates</para></listitem>
* <listitem><para>
* Set source pad caps when appropriate
* </para></listitem>
* <listitem><para>
* Inform base class of buffer processing needs using context's
*
* * Provide pad templates
* * Set source pad caps when appropriate
* * Inform base class of buffer processing needs using context's
* frame_samples and frame_bytes.
* </para></listitem>
* <listitem><para>
* Set user-configurable properties to sane defaults for format and
* * Set user-configurable properties to sane defaults for format and
* implementing codec at hand, e.g. those controlling timestamp behaviour
* and discontinuity processing.
* </para></listitem>
* <listitem><para>
* Accept data in @handle_frame and provide encoded results to
* * Accept data in @handle_frame and provide encoded results to
* gst_audio_encoder_finish_frame().
* </para></listitem>
* </itemizedlist>
*
*/

View file

@ -21,6 +21,7 @@
/**
* SECTION:gstaudiofilter
* @title: GstAudioFilter
* @short_description: Base class for simple audio filters
*
* #GstAudioFilter is a #GstBaseTransform<!-- -->-derived base class for simple audio

View file

@ -21,6 +21,7 @@
/**
* SECTION:gstaudioiec61937
* @title: GstAudio IEC61937
* @short_description: Utility functions for IEC 61937 payloading
*
* This module contains some helper functions for encapsulating various

View file

@ -19,6 +19,7 @@
/**
* SECTION:gstaudiometa
* @title: GstAudioDownmixMeta
* @short_description: Buffer metadata for audio downmix matrix handling
*
* #GstAudioDownmixMeta defines an audio downmix matrix to be send along with

View file

@ -19,22 +19,19 @@
/**
* SECTION:gstaudioringbuffer
* @title: GstAudioRingBuffer
* @short_description: Base class for audio ringbuffer implementations
* @see_also: #GstAudioBaseSink, #GstAudioSink
*
* <refsect2>
* <para>
* This object is the base class for audio ringbuffers used by the base
* audio source and sink classes.
* </para>
* <para>
*
* The ringbuffer abstracts a circular buffer of data. One reader and
* one writer can operate on the data from different threads in a lockfree
* manner. The base class is sufficiently flexible to be used as an
 * abstraction for DMA based ringbuffers as well as pure software
* implementations.
* </para>
* </refsect2>
*
*/
#include <string.h>

View file

@ -22,43 +22,27 @@
/**
* SECTION:gstaudiosink
* @title: GstAudioSink
* @short_description: Simple base class for audio sinks
* @see_also: #GstAudioBaseSink, #GstAudioRingBuffer, #GstAudioSink.
*
* This is the most simple base class for audio sinks that only requires
* subclasses to implement a set of simple functions:
*
* <variablelist>
* <varlistentry>
* <term>open()</term>
* <listitem><para>Open the device.</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>prepare()</term>
* <listitem><para>Configure the device with the specified format.</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>write()</term>
* <listitem><para>Write samples to the device.</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>reset()</term>
* <listitem><para>Unblock writes and flush the device.</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>delay()</term>
* <listitem><para>Get the number of samples written but not yet played
* by the device.</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>unprepare()</term>
* <listitem><para>Undo operations done by prepare.</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>close()</term>
* <listitem><para>Close the device.</para></listitem>
* </varlistentry>
* </variablelist>
* * `open()` :Open the device.
*
* * `prepare()` :Configure the device with the specified format.
*
* * `write()` :Write samples to the device.
*
* * `reset()` :Unblock writes and flush the device.
*
* * `delay()` :Get the number of samples written but not yet played
* by the device.
*
* * `unprepare()` :Undo operations done by prepare.
*
* * `close()` :Close the device.
*
* All scheduling of samples and timestamps is done in this base class
* together with #GstAudioBaseSink using a default implementation of a

View file

@ -22,43 +22,20 @@
/**
* SECTION:gstaudiosrc
* @title: GstAudioSrc
* @short_description: Simple base class for audio sources
* @see_also: #GstAudioBaseSrc, #GstAudioRingBuffer, #GstAudioSrc.
*
* This is the most simple base class for audio sources that only requires
* subclasses to implement a set of simple functions:
*
* <variablelist>
* <varlistentry>
* <term>open()</term>
* <listitem><para>Open the device.</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>prepare()</term>
* <listitem><para>Configure the device with the specified format.</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>read()</term>
* <listitem><para>Read samples from the device.</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>reset()</term>
* <listitem><para>Unblock reads and flush the device.</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>delay()</term>
* <listitem><para>Get the number of samples in the device but not yet read.
* </para></listitem>
* </varlistentry>
* <varlistentry>
* <term>unprepare()</term>
* <listitem><para>Undo operations done by prepare.</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>close()</term>
* <listitem><para>Close the device.</para></listitem>
* </varlistentry>
* </variablelist>
* * `open()` :Open the device.
* * `prepare()` :Configure the device with the specified format.
* * `read()` :Read samples from the device.
* * `reset()` :Unblock reads and flush the device.
* * `delay()` :Get the number of samples in the device but not yet read.
* * `unprepare()` :Undo operations done by prepare.
* * `close()` :Close the device.
*
* All scheduling of samples and timestamps is done in this base class
* together with #GstAudioBaseSrc using a default implementation of a

View file

@ -19,14 +19,12 @@
/**
* SECTION:gststreamvolume
* @title: GstStreamVolume
* @short_description: Interface for elements that provide a stream volume
*
* <refsect2>
* <para>
* This interface is implemented by elements that provide a stream volume. Examples for
* such elements are #volume and #playbin.
* </para>
* <para>
*
* Applications can use this interface to get or set the current stream volume. For this
* the "volume" #GObject property can be used or the helper functions gst_stream_volume_set_volume()
* and gst_stream_volume_get_volume(). This volume is always a linear factor, i.e. 0.0 is muted
@ -36,13 +34,11 @@
*
* Separate from the volume the stream can also be muted by the "mute" #GObject property or
* gst_stream_volume_set_mute() and gst_stream_volume_get_mute().
* </para>
* <para>
*
* Elements that provide some kind of stream volume should implement the "volume" and
* "mute" #GObject properties and handle setting and getting of them properly.
* The volume property is defined to be a linear volume factor.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -19,8 +19,9 @@
/**
* SECTION:gstfft
* @title: GstFFT
* @short_description: General FFT functions and declarations
*
*
* This library includes general definitions and functions, useful for
* all typed FFT classes.
*/

View file

@ -31,6 +31,7 @@
/**
* SECTION:gstfftf32
* @title: GstFFTF32
* @short_description: FFT functions for 32 bit float samples
*
* #GstFFTF32 provides a FFT implementation and related functions for

View file

@ -31,6 +31,7 @@
/**
* SECTION:gstfftf64
* @title: GstFFTF64
* @short_description: FFT functions for 64 bit float samples
*
* #GstFFTF64 provides a FFT implementation and related functions for

View file

@ -31,6 +31,7 @@
/**
* SECTION:gstffts16
* @title: GstFFTS16
* @short_description: FFT functions for signed 16 bit integer samples
*
* #GstFFTS16 provides a FFT implementation and related functions for

View file

@ -31,6 +31,7 @@
/**
* SECTION:gstffts32
* @title: GstFFTS32
* @short_description: FFT functions for signed 32 bit integer samples
*
* #GstFFTS32 provides a FFT implementation and related functions for

View file

@ -24,14 +24,12 @@
/**
* SECTION:gstpbutilscodecutils
* @title: Codec utilities
* @short_description: Miscellaneous codec-specific utility functions
*
* <refsect2>
* <para>
 * Provides codec-specific utility functions such as functions to provide the
* codec profile and level in human-readable string form from header data.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
@ -173,9 +171,7 @@ gst_codec_utils_aac_get_channels (const guint8 * audio_config, guint len)
* determined using the AudioObjectType field which is in the first 5 bits of
* @audio_config.
*
* <note>
* HE-AAC support has not yet been implemented.
* </note>
* > HE-AAC support has not yet been implemented.
*
* Returns: The profile as a const string and %NULL if the profile could not be
* determined.
@ -221,23 +217,13 @@ gst_codec_utils_aac_get_profile (const guint8 * audio_config, guint len)
* The @audio_config parameter follows the following format, starting from the
* most significant bit of the first byte:
*
* <itemizedlist>
* <listitem><para>
* Bit 0:4 contains the AudioObjectType
* </para></listitem>
* <listitem><para>
* Bit 5:8 contains the sample frequency index (if this is 0xf, then the
* next 24 bits define the actual sample frequency, and subsequent
* fields are appropriately shifted).
* </para></listitem>
* <listitem><para>
* Bit 9:12 contains the channel configuration
* </para></listitem>
* </itemizedlist>
* * Bit 0:4 contains the AudioObjectType
* * Bit 5:8 contains the sample frequency index (if this is 0xf, then the
* next 24 bits define the actual sample frequency, and subsequent
* fields are appropriately shifted).
* * Bit 9:12 contains the channel configuration
*
* <note>
* HE-AAC support has not yet been implemented.
* </note>
* > HE-AAC support has not yet been implemented.
*
* Returns: The level as a const string and %NULL if the level could not be
* determined.
@ -477,16 +463,14 @@ gst_codec_utils_aac_caps_set_level_and_profile (GstCaps * caps,
* as a bitstream here, with bit 0 being the most significant bit of the first
* byte.
*
* <itemizedlist>
* <listitem><para>Bit 0:7 - Profile indication</para></listitem>
* <listitem><para>Bit 8 - constraint_set0_flag</para></listitem>
* <listitem><para>Bit 9 - constraint_set1_flag</para></listitem>
* <listitem><para>Bit 10 - constraint_set2_flag</para></listitem>
* <listitem><para>Bit 11 - constraint_set3_flag</para></listitem>
* <listitem><para>Bit 12 - constraint_set3_flag</para></listitem>
* <listitem><para>Bit 13:15 - Reserved</para></listitem>
* <listitem><para>Bit 16:24 - Level indication</para></listitem>
* </itemizedlist>
* * Bit 0:7 - Profile indication
* * Bit 8 - constraint_set0_flag
* * Bit 9 - constraint_set1_flag
* * Bit 10 - constraint_set2_flag
* * Bit 11 - constraint_set3_flag
 * * Bit 12 - constraint_set4_flag
* * Bit 13:15 - Reserved
* * Bit 16:24 - Level indication
*
* Returns: The profile as a const string, or %NULL if there is an error.
*/
@ -735,18 +719,16 @@ gst_codec_utils_h264_caps_set_level_and_profile (GstCaps * caps,
* specification. The profile_tier_level is viewed as a bitstream here,
* with bit 0 being the most significant bit of the first byte.
*
* <itemizedlist>
* <listitem><para>Bit 0:1 - general_profile_space</para></listitem>
* <listitem><para>Bit 2 - general_tier_flag</para></listitem>
* <listitem><para>Bit 3:7 - general_profile_idc</para></listitem>
* <listitem><para>Bit 8:39 - gernal_profile_compatibility_flags</para></listitem>
* <listitem><para>Bit 40 - general_progressive_source_flag</para></listitem>
* <listitem><para>Bit 41 - general_interlaced_source_flag</para></listitem>
* <listitem><para>Bit 42 - general_non_packed_constraint_flag</para></listitem>
* <listitem><para>Bit 43 - general_frame_only_constraint_flag</para></listitem>
* <listitem><para>Bit 44:87 - general_reserved_zero_44bits</para></listitem>
* <listitem><para>Bit 88:95 - general_level_idc</para></listitem>
* </itemizedlist>
* * Bit 0:1 - general_profile_space
* * Bit 2 - general_tier_flag
* * Bit 3:7 - general_profile_idc
 * * Bit 8:39 - general_profile_compatibility_flags
* * Bit 40 - general_progressive_source_flag
* * Bit 41 - general_interlaced_source_flag
* * Bit 42 - general_non_packed_constraint_flag
* * Bit 43 - general_frame_only_constraint_flag
* * Bit 44:87 - general_reserved_zero_44bits
* * Bit 88:95 - general_level_idc
*
* Returns: The profile as a const string, or %NULL if there is an error.
*

View file

@ -19,21 +19,18 @@
/**
* SECTION:gstpbutilsdescriptions
* @title: Descriptions
* @short_description: Provides human-readable descriptions for caps/codecs
* and encoder, decoder, URI source and URI sink elements
*
* <refsect2>
* <para>
* The above functions provide human-readable strings for media formats
* and decoder/demuxer/depayloader/encoder/muxer/payloader elements for use
* in error dialogs or other messages shown to users.
* </para>
* <para>
*
* gst_pb_utils_add_codec_description_to_tag_list() is a utility function
* for demuxer and decoder elements to add audio/video codec tags from a
* given (fixed) #GstCaps.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -20,6 +20,7 @@
/**
* SECTION:encoding-profile
* @title: GstEncodingProfile
* @short_description: Encoding profile library
*
* Functions to create and handle encoding profiles.
@ -189,7 +190,6 @@
* return (GstEncodingProfile*) prof;
*}
*
*
* ]|
*
* # Example: Using an encoder preset with a profile
@ -232,7 +232,6 @@
* return (GstEncodingProfile*) prof;
*}
*
*
* ]|
*
* # Example: Listing categories, targets and profiles

View file

@ -21,6 +21,7 @@
*/
/**
* SECTION:gstaudiovisualizer
* @title: GstAudioVisualizer
*
* A baseclass for scopes (visualizers). It takes care of re-fitting the
* audio-rate to video-rate and handles renegotiation (downstream video size

View file

@ -20,6 +20,7 @@
/**
* SECTION:gstdiscoverer
* @title: GstDiscoverer
* @short_description: Utility for discovering information on URIs.
*
* The #GstDiscoverer is a utility object which allows to get as much

View file

@ -19,6 +19,7 @@
/**
* SECTION:gstpluginsbaseversion
* @title: Version
* @short_description: GStreamer gst-plugins-base libraries version macros.
*
* Use the GST_PLUGINS_BASE_VERSION_* macros e.g. to check what version of

View file

@ -20,334 +20,239 @@
/**
* SECTION:gstpbutilsinstallplugins
* @title: Install-plugins
* @short_description: Missing plugin installation support for applications
*
* <refsect2>
* <title>Overview</title>
* <para>
* ## Overview
*
* Using this API, applications can request the installation of missing
* GStreamer plugins. These may be missing decoders/demuxers or encoders/muxers
* for a certain format, sources or sinks for a certain URI protocol
* (e.g. 'http'), or certain elements known by their element factory name
* ('audioresample').
* </para>
* <para>
* GStreamer plugins. These may be missing decoders/demuxers or
* encoders/muxers for a certain format, sources or sinks for a certain URI
* protocol (e.g. 'http'), or certain elements known by their element
* factory name ('audioresample').
*
* Whether plugin installation is supported or not depends on the operating
* system and/or distribution in question. The vendor of the operating system
* needs to make sure the necessary hooks and mechanisms are in place for
* plugin installation to work. See below for more detailed information.
* </para>
* <para>
* From the application perspective, plugin installation is usually triggered
* either
* <itemizedlist>
* <listitem><para>
* when the application itself has found that it wants or needs to install a
* certain element
* </para></listitem>
* <listitem><para>
* when the application has been notified by an element (such as playbin or
* decodebin) that one or more plugins are missing <emphasis>and</emphasis>
* the application has decided that it wants to install one or more of those
* missing plugins
* </para></listitem>
* </itemizedlist>
* </para>
* <title>Detail Strings</title>
* <para>
* The install functions in this section all take one or more 'detail strings'.
* These detail strings contain information about the type of plugin that
* needs to be installed (decoder, encoder, source, sink, or named element),
* and some additional information such GStreamer version used and a
* human-readable description of the component to install for user dialogs.
* </para>
* <para>
* system and/or distribution in question. The vendor of the operating
* system needs to make sure the necessary hooks and mechanisms are in
* place for plugin installation to work. See below for more detailed
* information.
*
* From the application perspective, plugin installation is usually
* triggered either
*
* - when the application itself has found that it wants or needs to
* install a certain element
* - when the application has been notified by an element (such as
* playbin or decodebin) that one or more plugins are missing *and* the
* application has decided that it wants to install one or more of
* those missing plugins
*
* The install functions in this section all take one or more 'detail
* strings'. These detail strings contain information about the type of
* plugin that needs to be installed (decoder, encoder, source, sink, or
 * named element), and some additional information such as the GStreamer version
* used and a human-readable description of the component to install for
* user dialogs.
*
* Applications should not concern themselves with the composition of the
* string itself. They should regard the string as if it was a shared secret
* between GStreamer and the plugin installer application.
* </para>
* <para>
* string itself. They should regard the string as if it was a shared
* secret between GStreamer and the plugin installer application.
*
* Detail strings can be obtained using the function
* gst_missing_plugin_message_get_installer_detail() on a missing-plugin
* message. Such a message will either have been found by the application on
* a pipeline's #GstBus, or the application will have created it itself using
* gst_missing_element_message_new(), gst_missing_decoder_message_new(),
* gst_missing_encoder_message_new(), gst_missing_uri_sink_message_new(), or
* gst_missing_plugin_message_get_installer_detail() on a
* missing-plugin message. Such a message will either have been found by
* the application on a pipeline's #GstBus, or the application will have
* created it itself using gst_missing_element_message_new(),
* gst_missing_decoder_message_new(),
* gst_missing_encoder_message_new(),
* gst_missing_uri_sink_message_new(), or
* gst_missing_uri_source_message_new().
* </para>
* <title>Plugin Installation from the Application Perspective</title>
* <para>
* For each GStreamer element/plugin/component that should be installed, the
* application needs one of those 'installer detail' string mentioned in the
* previous section. This string can be obtained, as already mentioned above,
* from a missing-plugin message using the function
* gst_missing_plugin_message_get_installer_detail(). The missing-plugin
* message is either posted by another element and then found on the bus
* by the application, or the application has created it itself as described
* above.
* </para>
* <para>
*
* For each GStreamer element/plugin/component that should be installed,
* the application needs one of those 'installer detail' string mentioned
* in the previous section. This string can be obtained, as already
* mentioned above, from a missing-plugin message using the function
* gst_missing_plugin_message_get_installer_detail(). The
* missing-plugin message is either posted by another element and then
* found on the bus by the application, or the application has created it
* itself as described above.
*
* The application will then call gst_install_plugins_async(), passing a
* NULL-terminated array of installer detail strings, and a function that
* should be called when the installation of the plugins has finished
* (successfully or not). Optionally, a #GstInstallPluginsContext created
* with gst_install_plugins_context_new() may be passed as well. This way
* additional optional arguments like the application window's XID can be
* passed to the external installer application.
* </para>
* <para>
* with gst_install_plugins_context_new() may be passed as well. This
* way additional optional arguments like the application window's XID can
* be passed to the external installer application.
*
* gst_install_plugins_async() will return almost immediately, with the
* return code indicating whether plugin installation was started or not.
* If the necessary hooks for plugin installation are in place and an
* external installer application has in fact been called, the passed in
* function will be called with a result code as soon as the external installer
* has finished. If the result code indicates that new plugins have been
* installed, the application will want to call gst_update_registry() so the
* run-time plugin registry is updated and the new plugins are made available
* to the application.
* <note>
* A Gtk/GLib main loop must be running in order for the result function to
* be called when the external installer has finished. If this is not the case,
* make sure to regularly call
* <programlisting>
* g_main_context_iteration (NULL,FALSE);
* </programlisting>
* from your code.
* </note>
* </para>
* <title>Plugin Installation from the Vendor/Distribution Perspective</title>
* <para>
* <emphasis>1. Installer hook</emphasis>
* </para>
* <para>
* function will be called with a result code as soon as the external
* installer has finished. If the result code indicates that new plugins
* have been installed, the application will want to call
* gst_update_registry() so the run-time plugin registry is updated and
* the new plugins are made available to the application.
*
* > A Gtk/GLib main loop must be running in order for the result function
* > to be called when the external installer has finished. If this is not
* > the case, make sure to regularly call in your code:
* >
* > g_main_context_iteration (NULL,FALSE);
*
* ## 1. Installer hook
*
* When GStreamer applications initiate plugin installation via
* gst_install_plugins_async() or gst_install_plugins_sync(), a pre-defined
* helper application will be called.
* </para>
* <para>
* gst_install_plugins_async() or gst_install_plugins_sync(), a
* pre-defined helper application will be called.
*
* The exact path of the helper application to be called is set at compile
* time, usually by the <literal>./configure</literal> script based on the
* install prefix. For a normal package build into the <literal>/usr</literal>
* prefix, this will usually default to
* <filename>/usr/libexec/gst-install-plugins-helper</filename> or
* <filename>/usr/lib/gst-install-plugins-helper</filename>.
* </para>
* <para>
* time, usually by the `./configure` script based on the install prefix.
* For a normal package build into the `/usr` prefix, this will usually
* default to `/usr/libexec/gst-install-plugins-helper` or
* `/usr/lib/gst-install-plugins-helper`.
*
* Vendors/distros who want to support GStreamer plugin installation should
* either provide such a helper script/application or use the
* <literal>./configure</literal> option
* <literal>--with-install-plugins-helper=/path/to/installer</literal> to
* make GStreamer call an installer of their own directly.
* </para>
* <para>
* It is strongly recommended that vendors provide a small helper application
* as interlocutor to the real installer though, even more so if command line
* argument munging is required to transform the command line arguments
* passed by GStreamer to the helper application into arguments that are
* understood by the real installer.
* </para>
* <para>
* either provide such a helper script/application or use the `./configure`
* option `--with-install-plugins-helper=/path/to/installer` to make
* GStreamer call an installer of their own directly.
*
* It is strongly recommended that vendors provide a small helper
* application as interlocutor to the real installer though, even more so
* if command line argument munging is required to transform the command
* line arguments passed by GStreamer to the helper application into
* arguments that are understood by the real installer.
*
 * The helper application path defined at compile time can be overridden at
* runtime by setting the <envar>GST_INSTALL_PLUGINS_HELPER</envar>
* environment variable. This can be useful for testing/debugging purposes.
* </para>
* <para>
* <emphasis>2. Arguments passed to the install helper</emphasis>
* </para>
* <para>
* GStreamer will pass the following arguments to the install helper (this is
* in addition to the path of the executable itself, which is by convention
* argv[0]):
* <itemizedlist>
* <listitem><para>
* none to many optional arguments in the form of
* <literal>--foo-bar=val</literal>. Example:
* <literal>--transient-for=XID</literal> where XID is the X Window ID of
* the main window of the calling application (so the installer can make
* itself transient to that window). Unknown optional arguments should
* be ignored by the installer.
* </para></listitem>
* <listitem><para>
* one 'installer detail string' argument for each plugin to be installed;
* these strings will have a <literal>gstreamer</literal> prefix; the
* exact format of the detail string is explained below
* </para></listitem>
* </itemizedlist>
* </para>
* <para>
* <emphasis>3. Detail string describing the missing plugin</emphasis>
* </para>
* <para>
* The string is in UTF-8 encoding and is made up of several fields, separated
* by '|' characters (but neither the first nor the last character is a '|').
* The fields are:
* <itemizedlist>
* <listitem><para>
* plugin system identifier, ie. "gstreamer"
* </para><para>
* This identifier determines the format of the rest of the detail string.
* Automatic plugin installers should not process detail strings with
* unknown identifiers. This allows other plugin-based libraries to use
* the same mechanism for their automatic plugin installation needs, or
* for the format to be changed should it turn out to be insufficient.
* </para></listitem>
* <listitem><para>
* plugin system version, e.g. "0.10"
* </para><para>
* This is required so that when there is a GStreamer-0.12 or GStreamer-1.0
* at some point in future, the different major versions can still co-exist
* and use the same plugin install mechanism in the same way.
* </para></listitem>
* <listitem><para>
* application identifier, e.g. "totem"
* </para><para>
* This may also be in the form of "pid/12345" if the program name can't
* be obtained for some reason.
* </para></listitem>
* <listitem><para>
* human-readable localised description of the required component,
* e.g. "Vorbis audio decoder"
* </para></listitem>
* <listitem><para>
* identifier string for the required component (see below for details about
* how to map this to the package/plugin that needs installing), e.g.
* <itemizedlist>
* <listitem><para>
* urisource-$(PROTOCOL_REQUIRED), e.g. urisource-http or urisource-mms
* </para></listitem>
* <listitem><para>
* element-$(ELEMENT_REQUIRED), e.g. element-videoconvert
* </para></listitem>
* <listitem><para>
* decoder-$(CAPS_REQUIRED), e.g. (do read below for more details!):
* <itemizedlist>
* <listitem><para>decoder-audio/x-vorbis</para></listitem>
* <listitem><para>decoder-application/ogg</para></listitem>
* <listitem><para>decoder-audio/mpeg, mpegversion=(int)4</para></listitem>
* <listitem><para>decoder-video/mpeg, systemstream=(boolean)true, mpegversion=(int)2</para></listitem>
</itemizedlist>
* </para></listitem>
* <listitem><para>
* encoder-$(CAPS_REQUIRED), e.g. encoder-audio/x-vorbis
* </para></listitem>
* </itemizedlist>
* </para></listitem>
* <listitem><para>
* optional further fields not yet specified
* </para></listitem>
* </itemizedlist>
* </para>
* <para>
* An entire ID string might then look like this, for example:
* <literal>
* gstreamer|0.10|totem|Vorbis audio decoder|decoder-audio/x-vorbis
* </literal>
* </para>
* <para>
* Plugin installers parsing this ID string should expect further fields also
* separated by '|' symbols and either ignore them, warn the user, or error
* out when encountering them.
* </para>
* <para>
* Those unfamiliar with the GStreamer 'caps' system should note a few things
* about the caps string used in the above decoder/encoder case:
* <itemizedlist>
* <listitem><para>
* the first part ("video/mpeg") of the caps string is a GStreamer media
* type and <emphasis>not</emphasis> a MIME type. Wherever possible, the
* GStreamer media type will be the same as the corresponding MIME type,
* but often it is not.
* </para></listitem>
* <listitem><para>
* a caps string may or may not have additional comma-separated fields
* of various types (as seen in the examples above)
* </para></listitem>
* <listitem><para>
* the caps string of a 'required' component (as above) will always have
* fields with fixed values, whereas an introspected string (see below)
* may have fields with non-fixed values. Compare for example:
* <itemizedlist>
* <listitem><para>
* <literal>audio/mpeg, mpegversion=(int)4</literal> vs.
* <literal>audio/mpeg, mpegversion=(int){2, 4}</literal>
* </para></listitem>
* <listitem><para>
* <literal>video/mpeg, mpegversion=(int)2</literal> vs.
* <literal>video/mpeg, systemstream=(boolean){ true, false}, mpegversion=(int)[1, 2]</literal>
* </para></listitem>
* </itemizedlist>
* </para></listitem>
* </itemizedlist>
* </para>
* <para>
* <emphasis>4. Exit codes the installer should return</emphasis>
* </para>
* <para>
* The installer should return one of the following exit codes when it exits:
* <itemizedlist>
* <listitem><para>
* 0 if all of the requested plugins could be installed
* runtime by setting the GST_INSTALL_PLUGINS_HELPER environment
* variable. This can be useful for testing/debugging purposes.
*
* ## 2. Arguments passed to the install helper
*
* GStreamer will pass the following arguments to the install helper (this
* is in addition to the path of the executable itself, which is by
* convention argv[0]):
*
* - none to many optional arguments in the form of `--foo-bar=val`.
* Example: `--transient-for=XID` where XID is the X Window ID of the
* main window of the calling application (so the installer can make
* itself transient to that window). Unknown optional arguments should
* be ignored by the installer.
*
* - one 'installer detail string' argument for each plugin to be
* installed; these strings will have a `gstreamer` prefix; the exact
* format of the detail string is explained below
*
* ## 3. Detail string describing the missing plugin
*
* The string is in UTF-8 encoding and is made up of several fields,
* separated by '|' characters (but neither the first nor the last
* character is a '|'). The fields are:
*
 * - plugin system identifier, i.e. "gstreamer"
* This identifier determines the format of the rest of the detail
* string. Automatic plugin installers should not process detail
* strings with unknown identifiers. This allows other plugin-based
* libraries to use the same mechanism for their automatic plugin
* installation needs, or for the format to be changed should it turn
* out to be insufficient.
* - plugin system version, e.g. "0.10"
* This is required so that when there is a GStreamer-0.12 or
* GStreamer-1.0 at some point in future, the different major versions
* can still co-exist and use the same plugin install mechanism in the
* same way.
* - application identifier, e.g. "totem"
* This may also be in the form of "pid/12345" if the program name
* can't be obtained for some reason.
* - human-readable localised description of the required component, e.g.
* "Vorbis audio decoder"
* - identifier string for the required component (see below for details
* about how to map this to the package/plugin that needs installing),
* e.g.
* - urisource-$(PROTOCOL_REQUIRED), e.g. urisource-http or
* urisource-mms
* - element-$(ELEMENT_REQUIRED), e.g. element-videoconvert
* - decoder-$(CAPS_REQUIRED), e.g. (do read below for more
* details!):
* - decoder-audio/x-vorbis
* - decoder-application/ogg
* - decoder-audio/mpeg, mpegversion=(int)4
* - decoder-video/mpeg, systemstream=(boolean)true,
* mpegversion=(int)2
* - encoder-$(CAPS_REQUIRED), e.g. encoder-audio/x-vorbis
* - optional further fields not yet specified
*
* An entire ID string might then look like this, for example:
* `gstreamer|0.10|totem|Vorbis audio decoder|decoder-audio/x-vorbis`
*
* Plugin installers parsing this ID string should expect further fields
* also separated by '|' symbols and either ignore them, warn the user, or
* error out when encountering them.
*
* Those unfamiliar with the GStreamer 'caps' system should note a few
* things about the caps string used in the above decoder/encoder case:
*
* - the first part ("video/mpeg") of the caps string is a GStreamer
* media type and *not* a MIME type. Wherever possible, the GStreamer
* media type will be the same as the corresponding MIME type, but
* often it is not.
* - a caps string may or may not have additional comma-separated fields
* of various types (as seen in the examples above)
* - the caps string of a 'required' component (as above) will always
* have fields with fixed values, whereas an introspected string (see
* below) may have fields with non-fixed values. Compare for example:
* - `audio/mpeg, mpegversion=(int)4` vs.
* `audio/mpeg, mpegversion=(int){2, 4}`
* - `video/mpeg, mpegversion=(int)2` vs.
* `video/mpeg, systemstream=(boolean){ true, false}, mpegversion=(int)[1, 2]`
*
* ## 4. Exit codes the installer should return
*
* The installer should return one of the following exit codes when it
* exits:
*
* - 0 if all of the requested plugins could be installed
* (#GST_INSTALL_PLUGINS_SUCCESS)
* </para></listitem>
* <listitem><para>
* 1 if no appropriate installation candidate for any of the requested
* plugins could be found. Only return this if nothing has been installed
* (#GST_INSTALL_PLUGINS_NOT_FOUND)
* </para></listitem>
* <listitem><para>
* 2 if an error occured during the installation. The application will
* - 1 if no appropriate installation candidate for any of the requested
* plugins could be found. Only return this if nothing has been
* installed (#GST_INSTALL_PLUGINS_NOT_FOUND)
* - 2 if an error occurred during the installation. The application will
* assume that the user will already have seen an error message by the
* installer in this case and will usually not show another one
* (#GST_INSTALL_PLUGINS_ERROR)
* </para></listitem>
* <listitem><para>
* 3 if some of the requested plugins could be installed, but not all
* - 3 if some of the requested plugins could be installed, but not all
* (#GST_INSTALL_PLUGINS_PARTIAL_SUCCESS)
* </para></listitem>
* <listitem><para>
* 4 if the user aborted the installation (#GST_INSTALL_PLUGINS_USER_ABORT)
* </para></listitem>
* </itemizedlist>
* </para>
* <para>
* <emphasis>5. How to map the required detail string to packages</emphasis>
* </para>
* <para>
* - 4 if the user aborted the installation
* (#GST_INSTALL_PLUGINS_USER_ABORT)
*
* ## 5. How to map the required detail string to packages
*
* It is up to the vendor to find mechanism to map required components from
* the detail string to the actual packages/plugins to install. This could
* be a hardcoded list of mappings, for example, or be part of the packaging
* system metadata.
* </para>
* <para>
* be a hardcoded list of mappings, for example, or be part of the
* packaging system metadata.
*
* GStreamer plugin files can be introspected for this information. The
* <literal>gst-inspect</literal> utility has a special command line option
* that will output information similar to what is required. For example
* <command>
* `gst-inspect` utility has a special command line option that will output
* information similar to what is required. For example `
* $ gst-inspect-1.0 --print-plugin-auto-install-info /path/to/libgstvorbis.so
* </command>
* should output something along the lines of
* <computeroutput>
* decoder-audio/x-vorbis
* element-vorbisdec
* element-vorbisenc
* element-vorbisparse
* element-vorbistag
* encoder-audio/x-vorbis
* </computeroutput>
* Note that in the encoder and decoder case the introspected caps can be more
* complex with additional fields, e.g.
* <literal>audio/mpeg,mpegversion=(int){2,4}</literal>, so they will not
* always exactly match the caps wanted by the application. It is up to the
* installer to deal with this (either by doing proper caps intersection using
* the GStreamer #GstCaps API, or by only taking into account the media type).
* </para>
* <para>
* `decoder-audio/x-vorbis`, `element-vorbisdec` `element-vorbisenc`
* `element-vorbisparse`, `element-vorbistag`, `encoder-audio/x-vorbis`
*
* Note that in the encoder and decoder case the introspected caps can be
* more complex with additional fields, e.g.
* `audio/mpeg,mpegversion=(int){2,4}`, so they will not always exactly
* match the caps wanted by the application. It is up to the installer to
* deal with this (either by doing proper caps intersection using the
* GStreamer #GstCaps API, or by only taking into account the media type).
*
* Another potential source of problems are plugins such as ladspa or
* libvisual where the list of elements depends on the installed
* ladspa/libvisual plugins at the time. This is also up to the distribution
* to handle (but usually not relevant for playback applications).
* </para>
* </refsect2>
* ladspa/libvisual plugins at the time. This is also up to the
* distribution to handle (but usually not relevant for playback
* applications).
*/
#ifdef HAVE_CONFIG_H
@ -455,11 +360,11 @@ gst_install_plugins_context_set_desktop_id (GstInstallPluginsContext * ctx,
*
* GTK+/GNOME applications should be able to create a startup notification ID
* like this:
* <programlisting>
* |[
* timestamp = gtk_get_current_event_time ();
* startup_id = g_strdup_printf ("_TIME%u", timestamp);
* ...
* </programlisting>
* ]|
*
* Since: 1.6
*/
@ -487,7 +392,7 @@ void gst_install_plugins_context_set_startup_notification_id
*
* Gtk+/Gnome application should be able to obtain the XID of the top-level
* window like this:
* <programlisting>
* |[
* #include <gtk/gtk.h>
* #ifdef GDK_WINDOWING_X11
* #include <gdk/gdkx.h>
@ -497,7 +402,8 @@ void gst_install_plugins_context_set_startup_notification_id
* xid = GDK_WINDOW_XWINDOW (GTK_WIDGET (application_window)->window);
* ##endif
* ...
* </programlisting>
* ]|
*
*/
void
gst_install_plugins_context_set_xid (GstInstallPluginsContext * ctx, guint xid)
@ -699,7 +605,7 @@ gst_install_plugins_installer_exited (GPid pid, gint status, gpointer data)
* @ctx: (allow-none): a #GstInstallPluginsContext, or NULL
* @func: (scope async): the function to call when the installer program returns
* @user_data: (closure): the user data to pass to @func when called, or NULL
*
*
* Requests plugin installation without blocking. Once the plugins have been
* installed or installation has failed, @func will be called with the result
* of the installation and your provided @user_data pointer.
@ -756,7 +662,7 @@ gst_install_plugins_async (const gchar * const *details,
* @details: (array zero-terminated=1) (transfer none): NULL-terminated array
* of installer string details
* @ctx: (allow-none): a #GstInstallPluginsContext, or NULL
*
*
* Requests plugin installation and block until the plugins have been
* installed or installation has failed.
*
@ -793,7 +699,7 @@ gst_install_plugins_sync (const gchar * const *details,
/**
* gst_install_plugins_return_get_name:
* @ret: the return status code
*
*
* Convenience function to return the descriptive string associated
* with a status code. This function returns English strings and
* should not be used for user messages. It is here only to assist
@ -835,7 +741,7 @@ gst_install_plugins_return_get_name (GstInstallPluginsReturn ret)
/**
* gst_install_plugins_installation_in_progress:
*
*
* Checks whether plugin installation (initiated by this application only)
* is currently in progress.
*
@ -849,7 +755,7 @@ gst_install_plugins_installation_in_progress (void)
/**
* gst_install_plugins_supported:
*
*
* Checks whether plugin installation is likely to be supported by the
* current environment. This currently only checks whether the helper script
* that is to be provided by the distribution or operating system vendor

View file

@ -19,35 +19,27 @@
/**
* SECTION:gstpbutilsmissingplugins
* @title: Missing plugins
* @short_description: Create, recognise and parse missing-plugins messages
*
* <refsect2>
* <para>
* Functions to create, recognise and parse missing-plugins messages for
* applications and elements.
* </para>
* <para>
*
* Missing-plugin messages are posted on the bus by elements like decodebin
* or playbin if they can't find an appropriate source element or decoder
* element. The application can use these messages for two things:
* <itemizedlist>
* <listitem><para>
* concise error/problem reporting to the user mentioning what exactly
*
* * concise error/problem reporting to the user mentioning what exactly
* is missing, see gst_missing_plugin_message_get_description()
* </para></listitem>
* <listitem><para>
* initiate installation of missing plugins, see
*
* * initiate installation of missing plugins, see
* gst_missing_plugin_message_get_installer_detail() and
* gst_install_plugins_async()
* </para></listitem>
* </itemizedlist>
* </para>
* <para>
*
* Applications may also create missing-plugin messages themselves to install
* required elements that are missing, using the install mechanism mentioned
* above.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
@ -397,7 +389,7 @@ missing_structure_get_caps_detail (const GstStructure * s, GstCaps ** p_caps)
* Returns an opaque string containing all the details about the missing
* element to be passed to an external installer called via
* gst_install_plugins_async() or gst_install_plugins_sync().
*
*
* This function is mainly for applications that call external plugin
* installation mechanisms using one of the two above-mentioned functions.
*
@ -653,7 +645,7 @@ gst_installer_detail_new (gchar * description, const gchar * type,
* Returns an opaque string containing all the details about the missing
* element to be passed to an external installer called via
* gst_install_plugins_async() or gst_install_plugins_sync().
*
*
* This function is mainly for applications that call external plugin
* installation mechanisms using one of the two above-mentioned functions in
* the case where the application knows exactly what kind of plugin it is
@ -681,7 +673,7 @@ gst_missing_uri_source_installer_detail_new (const gchar * protocol)
* Returns an opaque string containing all the details about the missing
* element to be passed to an external installer called via
* gst_install_plugins_async() or gst_install_plugins_sync().
*
*
* This function is mainly for applications that call external plugin
* installation mechanisms using one of the two above-mentioned functions in
* the case where the application knows exactly what kind of plugin it is
@ -709,7 +701,7 @@ gst_missing_uri_sink_installer_detail_new (const gchar * protocol)
* Returns an opaque string containing all the details about the missing
* element to be passed to an external installer called via
* gst_install_plugins_async() or gst_install_plugins_sync().
*
*
* This function is mainly for applications that call external plugin
* installation mechanisms using one of the two above-mentioned functions in
* the case where the application knows exactly what kind of plugin it is
@ -736,7 +728,7 @@ gst_missing_element_installer_detail_new (const gchar * factory_name)
* Returns an opaque string containing all the details about the missing
* element to be passed to an external installer called via
* gst_install_plugins_async() or gst_install_plugins_sync().
*
*
* This function is mainly for applications that call external plugin
* installation mechanisms using one of the two above-mentioned functions in
* the case where the application knows exactly what kind of plugin it is
@ -774,7 +766,7 @@ gst_missing_decoder_installer_detail_new (const GstCaps * decode_caps)
* Returns an opaque string containing all the details about the missing
* element to be passed to an external installer called via
* gst_install_plugins_async() or gst_install_plugins_sync().
*
*
* This function is mainly for applications that call external plugin
* installation mechanisms using one of the two above-mentioned functions in
* the case where the application knows exactly what kind of plugin it is

View file

@ -19,53 +19,37 @@
/**
* SECTION:gstpbutils
* @title: Pbutils
* @short_description: General Application and Plugin Utility Library
*
* <refsect2>
* <para>
* libgstpbutils is a general utility library for plugins and applications.
* It currently provides the
* following:
* </para>
* <itemizedlist>
* <listitem>
* <para>
* human-readable description strings of codecs, elements, sources, decoders,
*
* * human-readable description strings of codecs, elements, sources, decoders,
* encoders, or sinks from decoder/encoder caps, element names, or protocol
* names.
* </para>
* </listitem>
* <listitem>
* <para>
* support for applications to initiate installation of missing plugins (if
*
* * support for applications to initiate installation of missing plugins (if
* this is supported by the distribution or operating system used)
* </para>
* </listitem>
* <listitem>
* <para>
* API for GStreamer elements to create missing-plugin messages in order to
*
* * API for GStreamer elements to create missing-plugin messages in order to
* communicate to the application that a certain type of plugin is missing
* (decoder, encoder, URI protocol source, URI protocol sink, named element)
* </para>
* </listitem>
* <listitem>
* <para>
* API for applications to recognise and handle missing-plugin messages
* </para>
* </listitem>
* </itemizedlist>
* <title>Linking to this library</title>
* <para>
*
* * API for applications to recognise and handle missing-plugin messages
*
* ## Linking to this library
*
* You should obtain the required CFLAGS and LIBS using pkg-config on the
* gstreamer-plugins-base-0.10 module. You will then also need to add
* '-lgstpbutils-0.10' manually to your LIBS line.
* </para>
* <title>Library initialisation</title>
* <para>
*
* ## Library initialisation
*
* Before using any of its functions, applications and plugins must call
* gst_pb_utils_init() to initialise the library.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -362,7 +362,7 @@ too_small:
* containing extradata for this particular stream (e.g.
* palette, codec initialization data).
*
* Parses a video stream´s strf structure plus optionally some
* Parses a video stream's strf structure plus optionally some
* extradata from input data. This function takes ownership of @buf.
*
* Returns: TRUE if parsing succeeded, otherwise FALSE. The stream
@ -460,7 +460,7 @@ too_small:
* containing extradata for this particular stream (e.g.
* codec initialization data).
*
* Parses an audio stream´s strf structure plus optionally some
* Parses an audio stream's strf structure plus optionally some
* extradata from input data. This function takes ownership of @buf.
* use.
*

View file

@ -20,6 +20,7 @@
*/
/**
* SECTION:gstriff
* @title: Riff utilities
* @short_description: Riff fileformat utility functions.
*
* A collection of functions to handle riff base files, such as avi, wav and

View file

@ -22,24 +22,21 @@
/**
* SECTION:gstrtcpbuffer
* @title: GstRTCPBuffer
* @short_description: Helper methods for dealing with RTCP buffers
* @see_also: #GstRTPBasePayload, #GstRTPBaseDepayload, #gstrtpbuffer
*
* Note: The API in this module is not yet declared stable.
*
* <refsect2>
* <para>
* The GstRTPCBuffer helper functions makes it easy to parse and create regular
* The GstRTCPBuffer helper functions make it easy to parse and create regular
* #GstBuffer objects that contain compound RTCP packets. These buffers are typically
* of 'application/x-rtcp' #GstCaps.
* </para>
* <para>
*
* An RTCP buffer consists of 1 or more #GstRTCPPacket structures that you can
* retrieve with gst_rtcp_buffer_get_first_packet(). #GstRTCPPacket acts as a pointer
* into the RTCP buffer; you can move to the next packet with
* gst_rtcp_packet_move_to_next().
* </para>
* </refsect2>
*
*/
#include <string.h>
@ -497,7 +494,7 @@ end:
* @type: the #GstRTCPType of the new packet
* @packet: pointer to new packet
*
* Add a new packet of @type to @rtcp. @packet will point to the newly created
* Add a new packet of @type to @rtcp. @packet will point to the newly created
* packet.
*
* Returns: %TRUE if the packet could be created. This function returns %FALSE
@ -677,7 +674,7 @@ gst_rtcp_packet_get_count (GstRTCPPacket * packet)
* gst_rtcp_packet_get_length:
* @packet: a valid #GstRTCPPacket
*
* Get the length field of @packet. This is the length of the packet in
* Get the length field of @packet. This is the length of the packet in
* 32-bit words minus one.
*
* Returns: The length field of @packet.
@ -737,7 +734,7 @@ gst_rtcp_packet_sr_get_sender_info (GstRTCPPacket * packet, guint32 * ssrc,
/**
* gst_rtcp_packet_sr_set_sender_info:
* @packet: a valid SR #GstRTCPPacket
* @ssrc: the SSRC
* @ssrc: the SSRC
* @ntptime: the NTP time
* @rtptime: the RTP time
* @packet_count: the packet count

View file

@ -19,6 +19,7 @@
/**
* SECTION:gstrtpbaseaudiopayload
* @title: GstRTPBaseAudioPayload
* @short_description: Base class for audio RTP payloader
*
* Provides a base class for audio RTP payloaders for frame or sample based
@ -36,9 +37,8 @@
* sent in a last RTP packet. In the case of frame based codecs, the resulting
* RTP packets always contain full frames.
*
* <refsect2>
* <title>Usage</title>
* <para>
* ## Usage
*
* To use this base class, your child element needs to call either
* gst_rtp_base_audio_payload_set_frame_based() or
* gst_rtp_base_audio_payload_set_sample_based(). This is usually done in the
@ -50,8 +50,7 @@
* must set any variables or call/override any functions required by that base
* class. The child element does not need to override any other functions
* specific to GstRTPBaseAudioPayload.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -20,6 +20,7 @@
/**
* SECTION:gstrtpbasedepayload
* @title: GstRTPBaseDepayload
* @short_description: Base class for RTP depayloader
*
* Provides a base class for RTP depayloaders
@ -150,55 +151,17 @@ gst_rtp_base_depayload_class_init (GstRTPBaseDepayloadClass * klass)
* application/x-rtp-depayload-stats containing the following fields relating to
* the last processed buffer and current state of the stream being depayloaded:
*
* <variablelist>
* <varlistentry>
* <term>clock-rate</term>
* <listitem><para>#G_TYPE_UINT, clock-rate of the
* stream</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>npt-start</term>
* <listitem><para>#G_TYPE_UINT64, time of playback start
* </para></listitem>
* </varlistentry>
* <varlistentry>
* <term>npt-stop</term>
* <listitem><para>#G_TYPE_UINT64, time of playback stop
* </para></listitem>
* </varlistentry>
* <varlistentry>
* <term>play-speed</term>
* <listitem><para>#G_TYPE_DOUBLE, the playback speed
* </para></listitem>
* </varlistentry>
* <varlistentry>
* <term>play-scale</term>
* <listitem><para>#G_TYPE_DOUBLE, the playback scale
* </para></listitem>
* </varlistentry>
* <varlistentry>
* <term>running-time-dts</term>
* <listitem><para>#G_TYPE_UINT64, the last running-time of the
* * `clock-rate`: #G_TYPE_UINT, clock-rate of the stream
* * `npt-start`: #G_TYPE_UINT64, time of playback start
* * `npt-stop`: #G_TYPE_UINT64, time of playback stop
* * `play-speed`: #G_TYPE_DOUBLE, the playback speed
* * `play-scale`: #G_TYPE_DOUBLE, the playback scale
* * `running-time-dts`: #G_TYPE_UINT64, the last running-time of the
* last DTS
* </para></listitem>
* </varlistentry>
* <varlistentry>
* <term>running-time-pts</term>
* <listitem><para>#G_TYPE_UINT64, the last running-time of the
* * `running-time-pts`: #G_TYPE_UINT64, the last running-time of the
* last PTS
* </para></listitem>
* </varlistentry>
* <varlistentry>
* <term>seqnum</term>
* <listitem><para>#G_TYPE_UINT, the last seen seqnum
* </para></listitem>
* </varlistentry>
* <varlistentry>
* <term>timestamp</term>
* <listitem><para>#G_TYPE_UINT, the last seen RTP timestamp
* </para></listitem>
* </varlistentry>
* </variablelist>
* * `seqnum`: #G_TYPE_UINT, the last seen seqnum
* * `timestamp`: #G_TYPE_UINT, the last seen RTP timestamp
**/
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_STATS,
g_param_spec_boxed ("stats", "Statistics", "Various statistics",

View file

@ -14,6 +14,7 @@
/**
* SECTION:gstrtpbasepayload
* @title: GstRTPBasePayload
* @short_description: Base class for RTP payloader
*
* Provides a base class for RTP payloaders
@ -275,48 +276,14 @@ gst_rtp_base_payload_class_init (GstRTPBasePayloadClass * klass)
* application/x-rtp-payload-stats containing the following fields relating to
* the last processed buffer and current state of the stream being payloaded:
*
* <variablelist>
* <varlistentry>
* <term>clock-rate</term>
* <listitem><para>#G_TYPE_UINT, clock-rate of the
* stream</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>running-time</term>
* <listitem><para>#G_TYPE_UINT64, running time
* </para></listitem>
* </varlistentry>
* <varlistentry>
* <term>seqnum</term>
* <listitem><para>#G_TYPE_UINT, sequence number, same as
* #GstRTPBasePayload:seqnum</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>timestamp</term>
* <listitem><para>#G_TYPE_UINT, RTP timestamp, same as
* #GstRTPBasePayload:timestamp</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>ssrc</term>
* <listitem><para>#G_TYPE_UINT, The SSRC in use
* </para></listitem>
* </varlistentry>
* <varlistentry>
* <term>pt</term>
* <listitem><para>#G_TYPE_UINT, The Payload type in use, same as
* #GstRTPBasePayload:pt</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>seqnum-offset</term>
* <listitem><para>#G_TYPE_UINT, The current offset added to the
* seqnum</para></listitem>
* </varlistentry>
* <varlistentry>
* <term>timestamp-offset</term>
* <listitem><para>#G_TYPE_UINT, The current offset added to the
* timestamp</para></listitem>
* </varlistentry>
* </variablelist>
* * `clock-rate`: #G_TYPE_UINT, clock-rate of the stream
* * `running-time`: #G_TYPE_UINT64, running time
* * `seqnum`: #G_TYPE_UINT, sequence number, same as #GstRTPBasePayload:seqnum
* * `timestamp`: #G_TYPE_UINT, RTP timestamp, same as #GstRTPBasePayload:timestamp
* * `ssrc`: #G_TYPE_UINT, The SSRC in use
* * `pt`: #G_TYPE_UINT, The Payload type in use, same as #GstRTPBasePayload:pt
* * `seqnum-offset`: #G_TYPE_UINT, The current offset added to the seqnum
* * `timestamp-offset`: #G_TYPE_UINT, The current offset added to the timestamp
**/
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_STATS,
g_param_spec_boxed ("stats", "Statistics", "Various statistics",

View file

@ -20,16 +20,14 @@
/**
* SECTION:gstrtpbuffer
* @title: GstRTPBuffer
* @short_description: Helper methods for dealing with RTP buffers
* @see_also: #GstRTPBasePayload, #GstRTPBaseDepayload, gstrtcpbuffer
*
* <refsect2>
* <para>
* The GstRTPBuffer helper functions makes it easy to parse and create regular
* The GstRTPBuffer helper functions make it easy to parse and create regular
* #GstBuffer objects that contain RTP payloads. These buffers are typically of
* 'application/x-rtp' #GstCaps.
* </para>
* </refsect2>
*
*/
#include "gstrtpbuffer.h"
@ -658,7 +656,7 @@ gst_rtp_buffer_pad_to (GstRTPBuffer * rtp, guint len)
* @rtp: the RTP packet
*
* Check if the extension bit is set on the RTP packet in @buffer.
*
*
* Returns: TRUE if @buffer has the extension bit set.
*/
gboolean
@ -693,7 +691,7 @@ gst_rtp_buffer_set_extension (GstRTPBuffer * rtp, gboolean extension)
*
* If @buffer did not contain an extension, this function will return %FALSE
* with @bits, @data and @wordlen unchanged.
*
*
* Returns: TRUE if @buffer had the extension bit set.
*/
gboolean
@ -891,7 +889,7 @@ gst_rtp_buffer_set_extension_data (GstRTPBuffer * rtp, guint16 bits,
* @rtp: the RTP packet
*
* Get the SSRC of the RTP packet in @buffer.
*
*
* Returns: the SSRC of @buffer in host order.
*/
guint32
@ -918,7 +916,7 @@ gst_rtp_buffer_set_ssrc (GstRTPBuffer * rtp, guint32 ssrc)
* @rtp: the RTP packet
*
* Get the CSRC count of the RTP packet in @buffer.
*
*
* Returns: the CSRC count of @buffer.
*/
guint8
@ -933,7 +931,7 @@ gst_rtp_buffer_get_csrc_count (GstRTPBuffer * rtp)
* @idx: the index of the CSRC to get
*
* Get the CSRC at index @idx in @buffer.
*
*
* Returns: the CSRC at index @idx in host order.
*/
guint32

View file

@ -19,13 +19,10 @@
/**
* SECTION:gstrtphdrext
* @title: GstRtphdrext
* @short_description: Helper methods for dealing with RTP header extensions
* @see_also: #GstRTPBasePayload, #GstRTPBaseDepayload, gstrtpbuffer
*
* <refsect2>
* <para>
* </para>
* </refsect2>
*/
#include "gstrtphdrext.h"

View file

@ -22,16 +22,14 @@
/**
* SECTION:gstrtppayloads
* @title: GstRTPPayloadInfo
* @short_description: Helper methods for dealing with RTP payloads
* @see_also: gstrtpbuffer
*
* <refsect2>
* <para>
* The GstRTPPayloads helper functions make it easy to deal with static and dynamic
* payloads. Its main purpose is to retrieve properties such as the default clock-rate
* payloads. Its main purpose is to retrieve properties such as the default clock-rate
* and get session bandwidth information.
* </para>
* </refsect2>
*
*/
#include <string.h>

View file

@ -56,7 +56,6 @@ G_BEGIN_DECLS
* @GST_RTP_PAYLOAD_MP2T: MPEG-2 transport stream (RFC 2250)
* @GST_RTP_PAYLOAD_H263: Video H263 (RFC 2190)
*
*
* Standard predefined fixed payload types.
*
* The official list is at:

View file

@ -42,6 +42,7 @@
/**
* SECTION:gstrtspconnection
* @title: GstRTSPConnection
* @short_description: manage RTSP connections
* @see_also: gstrtspurl
*

View file

@ -42,10 +42,11 @@
/**
* SECTION:gstrtspdefs
* @title: GstRtspdefs
* @short_description: common RTSP defines
* @see_also: gstrtspurl, gstrtspconnection
*
* Provides common defines for the RTSP library.
*
* Provides common defines for the RTSP library.
*/
#ifdef HAVE_CONFIG_H

View file

@ -21,14 +21,12 @@
/**
* SECTION:gstrtspextension
* @title: GstRTSPExtension
* @short_description: Interface for extending RTSP protocols
*
* <refsect2>
* <para>
* This interface is implemented e.g. by the Windows Media Streaming RTSP
* extension (rtspwms) and the RealMedia RTSP extension (rtspreal).
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -44,9 +44,10 @@
/**
* SECTION:gstrtspmessage
* @title: GstRTSPMessage
* @short_description: RTSP messages
* @see_also: gstrtspconnection
*
*
* Provides methods for creating and parsing request, response and data messages.
*/
@ -429,7 +430,7 @@ gst_rtsp_message_parse_data (GstRTSPMessage * msg, guint8 * channel)
* @msg: a #GstRTSPMessage
*
* Unset the contents of @msg so that it becomes an uninitialized
* #GstRTSPMessage again. This function is mostly used in combination with
* #GstRTSPMessage again. This function is mostly used in combination with
* gst_rtsp_message_init_request(), gst_rtsp_message_init_response() and
* gst_rtsp_message_init_data() on stack allocated #GstRTSPMessage structures.
*

View file

@ -42,8 +42,9 @@
/**
* SECTION:gstrtsprange
* @title: GstRTSPTimeRange
* @short_description: dealing with time ranges
*
*
* Provides helper functions to deal with time ranges.
*/

View file

@ -43,8 +43,9 @@
/**
* SECTION:gstrtsptransport
* @title: GstRTSPRange
* @short_description: dealing with RTSP transports
*
*
* Provides helper functions to deal with RTSP transport strings.
*/
@ -146,7 +147,7 @@ G_STMT_START { \
* Allocate a new initialized #GstRTSPTransport. Use gst_rtsp_transport_free()
* after usage.
*
* Returns: a #GstRTSPResult.
* Returns: a #GstRTSPResult.
*/
GstRTSPResult
gst_rtsp_transport_new (GstRTSPTransport ** transport)
@ -168,7 +169,7 @@ gst_rtsp_transport_new (GstRTSPTransport ** transport)
*
* Initialize @transport so that it can be used.
*
* Returns: #GST_RTSP_OK.
* Returns: #GST_RTSP_OK.
*/
GstRTSPResult
gst_rtsp_transport_init (GstRTSPTransport * transport)
@ -284,7 +285,7 @@ get_default_lower_trans (GstRTSPTransport * transport)
* @manager will contain an element name or #NULL when no manager is
* needed/available for @trans.
*
* Returns: #GST_RTSP_OK.
* Returns: #GST_RTSP_OK.
*/
GstRTSPResult
gst_rtsp_transport_get_manager (GstRTSPTransMode trans, const gchar ** manager,

View file

@ -42,8 +42,9 @@
/**
* SECTION:gstrtspurl
* @title: GstRTSPUrl
* @short_description: handling RTSP urls
*
*
* Provides helper functions to handle RTSP urls.
*/
@ -308,7 +309,7 @@ gst_rtsp_url_get_port (const GstRTSPUrl * url, guint16 * port)
* gst_rtsp_url_get_request_uri:
* @url: a #GstRTSPUrl
*
* Get a newly allocated string describing the request URI for @url.
* Get a newly allocated string describing the request URI for @url.
*
* Returns: a string with the request URI. g_free() after usage.
*/

View file

@ -21,14 +21,11 @@
/**
* SECTION:gstmikey
* @title: GstMIKEYMessage
* @short_description: Helper methods for dealing with MIKEY messages
*
* <refsect2>
* <para>
* The GstMIKEY helper functions make it easy to parse and create MIKEY
* messages.
* </para>
* </refsect2>
*
* Since: 1.4
*/

View file

@ -42,14 +42,12 @@
/**
* SECTION:gstsdpmessage
* @title: GstSDPMessage
* @short_description: Helper methods for dealing with SDP messages
*
* <refsect2>
* <para>
* The GstSDPMessage helper functions make it easy to parse and create SDP
* messages.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -21,6 +21,7 @@
/**
* SECTION:gsttagexif
* @title: GstExiftag
* @short_description: tag mappings and support functions for plugins
* dealing with exif tags
* @see_also: #GstTagList

View file

@ -21,16 +21,14 @@
/**
* SECTION:gsttagid3
* @title: ID3 tag utils
* @short_description: tag mappings and support functions for plugins
* dealing with ID3v1 and ID3v2 tags
* @see_also: #GstTagList
*
* <refsect2>
* <para>
*
* Contains various utility functions for plugins to parse or create
* ID3 tags and map ID3v2 identifiers to and from GStreamer identifiers.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
@ -305,7 +303,7 @@ gst_tag_list_new_from_id3v1 (const guint8 * data)
/**
* gst_tag_id3_genre_count:
*
* Gets the number of ID3v1 genres that can be identified. Winamp genres are
* Gets the number of ID3v1 genres that can be identified. Winamp genres are
* included.
*
* Returns: the number of ID3v1 genres that can be identified

View file

@ -20,12 +20,11 @@
/**
* SECTION:gsttagdemux
* @title: GstTagDemux
* @see_also: GstApeDemux, GstID3Demux
* @short_description: Base class for demuxing tags that are in chunks
* directly at the beginning or at the end of a file
*
* <refsect2>
* <para>
*
* Provides a base class for demuxing tags at the beginning or end of a
* stream and handles things like typefinding, querying, seeking, and
* different modes of operation (chain-based, pull_range-based, and providing
@ -35,37 +34,26 @@
* there was no tag at all. Also, once the tag has been parsed, GstTagDemux
* will try to determine the media type of the resulting stream and add a
* source pad with the appropriate caps in order to facilitate auto-plugging.
* </para>
* <title>Deriving from GstTagDemux</title>
* <para>
*
* ## Deriving from GstTagDemux
*
* Subclasses have to do four things:
* <itemizedlist>
* <listitem><para>
* In their base init function, they must add a pad template for the sink
* pad to the element class, describing the media type they can parse in
* the caps of the pad template.
* </para></listitem>
* <listitem><para>
* In their class init function, they must override
* GST_TAG_DEMUX_CLASS(demux_klass)->identify_tag with their own identify
* function.
* </para></listitem>
* <listitem><para>
* In their class init function, they must override
*
* * In their base init function, they must add a pad template for the sink
* pad to the element class, describing the media type they can parse in
* the caps of the pad template.
* * In their class init function, they must override
* GST_TAG_DEMUX_CLASS(demux_klass)->identify_tag with their own identify
* function.
* * In their class init function, they must override
* GST_TAG_DEMUX_CLASS(demux_klass)->parse_tag with their own parse
* function.
* </para></listitem>
* <listitem><para>
* In their class init function, they must also set
* GST_TAG_DEMUX_CLASS(demux_klass)->min_start_size and/or
* * In their class init function, they must also set
* GST_TAG_DEMUX_CLASS(demux_klass)->min_start_size and/or
* GST_TAG_DEMUX_CLASS(demux_klass)->min_end_size to the minimum size required
* for the identify function to decide whether the stream has a supported tag
* or not. A class parsing ID3v1 tags, for example, would set min_end_size to
* 128 bytes.
* </para></listitem>
* </itemizedlist>
* </para>
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
@ -120,9 +108,9 @@ struct _GstTagDemuxPrivate
GList *pending_events;
};
/* Require at least 8kB of data before we attempt typefind.
/* Require at least 8kB of data before we attempt typefind.
* Seems a decent value based on test files
* 40kB is massive overkill for the maximum, I think, but it
* 40kB is massive overkill for the maximum, I think, but it
* doesn't do any harm (tpm: increased to 64kB after watching
* typefinding fail on a wavpack file that needed 42kB to succeed) */
#define TYPE_FIND_MIN_SIZE 8192
@ -552,7 +540,7 @@ gst_tag_demux_chain_parse_tag (GstTagDemux * demux)
g_assert (gst_buffer_is_writable (collect));
/* If we receive a buffer that's from the middle of the file,
/* If we receive a buffer that's from the middle of the file,
* we can't read tags so move to typefinding */
if (GST_BUFFER_OFFSET_IS_VALID (collect) && GST_BUFFER_OFFSET (collect) != 0) {
GST_DEBUG_OBJECT (demux, "Received buffer from non-zero offset %"
@ -1571,7 +1559,7 @@ gst_tag_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
/* 1: */
/* If we can activate pull_range upstream, then read any end and start
* tags, otherwise activate in push mode and the chain function will
* tags, otherwise activate in push mode and the chain function will
* collect buffers, read the start tag and output a buffer to end
* preroll.
*/
@ -1656,7 +1644,7 @@ gst_tag_demux_read_range (GstTagDemux * demux, GstObject * parent,
if (ret != GST_FLOW_OK)
return ret;
/* Adjust offset and length of the request to trim off tag information.
/* Adjust offset and length of the request to trim off tag information.
* For the returned buffer, adjust the output offset to match what downstream
* should see */
in_offset = offset + demux->priv->strip_start;

View file

@ -22,33 +22,26 @@
/**
* SECTION:gsttagmux
* @title: GstTagMux
* @see_also: GstApeMux, GstId3Mux
* @short_description: Base class for adding tags that are in one single chunk
* directly at the beginning or at the end of a file
*
* <refsect2>
* <para>
* Provides a base class for adding tags at the beginning or end of a
* stream.
* </para>
* <title>Deriving from GstTagMux</title>
* <para>
*
* ## Deriving from GstTagMux
*
* Subclasses have to do the following things:
* <itemizedlist>
* <listitem><para>
* In their base init function, they must add pad templates for the sink
* pad and the source pad to the element class, describing the media type
* they accept and output in the caps of the pad template.
* </para></listitem>
* <listitem><para>
* In their class init function, they must override the
* GST_TAG_MUX_CLASS(mux_klass)->render_start_tag and/or
* GST_TAG_MUX_CLASS(mux_klass)->render_end_tag vfuncs and set up a render
* function.
* </para></listitem>
* </itemizedlist>
* </para>
* </refsect2>
*
* * In their base init function, they must add pad templates for the sink
* pad and the source pad to the element class, describing the media type
* they accept and output in the caps of the pad template.
* * In their class init function, they must override the
* GST_TAG_MUX_CLASS(mux_klass)->render_start_tag and/or
* GST_TAG_MUX_CLASS(mux_klass)->render_end_tag vfuncs and set up a render
* function.
*
*/
#ifdef HAVE_CONFIG_H
#include <config.h>

View file

@ -21,16 +21,14 @@
/**
* SECTION:gsttagvorbis
* @title: GstVorbisTag
* @short_description: tag mappings and support functions for plugins
* dealing with vorbiscomments
* @see_also: #GstTagList
*
* <refsect2>
* <para>
* Contains various utility functions for plugins to parse or create
 * vorbiscomments and map them to and from #GstTagLists.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -22,6 +22,7 @@
/**
* SECTION:gsttagxmp
* @title: GstXmptag
* @short_description: tag mappings and support functions for plugins
* dealing with xmp packets
* @see_also: #GstTagList

View file

@ -19,15 +19,13 @@
/**
* SECTION:gsttaglanguagecodes
* @title: ISO-639 lang mappings
* @short_description: mappings for ISO-639 language codes and names
* @see_also: #GstTagList
*
* <refsect2>
* <para>
* Provides helper functions to convert between the various ISO-639 language
* codes, and to map language codes to language names.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -19,6 +19,7 @@
/**
* SECTION:gsttaglicenses
* @title: Licenses
* @short_description: utility functions for Creative Commons licenses
* @see_also: #GstTagList
*

View file

@ -32,16 +32,14 @@
/**
* SECTION:gsttag
* @title: Tags
* @short_description: additional tag definitions for plugins and applications
* @see_also: #GstTagList
*
* <refsect2>
* <para>
*
* Contains additional standardized GStreamer tag definitions for plugins
* and applications, and functions to register them with the GStreamer
* tag system.
* </para>
* </refsect2>
*
*/
#ifndef GST_DISABLE_GST_DEBUG

View file

@ -19,19 +19,16 @@
/**
* SECTION:gsttagxmpwriter
* @title: GstTagXmpWriter
* @short_description: Interface for elements that provide XMP serialization
*
* <refsect2>
* <para>
* This interface is implemented by elements that are able to do XMP serialization. Examples for
* such elements are #jifmux and #qtmux.
* </para>
* <para>
*
* Applications can use this interface to configure which XMP schemas should be used when serializing
* tags into XMP. Schemas are represented by their names, a full list of the supported schemas can be
* obtained from gst_tag_xmp_list_schemas(). By default, all schemas are used.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -28,16 +28,15 @@
/**
* SECTION:gstcolorbalance
* @title: GstColorBalance
* @short_description: Interface for adjusting color balance settings
*
* <refsect2><para>
* This interface is implemented by elements which can perform some color
* balance operation on video frames they process. For example, modifying
* the brightness, contrast, hue or saturation.
* </para><para>
*
* Example elements are 'xvimagesink' and 'colorbalance'
* </para>
* </refsect2>
*
*/
/* FIXME 0.11: check if we need to add API for sometimes-supportedness
@ -146,7 +145,7 @@ gst_color_balance_list_channels (GstColorBalance * balance)
*
* Sets the current value of the channel to the passed value, which must
* be between min_value and max_value.
*
*
* See Also: The #GstColorBalanceChannel.min_value and
* #GstColorBalanceChannel.max_value members of the
* #GstColorBalanceChannel object.
@ -169,11 +168,11 @@ gst_color_balance_set_value (GstColorBalance * balance,
*
* Retrieve the current value of the indicated channel, between min_value
* and max_value.
*
*
* See Also: The #GstColorBalanceChannel.min_value and
* #GstColorBalanceChannel.max_value members of the
* #GstColorBalanceChannel object.
*
*
* Returns: The current value of the channel.
*/
gint

View file

@ -27,13 +27,14 @@
/**
* SECTION:gstcolorbalancechannel
* @title: GstColorBalanceChannel
* @short_description: Object representing a channel from the #GstColorBalance
* interface.
*
* <refsect2><para>The #GstColorBalanceChannel object represents a parameter
* The #GstColorBalanceChannel object represents a parameter
* for modifying the color balance implemented by an element providing the
* #GstColorBalance interface. For example, Hue or Saturation.
* </para></refsect2>
*
*/
enum

View file

@ -98,7 +98,7 @@ gst_video_affine_transformation_meta_get_info (void)
}
/**
* gst_buffer_add_video_affine_transformation_meta
* gst_buffer_add_video_affine_transformation_meta:
* @buffer: a #GstBuffer
*
* Attaches GstVideoAffineTransformationMeta metadata to @buffer with

View file

@ -24,6 +24,7 @@
/**
* SECTION:gstvideodecoder
* @title: GstVideoDecoder
* @short_description: Base class for video decoders
* @see_also:
*
@ -32,86 +33,61 @@
*
* The GstVideoDecoder base class and derived subclasses should cooperate as
* follows:
* <orderedlist>
* <listitem>
* <itemizedlist><title>Configuration</title>
* <listitem><para>
* Initially, GstVideoDecoder calls @start when the decoder element
*
* ## Configuration
*
* * Initially, GstVideoDecoder calls @start when the decoder element
* is activated, which allows the subclass to perform any global setup.
* </para></listitem>
* <listitem><para>
* GstVideoDecoder calls @set_format to inform the subclass of caps
*
* * GstVideoDecoder calls @set_format to inform the subclass of caps
* describing input video data that it is about to receive, including
* possibly configuration data.
* While unlikely, it might be called more than once, if changing input
* parameters require reconfiguration.
* </para></listitem>
* <listitem><para>
* Incoming data buffers are processed as needed, described in Data
*
* * Incoming data buffers are processed as needed, described in Data
* Processing below.
* </para></listitem>
* <listitem><para>
* GstVideoDecoder calls @stop at end of all processing.
* </para></listitem>
* </itemizedlist>
* </listitem>
* <listitem>
* <itemizedlist>
* <title>Data processing</title>
* <listitem><para>
* The base class gathers input data, and optionally allows subclass
*
* * GstVideoDecoder calls @stop at end of all processing.
*
* ## Data processing
*
* * The base class gathers input data, and optionally allows subclass
* to parse this into subsequently manageable chunks, typically
* corresponding to and referred to as 'frames'.
* </para></listitem>
* <listitem><para>
* Each input frame is provided in turn to the subclass' @handle_frame
*
* * Each input frame is provided in turn to the subclass' @handle_frame
* callback.
* The ownership of the frame is given to the @handle_frame callback.
* </para></listitem>
* <listitem><para>
* If codec processing results in decoded data, the subclass should call
*
* * If codec processing results in decoded data, the subclass should call
* @gst_video_decoder_finish_frame to have decoded data pushed.
* downstream. Otherwise, the subclass must call
* @gst_video_decoder_drop_frame, to allow the base class to do timestamp
* and offset tracking, and possibly to requeue the frame for a later
* attempt in the case of reverse playback.
* </para></listitem>
* </itemizedlist>
* </listitem>
* <listitem>
* <itemizedlist><title>Shutdown phase</title>
* <listitem><para>
* The GstVideoDecoder class calls @stop to inform the subclass that data
*
* ## Shutdown phase
*
* * The GstVideoDecoder class calls @stop to inform the subclass that data
* parsing will be stopped.
* </para></listitem>
* </itemizedlist>
* </listitem>
* <listitem>
* <itemizedlist><title>Additional Notes</title>
* <listitem>
* <itemizedlist><title>Seeking/Flushing</title>
* <listitem><para>
* When the pipeline is seeked or otherwise flushed, the subclass is
* informed via a call to its @reset callback, with the hard parameter
* set to true. This indicates the subclass should drop any internal data
* queues and timestamps and prepare for a fresh set of buffers to arrive
* for parsing and decoding.
* </para></listitem>
* </itemizedlist>
* </listitem>
* <listitem>
* <itemizedlist><title>End Of Stream</title>
* <listitem><para>
* At end-of-stream, the subclass @parse function may be called some final
* times with the at_eos parameter set to true, indicating that the element
 *    should not expect any more data to be arriving, and it should parse any
 *    remaining frames and call gst_video_decoder_have_frame() if possible.
* </para></listitem>
* </itemizedlist>
* </listitem>
* </itemizedlist>
* </listitem>
* </orderedlist>
*
* ## Additional Notes
*
* * Seeking/Flushing
*
* * When the pipeline is seeked or otherwise flushed, the subclass is
* informed via a call to its @reset callback, with the hard parameter
* set to true. This indicates the subclass should drop any internal data
* queues and timestamps and prepare for a fresh set of buffers to arrive
* for parsing and decoding.
*
* * End Of Stream
*
* * At end-of-stream, the subclass @parse function may be called some final
* times with the at_eos parameter set to true, indicating that the element
 *    should not expect any more data to be arriving, and it should parse any
 *    remaining frames and call gst_video_decoder_have_frame() if possible.
*
* The subclass is responsible for providing pad template caps for
* source and sink pads. The pads need to be named "sink" and "src". It also
@ -143,23 +119,18 @@
* incoming data.
*
* The bare minimum that a functional subclass needs to implement is:
* <itemizedlist>
* <listitem><para>Provide pad templates</para></listitem>
* <listitem><para>
* Inform the base class of output caps via
*
* * Provide pad templates
* * Inform the base class of output caps via
* @gst_video_decoder_set_output_state
* </para></listitem>
* <listitem><para>
* Parse input data, if it is not considered packetized from upstream
*
* * Parse input data, if it is not considered packetized from upstream
* Data will be provided to @parse which should invoke
* @gst_video_decoder_add_to_frame and @gst_video_decoder_have_frame to
* separate the data belonging to each video frame.
* </para></listitem>
* <listitem><para>
* Accept data in @handle_frame and provide decoded results to
*
* * Accept data in @handle_frame and provide decoded results to
* @gst_video_decoder_finish_frame, or call @gst_video_decoder_drop_frame.
* </para></listitem>
* </itemizedlist>
*/
#ifdef HAVE_CONFIG_H
@ -3358,7 +3329,7 @@ gst_video_decoder_have_frame (GstVideoDecoder * decoder)
}
/* Pass the frame in priv->current_frame through the
* handle_frame() callback for decoding and passing to gvd_finish_frame(),
* handle_frame() callback for decoding and passing to gvd_finish_frame(),
* or dropping by passing to gvd_drop_frame() */
static GstFlowReturn
gst_video_decoder_decode_frame (GstVideoDecoder * decoder,
@ -3370,7 +3341,7 @@ gst_video_decoder_decode_frame (GstVideoDecoder * decoder,
decoder_class = GST_VIDEO_DECODER_GET_CLASS (decoder);
/* FIXME : This should only have to be checked once (either the subclass has an
/* FIXME : This should only have to be checked once (either the subclass has an
* implementation, or it doesn't) */
g_return_val_if_fail (decoder_class->handle_frame != NULL, GST_FLOW_ERROR);
@ -3538,7 +3509,7 @@ gst_video_decoder_get_oldest_frame (GstVideoDecoder * decoder)
* @frame_number: system_frame_number of a frame
*
* Get a pending unfinished #GstVideoCodecFrame
*
*
* Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified by @frame_number.
*/
GstVideoCodecFrame *
@ -3568,7 +3539,7 @@ gst_video_decoder_get_frame (GstVideoDecoder * decoder, int frame_number)
* @decoder: a #GstVideoDecoder
*
* Get all pending unfinished #GstVideoCodecFrame
*
*
* Returns: (transfer full) (element-type GstVideoCodecFrame): pending unfinished #GstVideoCodecFrame.
*/
GList *

View file

@ -24,6 +24,7 @@
/**
* SECTION:gstvideoencoder
* @title: GstVideoEncoder
* @short_description: Base class for video encoders
* @see_also:
*
@ -31,59 +32,40 @@
* encoded video data.
*
* GstVideoEncoder and subclass should cooperate as follows.
* <orderedlist>
* <listitem>
* <itemizedlist><title>Configuration</title>
* <listitem><para>
* Initially, GstVideoEncoder calls @start when the encoder element
*
* ## Configuration
*
* * Initially, GstVideoEncoder calls @start when the encoder element
* is activated, which allows subclass to perform any global setup.
* </para></listitem>
* <listitem><para>
* GstVideoEncoder calls @set_format to inform subclass of the format
* * GstVideoEncoder calls @set_format to inform subclass of the format
* of input video data that it is about to receive. Subclass should
* setup for encoding and configure base class as appropriate
* (e.g. latency). While unlikely, it might be called more than once,
* if changing input parameters require reconfiguration. Baseclass
* will ensure that processing of current configuration is finished.
* </para></listitem>
* <listitem><para>
* GstVideoEncoder calls @stop at end of all processing.
* </para></listitem>
* </itemizedlist>
* </listitem>
* <listitem>
* <itemizedlist>
* <title>Data processing</title>
* <listitem><para>
* Base class collects input data and metadata into a frame and hands
* * GstVideoEncoder calls @stop at end of all processing.
*
* ## Data processing
*
* * Base class collects input data and metadata into a frame and hands
* this to subclass' @handle_frame.
* </para></listitem>
* <listitem><para>
* If codec processing results in encoded data, subclass should call
*
* * If codec processing results in encoded data, subclass should call
* @gst_video_encoder_finish_frame to have encoded data pushed
* downstream.
* </para></listitem>
* <listitem><para>
* If implemented, baseclass calls subclass @pre_push just prior to
*
* * If implemented, baseclass calls subclass @pre_push just prior to
* pushing to allow subclasses to modify some metadata on the buffer.
* If it returns GST_FLOW_OK, the buffer is pushed downstream.
* </para></listitem>
* <listitem><para>
* GstVideoEncoderClass will handle both srcpad and sinkpad events.
*
* * GstVideoEncoderClass will handle both srcpad and sinkpad events.
* Sink events will be passed to subclass if @event callback has been
* provided.
* </para></listitem>
* </itemizedlist>
* </listitem>
* <listitem>
* <itemizedlist><title>Shutdown phase</title>
* <listitem><para>
* GstVideoEncoder class calls @stop to inform the subclass that data
*
* ## Shutdown phase
*
* * GstVideoEncoder class calls @stop to inform the subclass that data
* parsing will be stopped.
* </para></listitem>
* </itemizedlist>
* </listitem>
* </orderedlist>
*
* Subclass is responsible for providing pad template caps for
* source and sink pads. The pads need to be named "sink" and "src". It should
@ -91,16 +73,11 @@
* @gst_video_encoder_finish_frame.
*
* Things that subclass need to take care of:
* <itemizedlist>
* <listitem><para>Provide pad templates</para></listitem>
* <listitem><para>
* Provide source pad caps before pushing the first buffer
* </para></listitem>
* <listitem><para>
* Accept data in @handle_frame and provide encoded results to
*
* * Provide pad templates
* * Provide source pad caps before pushing the first buffer
* * Accept data in @handle_frame and provide encoded results to
* @gst_video_encoder_finish_frame.
* </para></listitem>
* </itemizedlist>
*
*/
@ -1928,7 +1905,7 @@ foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data)
/**
* gst_video_encoder_finish_frame:
* @encoder: a #GstVideoEncoder
* @frame: (transfer full): an encoded #GstVideoCodecFrame
* @frame: (transfer full): an encoded #GstVideoCodecFrame
*
* @frame must have a valid encoded data buffer, whose metadata fields
* are then appropriately set according to frame data or no buffer at
@ -2367,7 +2344,7 @@ gst_video_encoder_get_oldest_frame (GstVideoEncoder * encoder)
* @frame_number: system_frame_number of a frame
*
* Get a pending unfinished #GstVideoCodecFrame
*
*
* Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified by @frame_number.
*/
GstVideoCodecFrame *
@ -2397,7 +2374,7 @@ gst_video_encoder_get_frame (GstVideoEncoder * encoder, int frame_number)
* @encoder: a #GstVideoEncoder
*
* Get all pending unfinished #GstVideoCodecFrame
*
*
* Returns: (transfer full) (element-type GstVideoCodecFrame): pending unfinished #GstVideoCodecFrame.
*/
GList *

View file

@ -20,17 +20,14 @@
/**
* SECTION:gstvideofilter
* @title: GstVideoFilter
* @short_description: Base class for video filters
*
* <refsect2>
* <para>
*
* Provides useful functions and a base class for video filters.
* </para>
* <para>
*
* The videofilter will by default enable QoS on the parent GstBaseTransform
* to implement frame dropping.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H

View file

@ -170,7 +170,7 @@ typedef enum
* @GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_FLIP: Bottom line first in memory, left row first
* @GST_VIDEO_GL_TEXTURE_ORIENTATION_X_FLIP_Y_NORMAL: Top line first in memory, right row first
* @GST_VIDEO_GL_TEXTURE_ORIENTATION_X_FLIP_Y_FLIP: Bottom line first in memory, right row first
*
*
* The orientation of the GL texture.
*/
typedef enum

View file

@ -26,6 +26,7 @@ GST_DEBUG_CATEGORY_STATIC (gst_video_pool_debug);
/**
* SECTION:gstvideopool
* @title: GstVideoBufferPool
* @short_description: GstBufferPool for raw video buffers
* @see_also: #GstBufferPool
*

View file

@ -20,18 +20,15 @@
/**
* SECTION:gstvideosink
* @title: GstVideoSink
* @short_description: Base class for video sinks
*
* <refsect2>
* <para>
* Provides useful functions and a base class for video sinks.
* </para>
* <para>
*
* Provides useful functions and a base class for video sinks.
*
* GstVideoSink will configure the default base sink to drop frames that
* arrive later than 20ms as this is considered the default threshold for
* observing out-of-sync frames.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
@ -92,7 +89,7 @@ static GstFlowReturn gst_video_sink_show_frame (GstBaseSink * bsink,
* @dst: the #GstVideoRectangle describing the destination area
* @result: a pointer to a #GstVideoRectangle which will receive the result area
* @scaling: a #gboolean indicating if scaling should be applied or not
*
*
 * Takes @src rectangle and positions it at the center of @dst rectangle with or
* without @scaling. It handles clipping if the @src rectangle is bigger than
* the @dst one and @scaling is set to FALSE.

View file

@ -105,7 +105,7 @@ struct _GstVideoSink {
* @parent_class: the parent class structure
* @show_frame: render a video frame. Maps to #GstBaseSinkClass.render() and
* #GstBaseSinkClass.preroll() vfuncs. Rendering during preroll will be
* suppressed if the #GstVideoSink:show-preroll-frame property is set to
* suppressed if the #GstVideoSink:show-preroll-frame property is set to
* %FALSE.
*
* The video sink class structure. Derived classes should override the

View file

@ -22,6 +22,7 @@
/**
* SECTION:gstnavigation
* @title: GstNavigation
* @short_description: Interface for creating, sending and parsing navigation
* events.
*
@ -31,32 +32,21 @@
* receiving navigation related bus events. One main usecase is DVD menu navigation.
*
* The main parts of the API are:
* <itemizedlist>
* <listitem>
* <para>
* The GstNavigation interface, implemented by elements which provide an application
* with the ability to create and inject navigation events into the pipeline.
* </para>
* </listitem>
* <listitem>
* <para>
* GstNavigation event handling API. GstNavigation events are created in response to
* calls on a GstNavigation interface implementation, and sent in the pipeline. Upstream
* elements can use the navigation event API functions to parse the contents of received
* messages.
* </para>
* </listitem>
* <listitem>
* <para>
* GstNavigation message handling API. GstNavigation messages may be sent on the message
* bus to inform applications of navigation related changes in the pipeline, such as the
* mouse moving over a clickable region, or the set of available angles changing.
* </para><para>
*
* * The GstNavigation interface, implemented by elements which provide an application
* with the ability to create and inject navigation events into the pipeline.
* * GstNavigation event handling API. GstNavigation events are created in response to
* calls on a GstNavigation interface implementation, and sent in the pipeline. Upstream
* elements can use the navigation event API functions to parse the contents of received
* messages.
*
* * GstNavigation message handling API. GstNavigation messages may be sent on the message
* bus to inform applications of navigation related changes in the pipeline, such as the
* mouse moving over a clickable region, or the set of available angles changing.
*
* The GstNavigation message functions provide functions for creating and parsing
* custom bus messages for signaling GstNavigation changes.
* </para>
* </listitem>
* </itemizedlist>
*
*/
#ifdef HAVE_CONFIG_H
@ -777,7 +767,7 @@ gst_navigation_event_parse_key_event (GstEvent * event, const gchar ** key)
* event.
* @y: Pointer to a gdouble to receive the y coordinate of the mouse button
* event.
*
*
* Retrieve the details of either a #GstNavigation mouse button press event or
* a mouse button release event. Determine which type the event is using
* gst_navigation_event_get_type() to retrieve the #GstNavigationEventType.

View file

@ -30,6 +30,7 @@
/**
* SECTION:gstvideochroma
* @title: GstVideoChromaResample
* @short_description: Functions and utility object for operating on chroma video planes
*
* The functions gst_video_chroma_from_string() and gst_video_chroma_to_string() convert

View file

@ -39,27 +39,17 @@
/**
* SECTION:videoconverter
* @title: GstVideoConverter
* @short_description: Generic video conversion
*
* <refsect2>
* <para>
* This object is used to convert video frames from one format to another.
* The object can perform conversion of:
* <itemizedlist>
* <listitem><para>
* video format
* </para></listitem>
* <listitem><para>
* video colorspace
* </para></listitem>
* <listitem><para>
* chroma-siting
* </para></listitem>
* <listitem><para>
* video size
* </para></listitem>
* </para>
* </refsect2>
*
* * video format
* * video colorspace
* * chroma-siting
* * video size
*
*/
/*

View file

@ -24,6 +24,7 @@
/**
* SECTION:gstvideodither
* @title: GstVideoDither
* @short_description: Utility object for dithering and quantizing lines of video
*
* GstVideoDither provides implementations of several dithering algorithms

View file

@ -134,7 +134,7 @@ gst_video_event_new_downstream_force_key_unit (GstClockTime timestamp,
* @count: integer that can be used to number key units
*
* Creates a new upstream force key unit event. An upstream force key unit event
* can be sent to request upstream elements to produce a key unit.
* can be sent to request upstream elements to produce a key unit.
*
* @running_time can be set to request a new key unit at a specific
* running_time. If set to GST_CLOCK_TIME_NONE, upstream elements will produce a

View file

@ -21,39 +21,32 @@
/**
* SECTION:gstvideooverlaycomposition
* @title: GstVideoOverlayRectangle
* @short_description: Video Buffer Overlay Compositions (Subtitles, Logos)
*
* <refsect2>
* <para>
* Functions to create and handle overlay compositions on video buffers.
* </para>
* <para>
*
* An overlay composition describes one or more overlay rectangles to be
* blended on top of a video buffer.
* </para>
* <para>
*
* This API serves two main purposes:
* <itemizedlist>
* <listitem>
* it can be used to attach overlay information (subtitles or logos)
* to non-raw video buffers such as GL/VAAPI/VDPAU surfaces. The actual
* blending of the overlay can then be done by e.g. the video sink that
* processes these non-raw buffers.
* </listitem>
* <listitem>
* it can also be used to blend overlay rectangles on top of raw video
* buffers, thus consolidating blending functionality for raw video in
* one place.
* </listitem>
*
* * it can be used to attach overlay information (subtitles or logos)
* to non-raw video buffers such as GL/VAAPI/VDPAU surfaces. The actual
* blending of the overlay can then be done by e.g. the video sink that
* processes these non-raw buffers.
*
* * it can also be used to blend overlay rectangles on top of raw video
* buffers, thus consolidating blending functionality for raw video in
* one place.
*
* Together, this allows existing overlay elements to easily handle raw
 * and non-raw video as input without major changes (once the overlays
 * have been put into a #GstVideoOverlayComposition object anyway) - for raw
* video the overlay can just use the blending function to blend the data
* on top of the video, and for surface buffers it can just attach them to
* the buffer and let the sink render the overlays.
* </itemizedlist>
* </para>
* </refsect2>
*
*/
/* TODO:

View file

@ -51,6 +51,7 @@ ensure_debug_category (void)
/**
* SECTION:gstvideoresampler
* @title: GstVideoResampler
* @short_description: Utility structure for resampler information
*
* #GstVideoResampler is a structure which holds the information

Some files were not shown because too many files have changed in this diff Show more