GStreamer Base Plugins 1.0 Library Reference Manual
Synopsis

#include <gst/video/video.h>

#define GST_VIDEO_FPS_RANGE
#define GST_VIDEO_SIZE_RANGE
enum GstVideoChromaSite;
enum GstVideoColorMatrix;
enum GstVideoColorPrimaries;
enum GstVideoColorRange;
GstVideoColorimetry;
enum GstVideoFlags;
enum GstVideoFormatFlags;
void (*GstVideoFormatPack) (GstVideoFormatInfo *info, const gpointer src, gpointer data[GST_VIDEO_MAX_PLANES], const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width);
void (*GstVideoFormatUnpack) (GstVideoFormatInfo *info, gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES], const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width);
enum GstVideoInterlaceMode;
enum GstVideoTransferFunction;
enum GstVideoFormat;
struct GstVideoFormatInfo;
#define GST_VIDEO_FORMAT_INFO_BITS (info)
#define GST_VIDEO_FORMAT_INFO_DATA (info, planes, comp)
#define GST_VIDEO_FORMAT_INFO_DEPTH (info, c)
#define GST_VIDEO_FORMAT_INFO_FLAGS (info)
#define GST_VIDEO_FORMAT_INFO_FORMAT (info)
#define GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info)
#define GST_VIDEO_FORMAT_INFO_HAS_PALETTE (info)
#define GST_VIDEO_FORMAT_INFO_H_SUB (info, c)
#define GST_VIDEO_FORMAT_INFO_IS_GRAY (info)
#define GST_VIDEO_FORMAT_INFO_IS_LE (info)
#define GST_VIDEO_FORMAT_INFO_IS_RGB (info)
#define GST_VIDEO_FORMAT_INFO_IS_YUV (info)
#define GST_VIDEO_FORMAT_INFO_IS_COMPLEX (info)
#define GST_VIDEO_FORMAT_INFO_NAME (info)
#define GST_VIDEO_FORMAT_INFO_N_COMPONENTS (info)
#define GST_VIDEO_FORMAT_INFO_N_PLANES (info)
#define GST_VIDEO_FORMAT_INFO_OFFSET (info, offsets, comp)
#define GST_VIDEO_FORMAT_INFO_PLANE (info, c)
#define GST_VIDEO_FORMAT_INFO_POFFSET (info, c)
#define GST_VIDEO_FORMAT_INFO_PSTRIDE (info, c)
#define GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, c, h)
#define GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, c, w)
#define GST_VIDEO_FORMAT_INFO_SHIFT (info, c)
#define GST_VIDEO_FORMAT_INFO_STRIDE (info, strides, comp)
#define GST_VIDEO_FORMAT_INFO_W_SUB (info, c)
struct GstVideoInfo;
#define GST_VIDEO_INFO_COMP_DATA (i, d, c)
#define GST_VIDEO_INFO_COMP_DEPTH (i, c)
#define GST_VIDEO_INFO_COMP_HEIGHT (i, c)
#define GST_VIDEO_INFO_COMP_OFFSET (i, c)
#define GST_VIDEO_INFO_COMP_PLANE (i, c)
#define GST_VIDEO_INFO_COMP_POFFSET (i, c)
#define GST_VIDEO_INFO_COMP_PSTRIDE (i, c)
#define GST_VIDEO_INFO_COMP_STRIDE (i, c)
#define GST_VIDEO_INFO_COMP_WIDTH (i, c)
#define GST_VIDEO_INFO_FLAGS (i)
#define GST_VIDEO_INFO_FLAG_IS_SET (i, flag)
#define GST_VIDEO_INFO_FLAG_SET (i, flag)
#define GST_VIDEO_INFO_FLAG_UNSET (i, flag)
#define GST_VIDEO_INFO_FORMAT (i)
#define GST_VIDEO_INFO_FPS_D (i)
#define GST_VIDEO_INFO_FPS_N (i)
#define GST_VIDEO_INFO_HAS_ALPHA (i)
#define GST_VIDEO_INFO_HEIGHT (i)
#define GST_VIDEO_INFO_INTERLACE_MODE (i)
#define GST_VIDEO_INFO_IS_GRAY (i)
#define GST_VIDEO_INFO_IS_INTERLACED (i)
#define GST_VIDEO_INFO_IS_RGB (i)
#define GST_VIDEO_INFO_IS_YUV (i)
#define GST_VIDEO_INFO_NAME (i)
#define GST_VIDEO_INFO_N_COMPONENTS (i)
#define GST_VIDEO_INFO_N_PLANES (i)
#define GST_VIDEO_INFO_PAR_D (i)
#define GST_VIDEO_INFO_PAR_N (i)
#define GST_VIDEO_INFO_PLANE_OFFSET (i, p)
#define GST_VIDEO_INFO_PLANE_STRIDE (i, p)
#define GST_VIDEO_INFO_SIZE (i)
#define GST_VIDEO_INFO_WIDTH (i)
#define GST_VIDEO_MAX_COMPONENTS
#define GST_VIDEO_MAX_PLANES
#define GST_VIDEO_SUB_SCALE (scale, val)
gboolean gst_video_info_convert (GstVideoInfo *info, GstFormat src_format, gint64 src_value, GstFormat dest_format, gint64 *dest_value);
gboolean gst_video_info_from_caps (GstVideoInfo *info, const GstCaps *caps);
void gst_video_info_init (GstVideoInfo *info);
void gst_video_info_set_format (GstVideoInfo *info, GstVideoFormat format, guint width, guint height);
GstCaps * gst_video_info_to_caps (GstVideoInfo *info);
gboolean gst_video_calculate_display_ratio (guint *dar_n, guint *dar_d, guint video_width, guint video_height, guint video_par_n, guint video_par_d, guint display_par_n, guint display_par_d);
guint32 gst_video_format_to_fourcc (GstVideoFormat format);
GstVideoFormat gst_video_format_from_fourcc (guint32 fourcc);
GstBuffer * gst_video_parse_caps_palette (GstCaps *caps);
void (*GstVideoConvertSampleCallback) (GstSample *sample, GError *error, gpointer user_data);
GstSample * gst_video_convert_sample (GstSample *sample, const GstCaps *to_caps, GstClockTime timeout, GError **error);
void gst_video_convert_sample_async (GstSample *sample, const GstCaps *to_caps, GstClockTime timeout, GstVideoConvertSampleCallback callback, gpointer user_data, GDestroyNotify destroy_notify);
GstEvent * gst_video_event_new_still_frame (gboolean in_still);
gboolean gst_video_event_parse_still_frame (GstEvent *event, gboolean *in_still);
GstEvent * gst_video_event_new_upstream_force_key_unit (GstClockTime running_time, gboolean all_headers, guint count);
GstEvent * gst_video_event_new_downstream_force_key_unit (GstClockTime timestamp, GstClockTime stream_time, GstClockTime running_time, gboolean all_headers, guint count);
gboolean gst_video_event_is_force_key_unit (GstEvent *event);
gboolean gst_video_event_parse_upstream_force_key_unit (GstEvent *event, GstClockTime *running_time, gboolean *all_headers, guint *count);
gboolean gst_video_event_parse_downstream_force_key_unit (GstEvent *event, GstClockTime *timestamp, GstClockTime *stream_time, GstClockTime *running_time, gboolean *all_headers, guint *count);

Description
This library contains some helper functions and includes the videosink and videofilter base classes.
typedef enum {
  GST_VIDEO_CHROMA_SITE_UNKNOWN   = 0,
  GST_VIDEO_CHROMA_SITE_NONE      = (1 << 0),
  GST_VIDEO_CHROMA_SITE_H_COSITED = (1 << 1),
  GST_VIDEO_CHROMA_SITE_V_COSITED = (1 << 2),
  GST_VIDEO_CHROMA_SITE_ALT_LINE  = (1 << 3),
  /* some common chroma cositing */
  GST_VIDEO_CHROMA_SITE_COSITED   = (GST_VIDEO_CHROMA_SITE_H_COSITED | GST_VIDEO_CHROMA_SITE_V_COSITED),
  GST_VIDEO_CHROMA_SITE_JPEG      = (GST_VIDEO_CHROMA_SITE_NONE),
  GST_VIDEO_CHROMA_SITE_MPEG2     = (GST_VIDEO_CHROMA_SITE_H_COSITED),
  GST_VIDEO_CHROMA_SITE_DV        = (GST_VIDEO_CHROMA_SITE_COSITED | GST_VIDEO_CHROMA_SITE_ALT_LINE),
} GstVideoChromaSite;
typedef enum {
  GST_VIDEO_COLOR_MATRIX_UNKNOWN = 0,
  GST_VIDEO_COLOR_MATRIX_RGB,
  GST_VIDEO_COLOR_MATRIX_FCC,
  GST_VIDEO_COLOR_MATRIX_BT709,
  GST_VIDEO_COLOR_MATRIX_BT601,
  GST_VIDEO_COLOR_MATRIX_SMPTE240M
} GstVideoColorMatrix;
The color matrix is used to convert between Y'PbPr and non-linear RGB (R'G'B').
typedef enum {
  GST_VIDEO_COLOR_PRIMARIES_UNKNOWN = 0,
  GST_VIDEO_COLOR_PRIMARIES_BT709,
  GST_VIDEO_COLOR_PRIMARIES_BT470M,
  GST_VIDEO_COLOR_PRIMARIES_BT470BG,
  GST_VIDEO_COLOR_PRIMARIES_SMPTE170M,
  GST_VIDEO_COLOR_PRIMARIES_SMPTE240M
} GstVideoColorPrimaries;
The color primaries define how to transform linear RGB values to and from the CIE XYZ colorspace.
GST_VIDEO_COLOR_PRIMARIES_UNKNOWN : unknown color primaries
GST_VIDEO_COLOR_PRIMARIES_BT709 : BT709 primaries
GST_VIDEO_COLOR_PRIMARIES_BT470M : BT470M primaries
GST_VIDEO_COLOR_PRIMARIES_BT470BG : BT470BG primaries
GST_VIDEO_COLOR_PRIMARIES_SMPTE170M : SMPTE170M primaries
GST_VIDEO_COLOR_PRIMARIES_SMPTE240M : SMPTE240M primaries
typedef enum {
  GST_VIDEO_COLOR_RANGE_UNKNOWN = 0,
  GST_VIDEO_COLOR_RANGE_0_255,
  GST_VIDEO_COLOR_RANGE_16_235
} GstVideoColorRange;
Possible color range values. These constants are defined for 8 bit color values and can be scaled for other bit depths.
typedef struct {
  GstVideoColorRange       range;
  GstVideoColorMatrix      matrix;
  GstVideoTransferFunction transfer;
  GstVideoColorPrimaries   primaries;
} GstVideoColorimetry;
Structure describing the color info.
GstVideoColorRange range : the color range. This is the valid range for the samples. It is used to convert the samples to Y'PbPr values.
GstVideoColorMatrix matrix : the color matrix. Used to convert between Y'PbPr and non-linear RGB (R'G'B').
GstVideoTransferFunction transfer : the transfer function. Used to convert between R'G'B' and RGB.
GstVideoColorPrimaries primaries : the color primaries. Used to convert between R'G'B' and CIE XYZ.
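A minimal sketch of filling the structure by hand, for example to describe BT.601 studio-swing video (this particular combination of values is only an illustration):

  #include <gst/video/video.h>

  static void
  fill_bt601_colorimetry (GstVideoColorimetry *cinfo)
  {
    /* studio swing: 16-235 for 8-bit luma samples */
    cinfo->range     = GST_VIDEO_COLOR_RANGE_16_235;
    /* BT.601 Y'PbPr <-> R'G'B' matrix */
    cinfo->matrix    = GST_VIDEO_COLOR_MATRIX_BT601;
    /* BT.709 transfer function and SMPTE170M primaries */
    cinfo->transfer  = GST_VIDEO_TRANSFER_BT709;
    cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
  }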
typedef enum {
  GST_VIDEO_FLAG_NONE         = 0,
  GST_VIDEO_FLAG_VARIABLE_FPS = (1 << 0)
} GstVideoFlags;
Extra video flags
typedef enum {
  GST_VIDEO_FORMAT_FLAG_YUV     = (1 << 0),
  GST_VIDEO_FORMAT_FLAG_RGB     = (1 << 1),
  GST_VIDEO_FORMAT_FLAG_GRAY    = (1 << 2),
  GST_VIDEO_FORMAT_FLAG_ALPHA   = (1 << 3),
  GST_VIDEO_FORMAT_FLAG_LE      = (1 << 4),
  GST_VIDEO_FORMAT_FLAG_PALETTE = (1 << 5),
  GST_VIDEO_FORMAT_FLAG_COMPLEX = (1 << 6)
} GstVideoFormatFlags;
The different video flags that a format info can have.
GST_VIDEO_FORMAT_FLAG_YUV : The video format is YUV, components are numbered 0=Y, 1=U, 2=V.
GST_VIDEO_FORMAT_FLAG_RGB : The video format is RGB, components are numbered 0=R, 1=G, 2=B.
GST_VIDEO_FORMAT_FLAG_GRAY : The video is gray, there is one gray component with index 0.
GST_VIDEO_FORMAT_FLAG_ALPHA : The video format has an alpha component with index 3.
GST_VIDEO_FORMAT_FLAG_LE : The video format has data stored in little endianness.
GST_VIDEO_FORMAT_FLAG_PALETTE : The video format has a palette.
GST_VIDEO_FORMAT_FLAG_COMPLEX : The video format has a complex layout that can't be described with the usual information in the GstVideoFormatInfo.
void (*GstVideoFormatPack) (GstVideoFormatInfo *info, const gpointer src, gpointer data[GST_VIDEO_MAX_PLANES], const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width);
Packs width pixels from src to the given planes and strides in the format info. The pixels from the source have each component interleaved and will be packed into the planes in data.
info : a GstVideoFormatInfo
src : a source array
data : pointers to the destination data planes
stride : strides of the destination planes
x : the x position in the image to pack to
y : the y position in the image to pack to
width : the amount of pixels to pack
void (*GstVideoFormatUnpack) (GstVideoFormatInfo *info, gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES], const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width);
Unpacks width pixels from the given planes and strides containing data of format info. The pixels will be unpacked into dest with each component interleaved. dest should be at least big enough to hold width * n_components * size(unpack_format) bytes.
info : a GstVideoFormatInfo
dest : a destination array
data : pointers to the data planes
stride : strides of the planes
x : the x position in the image to start from
y : the y position in the image to start from
width : the amount of pixels to unpack
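A sketch of how the unpack function might be driven, following the signature shown above. It assumes gst_video_format_get_info() to look up the format info, plane pointers and strides obtained elsewhere (for example from a mapped video frame), and that I420 unpacks to a 4-bytes-per-pixel interleaved format (AYUV):

  #include <gst/video/video.h>

  /* Unpack one line of an I420 image into an interleaved buffer.
   * The caller owns the returned buffer and must g_free() it. */
  static guint8 *
  unpack_line (gpointer data[GST_VIDEO_MAX_PLANES],
               const gint stride[GST_VIDEO_MAX_PLANES],
               gint y, gint width)
  {
    const GstVideoFormatInfo *finfo =
        gst_video_format_get_info (GST_VIDEO_FORMAT_I420);
    /* assumed unpack format: AYUV, i.e. 4 bytes per pixel */
    guint8 *dest = g_malloc (width * 4);

    /* unpack the line starting at x = 0 */
    finfo->unpack_func ((GstVideoFormatInfo *) finfo, dest,
        (const gpointer *) data, stride, 0, y, width);

    return dest;
  }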
typedef enum {
  GST_VIDEO_INTERLACE_MODE_PROGRESSIVE = 0,
  GST_VIDEO_INTERLACE_MODE_INTERLEAVED,
  GST_VIDEO_INTERLACE_MODE_MIXED,
  GST_VIDEO_INTERLACE_MODE_FIELDS
} GstVideoInterlaceMode;
The possible values of the GstVideoInterlaceMode describing the interlace mode of the stream.
GST_VIDEO_INTERLACE_MODE_PROGRESSIVE : all frames are progressive
GST_VIDEO_INTERLACE_MODE_INTERLEAVED : 2 fields are interleaved in one video frame. Extra buffer flags describe the field order.
GST_VIDEO_INTERLACE_MODE_MIXED : frames contain both interlaced and progressive video, the buffer flags describe the frame and fields.
GST_VIDEO_INTERLACE_MODE_FIELDS : 2 fields are stored in one buffer, use the frame ID to get access to the required field. For multiview (the 'views' property > 1) the fields of view N can be found at frame ID (N * 2) and (N * 2) + 1. Each field has only half the amount of lines as noted in the height property. This mode requires multiple GstVideoMeta metadata to describe the fields.
typedef enum {
  GST_VIDEO_TRANSFER_UNKNOWN = 0,
  GST_VIDEO_TRANSFER_GAMMA10,
  GST_VIDEO_TRANSFER_GAMMA18,
  GST_VIDEO_TRANSFER_GAMMA20,
  GST_VIDEO_TRANSFER_GAMMA22,
  GST_VIDEO_TRANSFER_BT709,
  GST_VIDEO_TRANSFER_SMPTE240M,
  GST_VIDEO_TRANSFER_SRGB,
  GST_VIDEO_TRANSFER_GAMMA28,
  GST_VIDEO_TRANSFER_LOG100,
  GST_VIDEO_TRANSFER_LOG316
} GstVideoTransferFunction;
The video transfer function defines the formula for converting between non-linear RGB (R'G'B') and linear RGB.
GST_VIDEO_TRANSFER_UNKNOWN : unknown transfer function
GST_VIDEO_TRANSFER_GAMMA10 : linear RGB, gamma 1.0 curve
GST_VIDEO_TRANSFER_GAMMA18 : Gamma 1.8 curve
GST_VIDEO_TRANSFER_GAMMA20 : Gamma 2.0 curve
GST_VIDEO_TRANSFER_GAMMA22 : Gamma 2.2 curve
GST_VIDEO_TRANSFER_BT709 : Gamma 2.2 curve with a linear segment in the lower range
GST_VIDEO_TRANSFER_SMPTE240M : Gamma 2.2 curve with a linear segment in the lower range
GST_VIDEO_TRANSFER_SRGB : Gamma 2.4 curve with a linear segment in the lower range
GST_VIDEO_TRANSFER_GAMMA28 : Gamma 2.8 curve
GST_VIDEO_TRANSFER_LOG100 : Logarithmic transfer characteristic 100:1 range
GST_VIDEO_TRANSFER_LOG316 : Logarithmic transfer characteristic 316.22777:1 range
typedef enum {
  GST_VIDEO_FORMAT_UNKNOWN,
  GST_VIDEO_FORMAT_I420,
  GST_VIDEO_FORMAT_YV12,
  GST_VIDEO_FORMAT_YUY2,
  GST_VIDEO_FORMAT_UYVY,
  GST_VIDEO_FORMAT_AYUV,
  GST_VIDEO_FORMAT_RGBx,
  GST_VIDEO_FORMAT_BGRx,
  GST_VIDEO_FORMAT_xRGB,
  GST_VIDEO_FORMAT_xBGR,
  GST_VIDEO_FORMAT_RGBA,
  GST_VIDEO_FORMAT_BGRA,
  GST_VIDEO_FORMAT_ARGB,
  GST_VIDEO_FORMAT_ABGR,
  GST_VIDEO_FORMAT_RGB,
  GST_VIDEO_FORMAT_BGR,
  GST_VIDEO_FORMAT_Y41B,
  GST_VIDEO_FORMAT_Y42B,
  GST_VIDEO_FORMAT_YVYU,
  GST_VIDEO_FORMAT_Y444,
  GST_VIDEO_FORMAT_v210,
  GST_VIDEO_FORMAT_v216,
  GST_VIDEO_FORMAT_NV12,
  GST_VIDEO_FORMAT_NV21,
  GST_VIDEO_FORMAT_GRAY8,
  GST_VIDEO_FORMAT_GRAY16_BE,
  GST_VIDEO_FORMAT_GRAY16_LE,
  GST_VIDEO_FORMAT_v308,
  GST_VIDEO_FORMAT_Y800,
  GST_VIDEO_FORMAT_Y16,
  GST_VIDEO_FORMAT_RGB16,
  GST_VIDEO_FORMAT_BGR16,
  GST_VIDEO_FORMAT_RGB15,
  GST_VIDEO_FORMAT_BGR15,
  GST_VIDEO_FORMAT_UYVP,
  GST_VIDEO_FORMAT_A420,
  GST_VIDEO_FORMAT_RGB8_PALETTED,
  GST_VIDEO_FORMAT_YUV9,
  GST_VIDEO_FORMAT_YVU9,
  GST_VIDEO_FORMAT_IYU1,
  GST_VIDEO_FORMAT_ARGB64,
  GST_VIDEO_FORMAT_AYUV64,
  GST_VIDEO_FORMAT_r210,
  GST_VIDEO_FORMAT_ENCODED
} GstVideoFormat;
Enum values describing the most common video formats.
GST_VIDEO_FORMAT_UNKNOWN : Unknown or unset video format id
GST_VIDEO_FORMAT_I420 : planar 4:2:0 YUV
GST_VIDEO_FORMAT_YV12 : planar 4:2:0 YVU (like I420 but UV planes swapped)
GST_VIDEO_FORMAT_YUY2 : packed 4:2:2 YUV (Y0-U0-Y1-V0 Y2-U2-Y3-V2 Y4 ...)
GST_VIDEO_FORMAT_UYVY : packed 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
GST_VIDEO_FORMAT_AYUV : packed 4:4:4 YUV with alpha channel (A0-Y0-U0-V0 ...)
GST_VIDEO_FORMAT_RGBx : sparse rgb packed into 32 bit, space last
GST_VIDEO_FORMAT_BGRx : sparse reverse rgb packed into 32 bit, space last
GST_VIDEO_FORMAT_xRGB : sparse rgb packed into 32 bit, space first
GST_VIDEO_FORMAT_xBGR : sparse reverse rgb packed into 32 bit, space first
GST_VIDEO_FORMAT_RGBA : rgb with alpha channel last
GST_VIDEO_FORMAT_BGRA : reverse rgb with alpha channel last
GST_VIDEO_FORMAT_ARGB : rgb with alpha channel first
GST_VIDEO_FORMAT_ABGR : reverse rgb with alpha channel first
GST_VIDEO_FORMAT_RGB : rgb
GST_VIDEO_FORMAT_BGR : reverse rgb
GST_VIDEO_FORMAT_Y41B : planar 4:1:1 YUV (Since: 0.10.18)
GST_VIDEO_FORMAT_Y42B : planar 4:2:2 YUV (Since: 0.10.18)
GST_VIDEO_FORMAT_YVYU : packed 4:2:2 YUV (Y0-V0-Y1-U0 Y2-V2-Y3-U2 Y4 ...) (Since: 0.10.23)
GST_VIDEO_FORMAT_Y444 : planar 4:4:4 YUV (Since: 0.10.24)
GST_VIDEO_FORMAT_v210 : packed 4:2:2 10-bit YUV, complex format (Since: 0.10.24)
GST_VIDEO_FORMAT_v216 : packed 4:2:2 16-bit YUV, Y0-U0-Y1-V1 order (Since: 0.10.24)
GST_VIDEO_FORMAT_NV12 : planar 4:2:0 YUV with interleaved UV plane (Since: 0.10.26)
GST_VIDEO_FORMAT_NV21 : planar 4:2:0 YUV with interleaved VU plane (Since: 0.10.26)
GST_VIDEO_FORMAT_GRAY8 : 8-bit grayscale (Since: 0.10.29)
GST_VIDEO_FORMAT_GRAY16_BE : 16-bit grayscale, most significant byte first (Since: 0.10.29)
GST_VIDEO_FORMAT_GRAY16_LE : 16-bit grayscale, least significant byte first (Since: 0.10.29)
GST_VIDEO_FORMAT_v308 : packed 4:4:4 YUV (Since: 0.10.29)
GST_VIDEO_FORMAT_Y800 : same as GST_VIDEO_FORMAT_GRAY8 (Since: 0.10.30)
GST_VIDEO_FORMAT_Y16 : same as GST_VIDEO_FORMAT_GRAY16_LE (Since: 0.10.30)
GST_VIDEO_FORMAT_RGB16 : rgb 5-6-5 bits per component (Since: 0.10.30)
GST_VIDEO_FORMAT_BGR16 : reverse rgb 5-6-5 bits per component (Since: 0.10.30)
GST_VIDEO_FORMAT_RGB15 : rgb 5-5-5 bits per component (Since: 0.10.30)
GST_VIDEO_FORMAT_BGR15 : reverse rgb 5-5-5 bits per component (Since: 0.10.30)
GST_VIDEO_FORMAT_UYVP : packed 10-bit 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...) (Since: 0.10.31)
GST_VIDEO_FORMAT_A420 : planar 4:4:2:0 AYUV (Since: 0.10.31)
GST_VIDEO_FORMAT_RGB8_PALETTED : 8-bit paletted RGB (Since: 0.10.32)
GST_VIDEO_FORMAT_YUV9 : planar 4:1:0 YUV (Since: 0.10.32)
GST_VIDEO_FORMAT_YVU9 : planar 4:1:0 YUV (like YUV9 but UV planes swapped) (Since: 0.10.32)
GST_VIDEO_FORMAT_IYU1 : packed 4:1:1 YUV (Cb-Y0-Y1-Cr-Y2-Y3 ...) (Since: 0.10.32)
GST_VIDEO_FORMAT_ARGB64 : rgb with alpha channel first, 16 bits per channel (Since: 0.10.33)
GST_VIDEO_FORMAT_AYUV64 : packed 4:4:4 YUV with alpha channel, 16 bits per channel (A0-Y0-U0-V0 ...) (Since: 0.10.33)
GST_VIDEO_FORMAT_r210 : packed 4:4:4 RGB, 10 bits per channel (Since: 0.10.33)
GST_VIDEO_FORMAT_ENCODED : Encoded video format
struct GstVideoFormatInfo {
  GstVideoFormat format;
  const gchar *name;
  const gchar *description;
  GstVideoFormatFlags flags;
  guint bits;
  guint n_components;
  guint shift[GST_VIDEO_MAX_COMPONENTS];
  guint depth[GST_VIDEO_MAX_COMPONENTS];
  gint pixel_stride[GST_VIDEO_MAX_COMPONENTS];
  guint n_planes;
  guint plane[GST_VIDEO_MAX_COMPONENTS];
  guint poffset[GST_VIDEO_MAX_COMPONENTS];
  guint w_sub[GST_VIDEO_MAX_COMPONENTS];
  guint h_sub[GST_VIDEO_MAX_COMPONENTS];

  GstVideoFormat unpack_format;
  GstVideoFormatUnpack unpack_func;
  GstVideoFormatPack pack_func;
};
Information for a video format.
GstVideoFormat format : the GstVideoFormat
const gchar *name : string representation of the format
const gchar *description : user readable description of the format
GstVideoFormatFlags flags : the GstVideoFormatFlags
guint bits : The number of bits used to pack data items. This can be less than 8 when multiple pixels are stored in a byte. For values > 8 multiple bytes should be read according to the endianness flag before applying the shift and mask.
guint n_components : the number of components in the video format.
guint shift[GST_VIDEO_MAX_COMPONENTS] : the number of bits to shift away to get the component data
guint depth[GST_VIDEO_MAX_COMPONENTS] : the depth in bits for each component
gint pixel_stride[GST_VIDEO_MAX_COMPONENTS] : the pixel stride of each component. This is the amount of bytes to the pixel immediately to the right. When bits < 8, the stride is expressed in bits.
guint n_planes : the number of planes for this format. The number of planes can be less than the amount of components when multiple components are packed into one plane.
guint plane[GST_VIDEO_MAX_COMPONENTS] : the plane number where a component can be found
guint poffset[GST_VIDEO_MAX_COMPONENTS] : the offset in the plane where the first pixel of the component can be found. If bits < 8 the amount is specified in bits.
guint w_sub[GST_VIDEO_MAX_COMPONENTS] : subsampling factor of the width for the component. Use GST_VIDEO_SUB_SCALE to scale a width.
guint h_sub[GST_VIDEO_MAX_COMPONENTS] : subsampling factor of the height for the component. Use GST_VIDEO_SUB_SCALE to scale a height.
GstVideoFormat unpack_format : the format of the unpacked pixels.
GstVideoFormatUnpack unpack_func : an unpack function for this format
GstVideoFormatPack pack_func : a pack function for this format
#define GST_VIDEO_FORMAT_INFO_HAS_ALPHA(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
#define GST_VIDEO_FORMAT_INFO_HAS_PALETTE(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_PALETTE)
#define GST_VIDEO_FORMAT_INFO_IS_GRAY(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_GRAY)
#define GST_VIDEO_FORMAT_INFO_IS_LE(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_LE)
#define GST_VIDEO_FORMAT_INFO_IS_RGB(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_RGB)
#define GST_VIDEO_FORMAT_INFO_IS_YUV(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_YUV)
#define GST_VIDEO_FORMAT_INFO_IS_COMPLEX(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_COMPLEX)
#define GST_VIDEO_FORMAT_INFO_N_COMPONENTS(info) ((info)->n_components)
#define GST_VIDEO_FORMAT_INFO_PSTRIDE(info,c) ((info)->pixel_stride[c])
#define GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info,c,h) GST_VIDEO_SUB_SCALE ((info)->h_sub[c],(h))
#define GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(info,c,w) GST_VIDEO_SUB_SCALE ((info)->w_sub[c],(w))
#define GST_VIDEO_FORMAT_INFO_STRIDE(info,strides,comp) ((strides)[(info)->plane[comp]])
struct GstVideoInfo {
  const GstVideoFormatInfo *finfo;

  GstVideoInterlaceMode     interlace_mode;
  GstVideoFlags             flags;
  gint                      width;
  gint                      height;
  gsize                     size;
  gint                      views;

  GstVideoChromaSite        chroma_site;
  GstVideoColorimetry       colorimetry;
  GstBuffer                *palette;

  gint                      par_n;
  gint                      par_d;
  gint                      fps_n;
  gint                      fps_d;

  gsize                     offset[GST_VIDEO_MAX_PLANES];
  gint                      stride[GST_VIDEO_MAX_PLANES];
};
Information describing image properties. This information can be filled in from GstCaps with gst_video_info_from_caps(). The information is also used to store the specific video info when mapping a video frame with gst_video_frame_map().
Use the provided macros to access the info in this structure.
const GstVideoFormatInfo *finfo : the format info of the video
GstVideoInterlaceMode interlace_mode : the interlace mode
GstVideoFlags flags : additional video flags
gint width : the width of the video
gint height : the height of the video
gsize size : the default size of one frame
gint views : the number of views for multiview video
GstVideoChromaSite chroma_site : a GstVideoChromaSite
GstVideoColorimetry colorimetry : the colorimetry info
GstBuffer *palette : a buffer with palette data
gint par_n : the pixel-aspect-ratio numerator
gint par_d : the pixel-aspect-ratio denominator
gint fps_n : the framerate numerator
gint fps_d : the framerate denominator
gsize offset[GST_VIDEO_MAX_PLANES] : offsets of the planes
gint stride[GST_VIDEO_MAX_PLANES] : strides of the planes
#define GST_VIDEO_INFO_COMP_DATA(i,d,c) GST_VIDEO_FORMAT_INFO_DATA((i)->finfo,d,(c))
#define GST_VIDEO_INFO_COMP_DEPTH(i,c) GST_VIDEO_FORMAT_INFO_DEPTH((i)->finfo,(c))
#define GST_VIDEO_INFO_COMP_HEIGHT(i,c) GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT((i)->finfo,(c),(i)->height)
#define GST_VIDEO_INFO_COMP_OFFSET(i,c) GST_VIDEO_FORMAT_INFO_OFFSET((i)->finfo,(i)->offset,(c))
#define GST_VIDEO_INFO_COMP_PLANE(i,c) GST_VIDEO_FORMAT_INFO_PLANE((i)->finfo,(c))
#define GST_VIDEO_INFO_COMP_POFFSET(i,c) GST_VIDEO_FORMAT_INFO_POFFSET((i)->finfo,(c))
#define GST_VIDEO_INFO_COMP_PSTRIDE(i,c) GST_VIDEO_FORMAT_INFO_PSTRIDE((i)->finfo,(c))
#define GST_VIDEO_INFO_COMP_STRIDE(i,c) GST_VIDEO_FORMAT_INFO_STRIDE((i)->finfo,(i)->stride,(c))
#define GST_VIDEO_INFO_COMP_WIDTH(i,c) GST_VIDEO_FORMAT_INFO_SCALE_WIDTH((i)->finfo,(c),(i)->width)
#define GST_VIDEO_INFO_FLAG_IS_SET(i,flag) ((GST_VIDEO_INFO_FLAGS(i) & (flag)) == (flag))
#define GST_VIDEO_INFO_FLAG_SET(i,flag) (GST_VIDEO_INFO_FLAGS(i) |= (flag))
#define GST_VIDEO_INFO_FLAG_UNSET(i,flag) (GST_VIDEO_INFO_FLAGS(i) &= ~(flag))
#define GST_VIDEO_INFO_HAS_ALPHA(i) (GST_VIDEO_FORMAT_INFO_HAS_ALPHA((i)->finfo))
#define GST_VIDEO_INFO_IS_GRAY(i) (GST_VIDEO_FORMAT_INFO_IS_GRAY((i)->finfo))
#define GST_VIDEO_INFO_IS_INTERLACED(i) ((i)->interlace_mode != GST_VIDEO_INTERLACE_MODE_PROGRESSIVE)
#define GST_VIDEO_INFO_N_COMPONENTS(i) GST_VIDEO_FORMAT_INFO_N_COMPONENTS((i)->finfo)
#define GST_VIDEO_INFO_N_PLANES(i) (GST_VIDEO_FORMAT_INFO_N_PLANES((i)->finfo))
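A short sketch of the accessor macros on a GstVideoInfo set up for 4:2:0 video:

  #include <gst/video/video.h>

  static void
  print_info (void)
  {
    GstVideoInfo info;

    gst_video_info_init (&info);
    gst_video_info_set_format (&info, GST_VIDEO_FORMAT_I420, 320, 240);

    g_print ("%s: %u planes, frame size %" G_GSIZE_FORMAT "\n",
        GST_VIDEO_INFO_NAME (&info),
        GST_VIDEO_INFO_N_PLANES (&info),
        GST_VIDEO_INFO_SIZE (&info));

    /* the U component (index 1) is subsampled by 2 in both directions */
    g_print ("U component: %dx%d, stride %d\n",
        GST_VIDEO_INFO_COMP_WIDTH (&info, 1),
        GST_VIDEO_INFO_COMP_HEIGHT (&info, 1),
        GST_VIDEO_INFO_COMP_STRIDE (&info, 1));
  }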
gboolean gst_video_info_convert (GstVideoInfo *info, GstFormat src_format, gint64 src_value, GstFormat dest_format, gint64 *dest_value);
gboolean gst_video_info_from_caps (GstVideoInfo *info, const GstCaps *caps);
Parse caps and update info.
info : a GstVideoInfo
caps : a GstCaps
Returns : TRUE if caps could be parsed
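A typical sketch, for example when handling a CAPS event on a sink pad (caps is assumed to come from that event):

  #include <gst/video/video.h>

  static gboolean
  setup_from_caps (GstCaps *caps)
  {
    GstVideoInfo info;

    if (!gst_video_info_from_caps (&info, caps))
      return FALSE;             /* not valid raw video caps */

    g_print ("negotiated %s, %dx%d\n",
        GST_VIDEO_INFO_NAME (&info),
        GST_VIDEO_INFO_WIDTH (&info),
        GST_VIDEO_INFO_HEIGHT (&info));

    return TRUE;
  }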
void gst_video_info_init (GstVideoInfo *info);
Initialize info with default values.
info : a GstVideoInfo
void gst_video_info_set_format (GstVideoInfo *info, GstVideoFormat format, guint width, guint height);
Set the default info for a video frame with the given format, width and height.
info : a GstVideoInfo
format : the format
width : a width
height : a height
GstCaps * gst_video_info_to_caps (GstVideoInfo *info);
Convert the values of info into a GstCaps.
info : a GstVideoInfo
Returns : a new GstCaps containing the info of info.
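For example, a sketch of building caps for 640x480 I420 at 30 frames per second (the framerate fields are set directly on the structure as described above):

  #include <gst/video/video.h>

  static GstCaps *
  make_caps (void)
  {
    GstVideoInfo info;

    gst_video_info_init (&info);
    gst_video_info_set_format (&info, GST_VIDEO_FORMAT_I420, 640, 480);
    info.fps_n = 30;
    info.fps_d = 1;

    /* the caller owns the returned caps */
    return gst_video_info_to_caps (&info);
  }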
gboolean gst_video_calculate_display_ratio (guint *dar_n, guint *dar_d, guint video_width, guint video_height, guint video_par_n, guint video_par_d, guint display_par_n, guint display_par_d);
Given the pixel aspect ratio and size of an input video frame, and the pixel aspect ratio of the intended display device, calculates the actual display ratio the video will be rendered with.
dar_n : Numerator of the calculated display_ratio
dar_d : Denominator of the calculated display_ratio
video_width : Width of the video frame in pixels
video_height : Height of the video frame in pixels
video_par_n : Numerator of the pixel aspect ratio of the input video
video_par_d : Denominator of the pixel aspect ratio of the input video
display_par_n : Numerator of the pixel aspect ratio of the display device
display_par_d : Denominator of the pixel aspect ratio of the display device
Returns : A boolean indicating success and a calculated display ratio in the dar_n and dar_d parameters. The return value is FALSE in the case of integer overflow or other error.
Since 0.10.7
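For example, a 720x576 frame with a 16/15 pixel aspect ratio shown on a square-pixel (1/1) display yields a 4:3 display ratio:

  guint dar_n, dar_d;

  if (gst_video_calculate_display_ratio (&dar_n, &dar_d,
          720, 576, 16, 15, 1, 1))
    g_print ("display ratio %u:%u\n", dar_n, dar_d);   /* prints 4:3 */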
guint32 gst_video_format_to_fourcc (GstVideoFormat format);
Converts a GstVideoFormat value into the corresponding FOURCC. Only a few YUV formats have corresponding FOURCC values. If format has no corresponding FOURCC value, 0 is returned.
format : a GstVideoFormat video format
Returns : the FOURCC corresponding to format
Since 0.10.16
GstVideoFormat gst_video_format_from_fourcc (guint32 fourcc);
Converts a FOURCC value into the corresponding GstVideoFormat. If the FOURCC cannot be represented by GstVideoFormat, GST_VIDEO_FORMAT_UNKNOWN is returned.
fourcc : a FOURCC value representing raw YUV video
Returns : the GstVideoFormat describing the FOURCC value
Since 0.10.16
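A small sketch of the round trip between GstVideoFormat and FOURCC, using the FOURCC printing helpers from core GStreamer:

  guint32 fourcc = gst_video_format_to_fourcc (GST_VIDEO_FORMAT_I420);

  if (fourcc != 0)
    g_print ("FOURCC %" GST_FOURCC_FORMAT "\n", GST_FOURCC_ARGS (fourcc));

  if (gst_video_format_from_fourcc (fourcc) == GST_VIDEO_FORMAT_I420)
    g_print ("round trip OK\n");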
GstBuffer * gst_video_parse_caps_palette (GstCaps *caps);
Returns the palette data from the caps as a GstBuffer. For GST_VIDEO_FORMAT_RGB8_PALETTED this contains 256 guint32 values, each containing ARGB colors in native endianness.
Since 0.10.32
void (*GstVideoConvertSampleCallback) (GstSample *sample, GError *error, gpointer user_data);
GstSample * gst_video_convert_sample (GstSample *sample, const GstCaps *to_caps, GstClockTime timeout, GError **error);
Converts a raw video buffer into the specified output caps.
The output caps can be any raw video format or any image format (jpeg, png, ...).
The width, height and pixel-aspect-ratio can also be specified in the output caps.
sample : a GstSample
to_caps : the GstCaps to convert to
timeout : the maximum amount of time allowed for the processing.
error : pointer to a GError. Can be NULL.
Returns : The converted GstSample, or NULL if an error happened (in which case error will point to the GError).
Since 0.10.31
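A sketch of converting a sample (for example one pulled from an appsink) into a PNG-encoded sample, waiting at most one second:

  #include <gst/video/video.h>

  static void
  save_as_png_sample (GstSample *sample)
  {
    GstCaps *to_caps = gst_caps_new_empty_simple ("image/png");
    GError *err = NULL;
    GstSample *converted;

    converted = gst_video_convert_sample (sample, to_caps, GST_SECOND, &err);
    gst_caps_unref (to_caps);

    if (converted == NULL) {
      g_printerr ("conversion failed: %s\n", err->message);
      g_clear_error (&err);
      return;
    }

    /* ... map the buffer of the converted sample and write it to a file ... */
    gst_sample_unref (converted);
  }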
void gst_video_convert_sample_async (GstSample *sample, const GstCaps *to_caps, GstClockTime timeout, GstVideoConvertSampleCallback callback, gpointer user_data, GDestroyNotify destroy_notify);
Converts a raw video buffer into the specified output caps.
The output caps can be any raw video format or any image format (jpeg, png, ...).
The width, height and pixel-aspect-ratio can also be specified in the output caps.
callback will be called after conversion, when an error occurred or if conversion didn't finish after timeout. callback will always be called from the thread default GMainContext, see g_main_context_get_thread_default(). If GLib before 2.22 is used, this will always be the global default main context.
destroy_notify will be called after the callback was called and user_data is not needed anymore.
sample : a GstSample
to_caps : the GstCaps to convert to
timeout : the maximum amount of time allowed for the processing.
callback : GstVideoConvertSampleCallback that will be called after conversion.
user_data : extra data that will be passed to the callback
destroy_notify : GDestroyNotify to be called after user_data is not needed anymore
Since 0.10.31
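The asynchronous variant follows the same pattern; a sketch with a callback that runs in the thread-default GMainContext (the sample is assumed to come from elsewhere):

  #include <gst/video/video.h>

  static void
  on_converted (GstSample *converted, GError *error, gpointer user_data)
  {
    if (error != NULL) {
      g_printerr ("conversion failed: %s\n", error->message);
      return;
    }
    /* use the converted image/png sample here */
  }

  static void
  start_conversion (GstSample *sample)
  {
    GstCaps *to_caps = gst_caps_new_empty_simple ("image/png");

    gst_video_convert_sample_async (sample, to_caps, GST_SECOND,
        on_converted, NULL, NULL);
    gst_caps_unref (to_caps);
  }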
GstEvent * gst_video_event_new_still_frame (gboolean in_still);
Creates a new Still Frame event. If in_still is TRUE, then the event represents the start of a still frame sequence. If it is FALSE, then the event ends a still frame sequence.
To parse an event created by gst_video_event_new_still_frame() use gst_video_event_parse_still_frame().
in_still : boolean value for the still-frame state of the event.
Returns : The new GstEvent
Since 0.10.26
gboolean gst_video_event_parse_still_frame (GstEvent *event, gboolean *in_still);
Parse a GstEvent, identify if it is a Still Frame event, and return the still-frame state from the event if it is. If the event represents the start of a still frame, the in_still variable will be set to TRUE, otherwise FALSE. It is OK to pass NULL for the in_still variable in order to just check whether the event is a valid still-frame event.
Create a still frame event using gst_video_event_new_still_frame().
event : A GstEvent to parse
in_still : A boolean to receive the still-frame status from the event, or NULL
Returns : TRUE if the event is a valid still-frame event, FALSE if not
Since 0.10.26
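A sketch of handling the event in an element's event function (event is the received GstEvent):

  gboolean in_still;

  if (gst_video_event_parse_still_frame (event, &in_still)) {
    /* in_still is TRUE at the start and FALSE at the end of the sequence */
    g_print ("still frame sequence %s\n", in_still ? "starts" : "ends");
  }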
GstEvent * gst_video_event_new_upstream_force_key_unit (GstClockTime running_time, gboolean all_headers, guint count);
Creates a new upstream force key unit event. An upstream force key unit event can be sent to request upstream elements to produce a key unit.
running_time can be set to request a new key unit at a specific running_time. If set to GST_CLOCK_TIME_NONE, upstream elements will produce a new key unit as soon as possible.
To parse an event created by gst_video_event_new_upstream_force_key_unit() use gst_video_event_parse_upstream_force_key_unit().
running_time : the running_time at which a new key unit should be produced
all_headers : TRUE to produce headers when starting a new key unit
count : integer that can be used to number key units
Returns : The new GstEvent
Since 0.10.36
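For example, an element can request a new key unit from an upstream encoder by pushing the event through its sink pad. A sketch (the sink pad is assumed to be the element's own):

  #include <gst/video/video.h>

  static void
  request_key_unit (GstPad *sinkpad)
  {
    GstEvent *event;

    /* GST_CLOCK_TIME_NONE: produce a key unit as soon as possible;
     * TRUE: also resend stream headers; 1: an arbitrary count value */
    event = gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
        TRUE, 1);
    gst_pad_push_event (sinkpad, event);
  }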
GstEvent * gst_video_event_new_downstream_force_key_unit (GstClockTime timestamp, GstClockTime stream_time, GstClockTime running_time, gboolean all_headers, guint count);
Creates a new downstream force key unit event. A downstream force key unit event can be sent down the pipeline to request downstream elements to produce a key unit. A downstream force key unit event must also be sent when handling an upstream force key unit event to notify downstream that the latter has been handled.
To parse an event created by gst_video_event_new_downstream_force_key_unit() use gst_video_event_parse_downstream_force_key_unit().
timestamp : the timestamp of the buffer that starts a new key unit
stream_time : the stream_time of the buffer that starts a new key unit
running_time : the running_time of the buffer that starts a new key unit
all_headers : TRUE to produce headers when starting a new key unit
count : integer that can be used to number key units
Returns : The new GstEvent
Since 0.10.36
gboolean gst_video_event_is_force_key_unit (GstEvent *event);
Checks if an event is a force key unit event. Returns true for both upstream and downstream force key unit events.
Since 0.10.36
gboolean gst_video_event_parse_upstream_force_key_unit (GstEvent *event, GstClockTime *running_time, gboolean *all_headers, guint *count);
Get running-time, all-headers and count in the force key unit event. See gst_video_event_new_upstream_force_key_unit() for a full description of the upstream force key unit event.
Create an upstream force key unit event using gst_video_event_new_upstream_force_key_unit().
event : A GstEvent to parse
running_time : A pointer to the running_time in the event. [out]
all_headers : A pointer to the all_headers flag in the event. [out]
count : A pointer to the count field in the event. [out]
Returns : TRUE if the event is a valid upstream force-key-unit event, FALSE if not
Since 0.10.36
gboolean gst_video_event_parse_downstream_force_key_unit (GstEvent *event, GstClockTime *timestamp, GstClockTime *stream_time, GstClockTime *running_time, gboolean *all_headers, guint *count);
Get timestamp, stream-time, running-time, all-headers and count in the force key unit event. See gst_video_event_new_downstream_force_key_unit() for a full description of the downstream force key unit event.
event : A GstEvent to parse
timestamp : A pointer to the timestamp in the event. [out]
stream_time : A pointer to the stream-time in the event. [out]
running_time : A pointer to the running-time in the event. [out]
all_headers : A pointer to the all_headers flag in the event. [out]
count : A pointer to the count field of the event. [out]
Returns : TRUE if the event is a valid downstream force key unit event.
Since 0.10.36
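A sketch of handling the event, for example in the sink-pad event function of an element that needs to know where new key units start (event is the received GstEvent):

  #include <gst/video/video.h>

  static void
  handle_force_key_unit (GstEvent *event)
  {
    GstClockTime timestamp, stream_time, running_time;
    gboolean all_headers;
    guint count;

    if (gst_video_event_is_force_key_unit (event) &&
        gst_video_event_parse_downstream_force_key_unit (event,
            &timestamp, &stream_time, &running_time, &all_headers, &count)) {
      /* a new key unit starts at running_time; if all_headers is TRUE
       * the stream headers are (re)sent as well */
    }
  }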