Play a TS stream over UDP
gst-launch-1.0 udpsrc uri=udp://233.233.233.223:6666 \
! queue2 use-buffering=1 max-size-buffers=0 \
max-size-time=0 max-size-bytes=0 low-percent=50 \
! decodebin ! videoconvert ! autovideosink
Delay (rebroadcast an RTMP stream with a 10-second delay)
gst-launch-1.0 rtmpsrc location=rtmp://172.17.1.55:1935/test/orig ! flvdemux ! queue min-threshold-time=10000000000 max-size-buffers=0 max-size-time=0 max-size-bytes=0 ! flvmux ! rtmpsink location=rtmp://172.17.1.55:1935/test/delay
Generate a video with overlaid text
gst-launch-1.0 videotestsrc pattern=17 foreground-color=0 ! video/x-raw,width=1280,height=720 ! textoverlay text=123456 ! autovideosink
textoverlay
The textoverlay plugin renders text with Pango, so it supports the Pango markup language; a string can be output like this [2]:
gst-launch-1.0 videotestsrc ! textoverlay text="<span foreground=\"blue\" background=\"green\" size=\"x-large\">Blue text</span> is <i>cool</i>" ! xvimagesink
See the Pango Text Attribute Markup Language documentation for the exact format definition.
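The same markup can also be set from application code through textoverlay's "text" property. A minimal sketch (the surrounding pipeline is only an assumed example, not from these notes):

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  gst_init (&argc, &argv);

  GstElement *pipeline = gst_parse_launch (
      "videotestsrc ! textoverlay name=overlay ! videoconvert ! autovideosink",
      NULL);
  GstElement *overlay = gst_bin_get_by_name (GST_BIN (pipeline), "overlay");

  /* Pango markup in the "text" property, same string as the command above. */
  g_object_set (overlay, "text",
      "<span foreground=\"blue\" background=\"green\" size=\"x-large\">"
      "Blue text</span> is <i>cool</i>", NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_usleep (5 * G_USEC_PER_SEC);
  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_object_unref (overlay);
  gst_object_unref (pipeline);
  return 0;
}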
Video mixing (videomixer)
gst-launch-1.0 \
videotestsrc pattern=1 ! \
video/x-raw,format=AYUV,framerate=\(fraction\)10/1,width=100,height=100 ! \
videobox border-alpha=0 top=-70 bottom=-70 right=-220 ! \
videomixer name=mix sink_0::alpha=0.7 sink_1::alpha=0.5 ! \
videoconvert ! xvimagesink \
videotestsrc ! \
video/x-raw,format=AYUV,framerate=\(fraction\)5/1,width=320,height=240 ! mix.
Dump the pipeline graph
Add the following line to your code:
GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN (pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
Before running the program, set the environment variable:
# export GST_DEBUG_DUMP_DOT_DIR=/tmp/
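A minimal self-contained sketch of where the dump call goes (the test pipeline here is only an assumed example); after the program runs, render the dumped .dot file with Graphviz:

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  gst_init (&argc, &argv);

  GstElement *pipeline = gst_parse_launch (
      "videotestsrc ! videoconvert ! autovideosink", NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  /* Wait until the state change finishes so the graph shows negotiated caps. */
  gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);

  /* Writes $GST_DEBUG_DUMP_DOT_DIR/pipeline.dot */
  GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN (pipeline),
      GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}

# dot -Tpng /tmp/pipeline.dot -o pipeline.png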
Build GStreamer
PKG_CONFIG_PATH=/home/meng/bins/gstreamer/lib/pkgconfig \
./configure --prefix=/home/meng/bins/gstreamer
Build the gst-libav plugin with nvenc support
PKG_CONFIG_PATH=/home/meng/bins/gstreamer/lib/pkgconfig ./configure --prefix=/home/meng/bins/gstreamer --with-libav-extra-configure="--enable-nonfree --enable-nvenc --enable-gpl --enable-libx264"
Gradually changing a GStreamer element's property value
/* Attach a linear interpolation control source to a property so its value
 * is interpolated smoothly over time. */
static GstTimedValueControlSource *
set_interpolation (GstObject * element, GESVideoTransitionPrivate * priv,
    const gchar * propname)
{
  GstControlSource *control_source;

  /* Reset the property, then bind a control source directly to it. */
  g_object_set (element, propname, (gfloat) 0.0, NULL);

  control_source = gst_interpolation_control_source_new ();
  gst_object_add_control_binding (GST_OBJECT (element),
      gst_direct_control_binding_new (GST_OBJECT (element), propname,
          control_source));
  g_object_set (control_source, "mode", GST_INTERPOLATION_MODE_LINEAR, NULL);

  return GST_TIMED_VALUE_CONTROL_SOURCE (control_source);
}

static void
ges_video_transition_update_control_source (GstTimedValueControlSource * ts,
    guint64 duration, gdouble start_value, gdouble end_value)
{
  /* Two keyframes: start_value at t=0 and end_value at t=duration;
   * the control source interpolates linearly between them. */
  gst_timed_value_control_source_unset_all (ts);
  gst_timed_value_control_source_set (ts, 0, start_value);
  gst_timed_value_control_source_set (ts, duration, end_value);
}

/* Usage: fade the smpte element's "position" from 1.0 to 0.0 over duration. */
priv->smpte_control_source =
    set_interpolation (GST_OBJECT (priv->smpte), priv, "position");
ges_video_transition_update_control_source (priv->smpte_control_source,
    duration, 1.0, 0.0);
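As a standalone illustration of the same mechanism, here is a minimal sketch (pipeline, property, and timings are assumptions, not from the original notes) that fades the volume element's "volume" property to silence over 5 seconds:

/* Build: gcc fade.c -o fade $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-controller-1.0) */
#include <gst/gst.h>
#include <gst/controller/gstinterpolationcontrolsource.h>
#include <gst/controller/gstdirectcontrolbinding.h>

int
main (int argc, char *argv[])
{
  gst_init (&argc, &argv);

  GstElement *pipeline = gst_parse_launch (
      "audiotestsrc ! volume name=vol ! autoaudiosink", NULL);
  GstElement *vol = gst_bin_get_by_name (GST_BIN (pipeline), "vol");

  /* Bind a linear interpolation control source to the "volume" property. */
  GstControlSource *cs = gst_interpolation_control_source_new ();
  g_object_set (cs, "mode", GST_INTERPOLATION_MODE_LINEAR, NULL);
  gst_object_add_control_binding (GST_OBJECT (vol),
      gst_direct_control_binding_new (GST_OBJECT (vol), "volume", cs));

  /* gst_direct_control_binding_new() maps control values from [0,1] onto the
   * property's full range (0..10 for "volume"), so 0.1 means unity gain. */
  GstTimedValueControlSource *tvcs = GST_TIMED_VALUE_CONTROL_SOURCE (cs);
  gst_timed_value_control_source_set (tvcs, 0, 0.1);
  gst_timed_value_control_source_set (tvcs, 5 * GST_SECOND, 0.0);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_usleep (6 * G_USEC_PER_SEC);
  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_object_unref (vol);
  gst_object_unref (pipeline);
  return 0;
}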
Record an RTMP stream
GST_DEBUG_NO_COLOR=1 GST_DEBUG=4,GST_HUB:2 LD_LIBRARY_PATH=/root/nmpdirector/lib gst-launch-1.0 rtmpsrc location=rtmp://host/app/stream ! flvdemux ! flvmux ! filesink location=/root/test.flv 2> log.txt
Picture-in-picture
MAIN_SCREEN=file:///file/path
SUB_SCREEN=file:///file/path
gst-launch-1.0 -v \
uridecodebin uri=$MAIN_SCREEN name=decoder1 \
uridecodebin uri=$SUB_SCREEN name=decoder2 \
videomixer name=mix \
sink_0::xpos=0 sink_0::ypos=0 \
sink_1::xpos=640 sink_1::ypos=360 \
! autovideosink \
decoder1. \
! queue ! videoscale \
! video/x-raw,width=1280,height=720 \
! videoconvert \
! mix.sink_0 \
decoder2. \
! queue ! videoscale \
! video/x-raw,width=640,height=360 \
! videoconvert \
! mix.sink_1