03.动态构建 Pipeline
2026/1/15大约 4 分钟dynamic-pipelinepad
GStreamer学习笔记:03.动态构建 Pipeline
本示例尝试处理动态 Pad(Dynamically Pads),这是处理复杂媒体格式(如同时包含音视频的文件)时的关键技术。
核心概念
1. 数据结构封装
使用结构体包含所有元素,以便传递给回调函数:
typedef struct _CustomData
{
GstElement *pipeline;
GstElement *source;
GstElement *videoconvert;
GstElement *videosink;
GstElement *audioconvert;
GstElement *audioresample;
GstElement *audiosink;
} CustomData;
2. 创建元素
// uri 解码元素
data.source = gst_element_factory_make("uridecodebin", "source");
// 视频处理相关元素
data.videoconvert = gst_element_factory_make("videoconvert", "videoconvert"); /* 视频格式转换 */
data.videosink = gst_element_factory_make("autovideosink", "videosink"); /* 视频播放 */
// 音频处理相关元素
data.audioconvert = gst_element_factory_make("audioconvert", "audioconvert");
/* audioconvert 转换格式以匹配前后音频编解码器的能力差异(不同平台下) */
data.audioresample = gst_element_factory_make("audioresample", "resample");
/* audioresample 重新采样以匹配前后音频编解码器的不同采样率 */
data.audiosink = gst_element_factory_make("autoaudiosink", "audiosink");
/* autoaudiosink 把音频流渲染到声卡上 */
3. 关于 Pad 动态创建
由于源元素 uridecodebin 的 source_pad 是在收到一些数据后才会自动动态创建,所以这里无法直接连接,只能后续等待 pad-added 信号在回调函数中连接。
Pipeline 结构
source
- pad:src_0 -> videoconvert -> videosink
- pad:src_1 -> audioconvert -> audioresample -> audiosink
链接元素
gst_bin_add_many(
GST_BIN(data.pipeline),
data.source,
data.videoconvert, data.videosink,
data.audioconvert, data.audioresample, data.audiosink,
NULL
);
// 先链接非源元素
if (
!gst_element_link_many(data.videoconvert, data.videosink, NULL) ||
!gst_element_link_many(data.audioconvert, data.audioresample, data.audiosink, NULL)
)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
return -1;
}
4. 绑定 Pad-Added 信号
当收到 pad-added 信号时,执行回调连接 src_pad 和后续元素:
g_signal_connect(data.source, "pad-added", G_CALLBACK(pad_added_handler), &data);
查看所有支持的信号:
gst-inspect-1.0 uridecodebin
5. Pad-Added 回调处理
获取目标 Pad
GstPad *video_sink_pad = gst_element_get_static_pad(data->videoconvert, "sink");
GstPad *audio_sink_pad = gst_element_get_static_pad(data->audioconvert, "sink");
检查 Pad 是否已连接
if (gst_pad_is_linked(video_sink_pad) && gst_pad_is_linked(audio_sink_pad))
{
g_print("We are already linked. Ignoring.\n");
goto exit;
}
检查 Pad 的 Capabilities
// 获取当前 pad 支持的所有 capabilities 能力(GstCaps[])
new_pad_caps = gst_pad_get_current_caps(new_pad);
// 获取所有 capabilities 能力的第一个 capability(GstCaps 结构体)
new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
// 获取 capability 能力(GstCaps 结构体)的 name 属性
new_pad_type = gst_structure_get_name(new_pad_struct);
根据 Pad 类型链接
if (g_str_has_prefix(new_pad_type, "video/x-raw"))
{
// 顺序必须是 src_pad -> sink_pad
ret = gst_pad_link(new_pad, video_sink_pad);
if (GST_PAD_LINK_FAILED(ret))
g_print("Type is '%s' but link failed.\n", new_pad_type);
else
g_print("Link succeeded (type '%s').\n", new_pad_type);
}
else if (g_str_has_prefix(new_pad_type, "audio/x-raw"))
{
ret = gst_pad_link(new_pad, audio_sink_pad);
if (GST_PAD_LINK_FAILED(ret))
g_print("Type is '%s' but link failed.\n", new_pad_type);
else
g_print("Link succeeded (type '%s').\n", new_pad_type);
}
else
{
g_print("It has type '%s' which is not supported. Ignoring.\n", new_pad_type);
}
6. 事件循环
do
{
msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
if (msg != NULL)
{
switch (GST_MESSAGE_TYPE(msg))
{
case GST_MESSAGE_ERROR:
// 错误处理
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print("End-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
// 仅输出来自管道的消息
if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data.pipeline))
{
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
g_print("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
}
break;
default:
g_printerr("Unexpected message received.\n");
break;
}
gst_message_unref(msg);
}
} while (!terminate);
完整代码
#include <gst/gst.h>
/* Bundles every pipeline element into one structure so a single pointer
 * can be passed to the "pad-added" callback (see pad_added_handler). */
typedef struct _CustomData
{
GstElement *pipeline;      /* top-level bin that contains all elements below */
GstElement *source;        /* uridecodebin: decodes the URI set via g_object_set */
GstElement *videoconvert;  /* video branch: raw video format conversion */
GstElement *videosink;     /* video branch: autovideosink render element */
GstElement *audioconvert;  /* audio branch: raw audio format conversion */
GstElement *audioresample; /* audio branch: sample-rate conversion */
GstElement *audiosink;     /* audio branch: autoaudiosink render element */
} CustomData;
static void pad_added_handler(GstElement *src, GstPad *pad, CustomData *data);
int main(int argc, char *argv[])
{
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
gst_init(&argc, &argv);
data.source = gst_element_factory_make("uridecodebin", "source");
data.videoconvert = gst_element_factory_make("videoconvert", "videoconvert");
data.videosink = gst_element_factory_make("autovideosink", "videosink");
data.audioconvert = gst_element_factory_make("audioconvert", "audioconvert");
data.audioresample = gst_element_factory_make("audioresample", "resample");
data.audiosink = gst_element_factory_make("autoaudiosink", "audiosink");
data.pipeline = gst_pipeline_new("test-pipeline");
if (!data.pipeline || !data.source ||
!data.videoconvert || !data.videosink ||
!data.audioconvert || !data.audioresample || !data.audiosink)
{
g_printerr("Not all elements could be created.\n");
return -1;
}
gst_bin_add_many(
GST_BIN(data.pipeline),
data.source,
data.videoconvert, data.videosink,
data.audioconvert, data.audioresample, data.audiosink,
NULL
);
if (!gst_element_link_many(data.videoconvert, data.videosink, NULL) ||
!gst_element_link_many(data.audioconvert, data.audioresample, data.audiosink, NULL))
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
return -1;
}
g_object_set(data.source, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
g_signal_connect(data.source, "pad-added", G_CALLBACK(pad_added_handler), &data);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
return -1;
}
bus = gst_element_get_bus(data.pipeline);
do
{
msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
if (msg != NULL)
{
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE(msg))
{
case GST_MESSAGE_ERROR:
gst_message_parse_error(msg, &err, &debug_info);
g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print("End-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data.pipeline))
{
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
g_print("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
}
break;
default:
g_printerr("Unexpected message received.\n");
break;
}
gst_message_unref(msg);
}
} while (!terminate);
gst_object_unref(bus);
gst_element_set_state(data.pipeline, GST_STATE_NULL);
gst_object_unref(data.pipeline);
return 0;
}
/* Callback for uridecodebin's "pad-added" signal.
 * Inspects the new source pad's capabilities and links it to the
 * matching branch: video/x-raw -> videoconvert, audio/x-raw -> audioconvert.
 * Any other media type is ignored. */
static void pad_added_handler(GstElement *src, GstPad *new_pad, CustomData *data)
{
    GstPad *video_sink_pad = gst_element_get_static_pad(data->videoconvert, "sink");
    GstPad *audio_sink_pad = gst_element_get_static_pad(data->audioconvert, "sink");
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;

    g_print("Received new pad '%s' from '%s':\n", GST_PAD_NAME(new_pad), GST_ELEMENT_NAME(src));

    /* Caps may not be negotiated yet on a brand-new pad; fall back to a
     * caps query so gst_caps_get_structure() never receives NULL. */
    new_pad_caps = gst_pad_get_current_caps(new_pad);
    if (new_pad_caps == NULL)
        new_pad_caps = gst_pad_query_caps(new_pad, NULL);
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);

    if (g_str_has_prefix(new_pad_type, "video/x-raw"))
    {
        /* Only skip when THIS branch is already connected. The previous
         * check required both branches to be linked, so a duplicate pad
         * of one type could attempt a colliding second link. */
        if (gst_pad_is_linked(video_sink_pad))
        {
            g_print("We are already linked. Ignoring.\n");
            goto exit;
        }
        /* Link direction is always src pad -> sink pad. */
        ret = gst_pad_link(new_pad, video_sink_pad);
        if (GST_PAD_LINK_FAILED(ret))
            g_print("Type is '%s' but link failed.\n", new_pad_type);
        else
            g_print("Link succeeded (type '%s').\n", new_pad_type);
    }
    else if (g_str_has_prefix(new_pad_type, "audio/x-raw"))
    {
        if (gst_pad_is_linked(audio_sink_pad))
        {
            g_print("We are already linked. Ignoring.\n");
            goto exit;
        }
        ret = gst_pad_link(new_pad, audio_sink_pad);
        if (GST_PAD_LINK_FAILED(ret))
            g_print("Type is '%s' but link failed.\n", new_pad_type);
        else
            g_print("Link succeeded (type '%s').\n", new_pad_type);
    }
    else
    {
        g_print("It has type '%s' which is not supported. Ignoring.\n", new_pad_type);
    }

exit:
    /* Release the caps and the static pad references taken above. */
    if (new_pad_caps != NULL)
        gst_caps_unref(new_pad_caps);
    gst_object_unref(video_sink_pad);
    gst_object_unref(audio_sink_pad);
}
编译和运行
gcc main.c -o main.out $(pkg-config --cflags --libs gstreamer-1.0)
./main.out
总结
本示例展示了:
- 处理动态 Pad 的创建
- 使用信号机制(pad-added)处理异步事件
- 根据 Pad 的 Capabilities 类型进行路由
- 复杂 media pipeline 的构建
- 事件循环和状态变化监听
动态 Pad 是 GStreamer 处理复杂媒体格式(如同时包含音频、视频和字幕流的文件)的核心机制,理解这一概念对学习 GStreamer 很重要。