GStreamer Server Development

GStreamer is not limited to client-side multimedia processing; it can also be used to build powerful media servers. This guide presents complete GStreamer server and client recipes for the most common streaming protocols.

Common Server Types

Type           | Protocol   | Typical Use
RTSP server    | RTSP/RTP   | Live video surveillance, IPTV
RTMP server    | RTMP       | Live streaming (push/ingest)
HTTP server    | HTTP/HTTPS | Video on demand, live streaming
WebRTC server  | WebRTC     | Real-time communication, video conferencing
RTP server     | RTP/UDP    | Low-latency video transport
SRT server     | SRT (UDP)  | Low-latency transport over lossy networks

GStreamer RTSP Server and Client

1. RTSP Server Development

1.1 Server Implementation Based on the GstRTSPServer Library

1.1.1 A Basic RTSP Server

c

#include <gst/rtsp-server/rtsp-server.h>
#include <glib.h>

#define DEFAULT_RTSP_PORT "8554"

static char *port = (char *)DEFAULT_RTSP_PORT;

int main(int argc, char *argv[]) {
    GMainLoop *loop;
    GstRTSPServer *server;
    GstRTSPMountPoints *mounts;
    GstRTSPMediaFactory *factory;
    
    gst_init(&argc, &argv);
    
    loop = g_main_loop_new(NULL, FALSE);
    
    /* Create the RTSP server instance */
    server = gst_rtsp_server_new();
    g_object_set(server, "service", port, NULL);
    
    /* Get the mount points */
    mounts = gst_rtsp_server_get_mount_points(server);
    
    /* Create a media factory */
    factory = gst_rtsp_media_factory_new();
    gst_rtsp_media_factory_set_launch(factory, 
        "( videotestsrc ! x264enc ! rtph264pay name=pay0 pt=96 )");
    
    /* Share one media pipeline among all clients of this mount */
    gst_rtsp_media_factory_set_shared(factory, TRUE);
    
    /* Register the mount path */
    gst_rtsp_mount_points_add_factory(mounts, "/test", factory);
    g_object_unref(mounts);
    
    /* Attach the server to the default main context */
    gst_rtsp_server_attach(server, NULL);
    
    g_print("RTSP server running at rtsp://127.0.0.1:%s/test\n", port);
    g_main_loop_run(loop);
    
    return 0;
}
1.1.2 Compiling and Running

bash

gcc rtsp_server.c -o rtsp_server `pkg-config --cflags --libs gstreamer-rtsp-server-1.0`
./rtsp_server

1.2 Advanced RTSP Server Features

1.2.1 Multiple Streams

c

/* Register multiple streams */
void add_streams(GstRTSPServer *server) {
    GstRTSPMountPoints *mounts = gst_rtsp_server_get_mount_points(server);
    
    /* Test video stream */
    GstRTSPMediaFactory *test_factory = gst_rtsp_media_factory_new();
    gst_rtsp_media_factory_set_launch(test_factory,
        "( videotestsrc pattern=ball ! x264enc ! rtph264pay name=pay0 pt=96 )");
    gst_rtsp_mount_points_add_factory(mounts, "/test", test_factory);
    
    /* Camera stream */
    GstRTSPMediaFactory *cam_factory = gst_rtsp_media_factory_new();
    gst_rtsp_media_factory_set_launch(cam_factory,
        "( v4l2src device=/dev/video0 ! videoconvert ! x264enc ! rtph264pay name=pay0 pt=96 )");
    gst_rtsp_mount_points_add_factory(mounts, "/camera", cam_factory);
    
    /* File stream */
    GstRTSPMediaFactory *file_factory = gst_rtsp_media_factory_new();
    gst_rtsp_media_factory_set_launch(file_factory,
        "( filesrc location=video.mp4 ! qtdemux ! h264parse ! rtph264pay name=pay0 pt=96 )");
    gst_rtsp_mount_points_add_factory(mounts, "/video", file_factory);
    
    g_object_unref(mounts);
}
1.2.2 Authentication

c

/* Configure basic authentication on the server.
 * Note: each media factory must also be granted a matching role via
 * gst_rtsp_media_factory_add_role() for clients to be authorized. */
void setup_auth(GstRTSPServer *server) {
    GstRTSPAuth *auth = gst_rtsp_auth_new();
    
    /* Token granting the "user" media-factory role */
    GstRTSPToken *token = gst_rtsp_token_new(
        GST_RTSP_TOKEN_MEDIA_FACTORY_ROLE, G_TYPE_STRING, "user", NULL);
    
    /* Register the Basic credentials and bind them to the token */
    gchar *basic = gst_rtsp_auth_make_basic("admin", "password123");
    gst_rtsp_auth_add_basic(auth, basic, token);
    g_free(basic);
    
    gst_rtsp_server_set_auth(server, auth);
    
    gst_rtsp_token_unref(token);
    g_object_unref(auth);
}
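
The token only takes effect if each factory grants the matching role. A companion sketch (the role name must match the token above):

c

/* Allow the "user" role to access and construct media on this factory */
gst_rtsp_media_factory_add_role(factory, "user",
    GST_RTSP_PERM_MEDIA_FACTORY_ACCESS, G_TYPE_BOOLEAN, TRUE,
    GST_RTSP_PERM_MEDIA_FACTORY_CONSTRUCT, G_TYPE_BOOLEAN, TRUE,
    NULL);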

2. RTSP Client Development

2.1 Using rtspsrc

2.1.1 A Basic RTSP Client

c

#include <gst/gst.h>

static void on_pad_added(GstElement *element, GstPad *pad, gpointer data);

int main(int argc, char *argv[]) {
    GstElement *pipeline, *src, *depay, *decoder, *conv, *sink;
    GMainLoop *loop;
    
    gst_init(&argc, &argv);
    
    if (argc != 2) {
        g_printerr("Usage: %s <rtsp_url>\n", argv[0]);
        return -1;
    }
    
    loop = g_main_loop_new(NULL, FALSE);
    
    /* Create the pipeline */
    pipeline = gst_pipeline_new("rtsp-client");
    
    /* Create elements */
    src = gst_element_factory_make("rtspsrc", "source");
    g_object_set(src, "location", argv[1], "latency", 0, NULL);
    
    depay = gst_element_factory_make("rtph264depay", "depay");
    decoder = gst_element_factory_make("avdec_h264", "decoder");
    conv = gst_element_factory_make("videoconvert", "converter");
    sink = gst_element_factory_make("autovideosink", "sink");
    
    /* Add elements to the pipeline */
    gst_bin_add_many(GST_BIN(pipeline), src, depay, decoder, conv, sink, NULL);
    
    /* Link the static elements */
    if (!gst_element_link_many(depay, decoder, conv, sink, NULL)) {
        g_error("Failed to link elements");
        return -1;
    }
    
    /* rtspsrc pads appear at runtime; link to depay in the callback */
    g_signal_connect(src, "pad-added", G_CALLBACK(on_pad_added), depay);
    
    /* Start the pipeline */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_print("Playing RTSP stream...\n");
    
    g_main_loop_run(loop);
    
    /* Clean up */
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    g_main_loop_unref(loop);
    
    return 0;
}

static void on_pad_added(GstElement *element, GstPad *pad, gpointer data) {
    GstElement *depay = GST_ELEMENT(data);
    GstPad *sinkpad = gst_element_get_static_pad(depay, "sink");
    
    if (gst_pad_is_linked(sinkpad)) {
        g_object_unref(sinkpad);
        return;
    }
    
    GstPadLinkReturn ret = gst_pad_link(pad, sinkpad);
    if (GST_PAD_LINK_FAILED(ret)) {
        g_warning("Failed to link pads");
    }
    
    g_object_unref(sinkpad);
}
2.1.2 Compiling and Running

bash

gcc rtsp_client.c -o rtsp_client `pkg-config --cflags --libs gstreamer-1.0`
./rtsp_client rtsp://127.0.0.1:8554/test

2.2 Advanced RTSP Client Features

2.2.1 Low-Latency Configuration

c

/* Build a low-latency RTSP client pipeline */
GstElement *create_low_latency_pipeline(const gchar *uri) {
    GstElement *pipeline = gst_pipeline_new("rtsp-client");
    
    GstElement *src = gst_element_factory_make("rtspsrc", "source");
    g_object_set(src, 
        "location", uri,
        "latency", 0,
        "udp-reconnect", 1,
        "do-rtsp-keep-alive", 1,
        "drop-on-latency", 1,
        "buffer-mode", 0,  // buffer-mode=none (no extra buffering)
        NULL);
    
    GstElement *depay = gst_element_factory_make("rtph264depay", "depay");
    GstElement *decoder = gst_element_factory_make("avdec_h264", "decoder");
    GstElement *conv = gst_element_factory_make("videoconvert", "converter");
    GstElement *sink = gst_element_factory_make("autovideosink", "sink");
    g_object_set(sink, "sync", FALSE, NULL);  // disable clock sync to minimize latency
    
    gst_bin_add_many(GST_BIN(pipeline), src, depay, decoder, conv, sink, NULL);
    
    if (!gst_element_link_many(depay, decoder, conv, sink, NULL)) {
        gst_object_unref(pipeline);
        return NULL;
    }
    
    g_signal_connect(src, "pad-added", G_CALLBACK(on_pad_added), depay);
    
    return pipeline;
}
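
A minimal usage sketch for the helper above (the URL is an assumption):

c

/* Hypothetical caller of create_low_latency_pipeline() */
GstElement *pipeline = create_low_latency_pipeline("rtsp://127.0.0.1:8554/test");
if (pipeline != NULL)
    gst_element_set_state(pipeline, GST_STATE_PLAYING);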
2.2.2 Authentication

c

/* Configure RTSP authentication: rtspsrc negotiates Basic/Digest itself
 * through its user-id and user-pw properties */
void setup_rtsp_auth(GstElement *rtspsrc, const gchar *user, const gchar *pass) {
    g_object_set(rtspsrc, "user-id", user, "user-pw", pass, NULL);
}

3. Command-Line Examples

3.1 Server Command Line

bash

# Test video stream (gst-rtsp-launch here is the test-launch example that
# ships with gst-rtsp-server; build it from that project if not installed)
gst-rtsp-launch --gst-debug=3 "( videotestsrc ! x264enc ! rtph264pay name=pay0 pt=96 )"

# Camera stream
gst-rtsp-launch "( v4l2src device=/dev/video0 ! videoconvert ! x264enc ! rtph264pay name=pay0 pt=96 )"

# File stream
gst-rtsp-launch "( filesrc location=video.mp4 ! qtdemux ! h264parse ! rtph264pay name=pay0 pt=96 )"

3.2 Client Command Line

bash

# Basic playback
gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/test latency=0 ! \
    rtph264depay ! avdec_h264 ! videoconvert ! autovideosink

# Low-latency playback
gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/test latency=0 drop-on-latency=true ! \
    rtph264depay ! avdec_h264 ! videoconvert ! autovideosink sync=false

# Playback with authentication
gst-launch-1.0 rtspsrc location="rtsp://127.0.0.1:8554/test" user-id=admin user-pw=password123 ! \
    rtph264depay ! avdec_h264 ! videoconvert ! autovideosink

4. Advanced Features

4.1 Multithreaded RTSP Server

c

/* Configure the server's thread pool */
void setup_thread_pool(GstRTSPServer *server) {
    GstRTSPThreadPool *pool = gst_rtsp_thread_pool_new();
    
    /* Cap the number of threads used to serve client connections */
    gst_rtsp_thread_pool_set_max_threads(pool, 10);
    
    /* Apply to the server */
    gst_rtsp_server_set_thread_pool(server, pool);
    g_object_unref(pool);
}

4.2 Statistics and Monitoring

c

/* Report the number of active sessions. GstRTSPServer has no built-in
 * byte counters; per-stream statistics live on each media's RTP session. */
static gboolean print_server_stats(gpointer user_data) {
    GstRTSPServer *server = GST_RTSP_SERVER(user_data);
    GstRTSPSessionPool *pool = gst_rtsp_server_get_session_pool(server);
    guint num_sessions = gst_rtsp_session_pool_get_n_sessions(pool);
    
    g_print("Server Stats:\n");
    g_print("  Active sessions: %u\n", num_sessions);
    
    g_object_unref(pool);
    return TRUE;  /* keep the timeout source alive */
}

/* Print statistics every 5 seconds (call from main after creating the server) */
g_timeout_add_seconds(5, print_server_stats, server);

4.3 Hardware Acceleration

bash

# RTSP server with VAAPI hardware encoding
gst-rtsp-launch "( v4l2src device=/dev/video0 ! videoconvert ! vaapih264enc ! rtph264pay name=pay0 pt=96 )"

# RTSP client with NVIDIA hardware decoding
gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/test ! \
    rtph264depay ! nvh264dec ! videoconvert ! autovideosink

5. Troubleshooting

5.1 Connection Problems

Problem: the client cannot connect to the server
Solutions (a reachability sketch in C follows this list):

  • Check firewall rules: sudo ufw allow 8554/tcp

  • Verify the server's IP address

  • Confirm the server is listening: netstat -tulnp | grep 8554
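
A quick reachability probe using GIO, as referenced above (a hypothetical helper, not part of the server):

c

#include <gio/gio.h>

/* Returns TRUE if a TCP connection to host:port succeeds */
static gboolean can_reach(const gchar *host, guint16 port) {
    GSocketClient *client = g_socket_client_new();
    GSocketConnection *conn =
        g_socket_client_connect_to_host(client, host, port, NULL, NULL);
    gboolean ok = (conn != NULL);
    g_clear_object(&conn);
    g_object_unref(client);
    return ok;
}

/* e.g. can_reach("127.0.0.1", 8554) */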

5.2 Latency Problems

Problem: stream latency is too high
Solutions:

bash

# Reduce buffering on the server side
gst-rtsp-launch "( videotestsrc ! x264enc tune=zerolatency ! rtph264pay name=pay0 pt=96 config-interval=1 )"

# Low-latency client configuration
gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/test latency=0 ! \
    rtph264depay ! avdec_h264 ! videoconvert ! autovideosink sync=false

5.3 Audio/Video Out of Sync

Problem: audio and video are not synchronized
Solutions:

bash

# Make sure the server generates correct timestamps
gst-rtsp-launch "( videotestsrc ! x264enc ! rtph264pay name=pay0 pt=96 )"

# Use unbounded queues on the client so the sinks can synchronize
gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/test ! \
    rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! \
    queue max-size-buffers=0 max-size-time=0 max-size-bytes=0 min-threshold-time=0 ! \
    autovideosink

GStreamer RTMP Server and Client

1. RTMP Server-Side Development

Implementing the RTMP server side with GStreamer

Note: GStreamer ships no standalone RTMP server element; rtmpsrc and rtmpsink are clients of an external RTMP server (such as nginx-rtmp or SRS). The example below therefore consumes and plays back a stream that has already been published to such a server.

#include <gst/gst.h>

static void on_pad_added(GstElement *element, GstPad *pad, gpointer data);

int main(int argc, char *argv[]) {
    GstElement *pipeline, *rtmpsrc, *queue, *flvdemux, *video_dec, *video_conv, *video_sink;
    GstElement *audio_dec, *audio_conv, *audio_sink;
    GMainLoop *loop;
    
    gst_init(&argc, &argv);
    
    loop = g_main_loop_new(NULL, FALSE);
    
    // Create the pipeline
    pipeline = gst_pipeline_new("rtmp-server");
    
    // Create elements
    rtmpsrc = gst_element_factory_make("rtmpsrc", "rtmp-source");
    g_object_set(rtmpsrc, "location", "rtmp://localhost/live/stream", NULL);
    
    flvdemux = gst_element_factory_make("flvdemux", "demuxer");
    queue = gst_element_factory_make("queue", "queue");
    
    video_dec = gst_element_factory_make("avdec_h264", "video-decoder");
    video_conv = gst_element_factory_make("videoconvert", "video-converter");
    video_sink = gst_element_factory_make("autovideosink", "video-sink");
    
    audio_dec = gst_element_factory_make("avdec_aac", "audio-decoder");
    audio_conv = gst_element_factory_make("audioconvert", "audio-converter");
    audio_sink = gst_element_factory_make("autoaudiosink", "audio-sink");
    
    // Add elements to the pipeline
    gst_bin_add_many(GST_BIN(pipeline), rtmpsrc, queue, flvdemux, 
                    video_dec, video_conv, video_sink,
                    audio_dec, audio_conv, audio_sink, NULL);
    
    // Link the static elements
    if (!gst_element_link_many(rtmpsrc, queue, flvdemux, NULL)) {
        g_error("Failed to link elements");
        return -1;
    }
    
    // flvdemux adds audio/video pads dynamically
    g_signal_connect(flvdemux, "pad-added", G_CALLBACK(on_pad_added), pipeline);
    
    // Start the pipeline
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_print("RTMP server is running...\n");
    
    g_main_loop_run(loop);
    
    // Clean up
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    g_main_loop_unref(loop);
    
    return 0;
}

static void on_pad_added(GstElement *element, GstPad *pad, gpointer data) {
    GstElement *pipeline = GST_ELEMENT(data);
    GstPad *sinkpad;
    GstElement *decoder, *conv, *sink;
    
    GstCaps *caps = gst_pad_get_current_caps(pad);
    const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
    
    if (g_str_has_prefix(name, "video")) {
        decoder = gst_bin_get_by_name(GST_BIN(pipeline), "video-decoder");
        conv = gst_bin_get_by_name(GST_BIN(pipeline), "video-converter");
        sink = gst_bin_get_by_name(GST_BIN(pipeline), "video-sink");
    } else if (g_str_has_prefix(name, "audio")) {
        decoder = gst_bin_get_by_name(GST_BIN(pipeline), "audio-decoder");
        conv = gst_bin_get_by_name(GST_BIN(pipeline), "audio-converter");
        sink = gst_bin_get_by_name(GST_BIN(pipeline), "audio-sink");
    } else {
        gst_caps_unref(caps);
        return;
    }
    
    gst_caps_unref(caps);
    
    sinkpad = gst_element_get_static_pad(decoder, "sink");
    gst_pad_link(pad, sinkpad);
    gst_object_unref(sinkpad);
    
    gst_element_link_many(decoder, conv, sink, NULL);
    gst_object_unref(decoder);
    gst_object_unref(conv);
    gst_object_unref(sink);
}
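
Depending on decoder and stream, an explicit parser between flvdemux and each decoder makes caps negotiation more robust. A sketch of the two extra elements (the wiring comment describes where they would go; names are illustrative):

c

/* Optional parsers: demux video pad -> h264parse -> avdec_h264,
 *                   demux audio pad -> aacparse  -> avdec_aac */
GstElement *h264parse = gst_element_factory_make("h264parse", "h264-parser");
GstElement *aacparse  = gst_element_factory_make("aacparse", "aac-parser");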

2. RTMP Client Development

2.1 RTMP Publishing (Push) Client

2.1.1 Publishing with GStreamer

c

#include <gst/gst.h>

int main(int argc, char *argv[]) {
    GstElement *pipeline, *src, *enc, *mux, *sink;
    GMainLoop *loop;
    
    gst_init(&argc, &argv);
    
    loop = g_main_loop_new(NULL, FALSE);
    
    // Create the pipeline
    pipeline = gst_pipeline_new("rtmp-push-client");
    
    // Create elements
    src = gst_element_factory_make("videotestsrc", "source");
    enc = gst_element_factory_make("x264enc", "encoder");
    mux = gst_element_factory_make("flvmux", "muxer");
    sink = gst_element_factory_make("rtmpsink", "sink");
    
    // Configure parameters (streamable FLV is required for live RTMP)
    g_object_set(enc, "bitrate", 2000, "key-int-max", 30, NULL);
    g_object_set(mux, "streamable", TRUE, NULL);
    g_object_set(sink, "location", "rtmp://localhost/live/stream", NULL);
    
    // Add elements to the pipeline
    gst_bin_add_many(GST_BIN(pipeline), src, enc, mux, sink, NULL);
    
    // Link elements
    if (!gst_element_link_many(src, enc, mux, sink, NULL)) {
        g_error("Failed to link elements");
        return -1;
    }
    
    // Start the pipeline
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_print("Pushing stream to RTMP server...\n");
    
    g_main_loop_run(loop);
    
    // Clean up
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    g_main_loop_unref(loop);
    
    return 0;
}
2.1.2 Command-Line Publishing Examples

bash

# Publish from a test source
gst-launch-1.0 videotestsrc ! \
    videoconvert ! \
    x264enc bitrate=2000 key-int-max=30 ! \
    flvmux ! \
    rtmpsink location="rtmp://localhost/live/stream live=1"

# Publish from a camera
gst-launch-1.0 v4l2src device=/dev/video0 ! \
    videoconvert ! \
    x264enc bitrate=2000 key-int-max=30 ! \
    flvmux ! \
    rtmpsink location="rtmp://localhost/live/stream live=1"

# Publish with audio
gst-launch-1.0 videotestsrc ! \
    videoconvert ! \
    x264enc bitrate=2000 key-int-max=30 ! \
    flvmux name=mux ! \
    rtmpsink location="rtmp://localhost/live/stream" \
    audiotestsrc ! \
    audioconvert ! \
    voaacenc bitrate=128000 ! \
    mux.

2.2 RTMP Playback (Pull) Client

2.2.1 Pulling a Stream with GStreamer

c

#include <gst/gst.h>

static void on_pad_added(GstElement *element, GstPad *pad, gpointer data);

int main(int argc, char *argv[]) {
    GstElement *pipeline, *src, *demux, *video_dec, *video_conv, *video_sink;
    GstElement *audio_dec, *audio_conv, *audio_sink;
    GMainLoop *loop;
    
    gst_init(&argc, &argv);
    
    loop = g_main_loop_new(NULL, FALSE);
    
    // Create the pipeline
    pipeline = gst_pipeline_new("rtmp-pull-client");
    
    // Create elements
    src = gst_element_factory_make("rtmpsrc", "source");
    g_object_set(src, "location", "rtmp://localhost/live/stream", NULL);
    
    demux = gst_element_factory_make("flvdemux", "demuxer");
    
    video_dec = gst_element_factory_make("avdec_h264", "video-decoder");
    video_conv = gst_element_factory_make("videoconvert", "video-converter");
    video_sink = gst_element_factory_make("autovideosink", "video-sink");
    
    audio_dec = gst_element_factory_make("avdec_aac", "audio-decoder");
    audio_conv = gst_element_factory_make("audioconvert", "audio-converter");
    audio_sink = gst_element_factory_make("autoaudiosink", "audio-sink");
    
    // Add elements to the pipeline
    gst_bin_add_many(GST_BIN(pipeline), src, demux, 
                    video_dec, video_conv, video_sink,
                    audio_dec, audio_conv, audio_sink, NULL);
    
    // Link source to demuxer
    if (!gst_element_link(src, demux)) {
        g_error("Failed to link src and demux");
        return -1;
    }
    
    // flvdemux adds audio/video pads dynamically
    g_signal_connect(demux, "pad-added", G_CALLBACK(on_pad_added), pipeline);
    
    // Start the pipeline
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_print("Playing RTMP stream...\n");
    
    g_main_loop_run(loop);
    
    // Clean up
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    g_main_loop_unref(loop);
    
    return 0;
}

static void on_pad_added(GstElement *element, GstPad *pad, gpointer data) {
    GstElement *pipeline = GST_ELEMENT(data);
    GstPad *sinkpad;
    GstElement *decoder, *conv, *sink;
    
    GstCaps *caps = gst_pad_get_current_caps(pad);
    const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
    
    if (g_str_has_prefix(name, "video")) {
        decoder = gst_bin_get_by_name(GST_BIN(pipeline), "video-decoder");
        conv = gst_bin_get_by_name(GST_BIN(pipeline), "video-converter");
        sink = gst_bin_get_by_name(GST_BIN(pipeline), "video-sink");
    } else if (g_str_has_prefix(name, "audio")) {
        decoder = gst_bin_get_by_name(GST_BIN(pipeline), "audio-decoder");
        conv = gst_bin_get_by_name(GST_BIN(pipeline), "audio-converter");
        sink = gst_bin_get_by_name(GST_BIN(pipeline), "audio-sink");
    } else {
        gst_caps_unref(caps);
        return;
    }
    
    gst_caps_unref(caps);
    
    sinkpad = gst_element_get_static_pad(decoder, "sink");
    gst_pad_link(pad, sinkpad);
    gst_object_unref(sinkpad);
    
    gst_element_link_many(decoder, conv, sink, NULL);
    gst_object_unref(decoder);
    gst_object_unref(conv);
    gst_object_unref(sink);
}
2.2.2 Command-Line Playback Examples

bash

# Basic playback
gst-launch-1.0 rtmpsrc location="rtmp://localhost/live/stream" ! \
    flvdemux name=demux \
    demux.video ! queue ! h264parse ! avdec_h264 ! videoconvert ! autovideosink \
    demux.audio ! queue ! aacparse ! avdec_aac ! audioconvert ! autoaudiosink

# Low-latency configuration
gst-launch-1.0 rtmpsrc location="rtmp://localhost/live/stream" ! \
    flvdemux name=demux \
    demux.video ! queue max-size-time=0 ! h264parse ! avdec_h264 ! videoconvert ! autovideosink sync=false \
    demux.audio ! queue max-size-time=0 ! aacparse ! avdec_aac ! audioconvert ! autoaudiosink sync=false

3. Advanced Features

3.1 Publishing with Authentication

bash

# Publish with credentials (parameter names depend on the RTMP server)
gst-launch-1.0 videotestsrc ! \
    videoconvert ! \
    x264enc bitrate=2000 key-int-max=30 ! \
    flvmux ! \
    rtmpsink location="rtmp://localhost/live/stream?user=admin&pwd=secret123 live=1"
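
The same credentials can be assembled from C by building the location string (the user/pwd parameter names depend on the RTMP server and are assumptions here):

c

/* Hypothetical credential embedding for rtmpsink */
gchar *location = g_strdup_printf(
    "rtmp://localhost/live/stream?user=%s&pwd=%s live=1", user, pass);
g_object_set(sink, "location", location, NULL);
g_free(location);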

3.2 Multi-Bitrate Transcoded Publishing

bash

# HD and SD renditions from one pipeline (two independent branches)
gst-launch-1.0 \
    videotestsrc ! videoconvert ! videoscale ! \
    video/x-raw,width=1280,height=720 ! \
    x264enc bitrate=3000 key-int-max=30 ! flvmux streamable=true ! \
    rtmpsink location="rtmp://localhost/live/stream_hd" \
    videotestsrc ! videoconvert ! videoscale ! \
    video/x-raw,width=640,height=360 ! \
    x264enc bitrate=1000 key-int-max=30 ! flvmux streamable=true ! \
    rtmpsink location="rtmp://localhost/live/stream_sd"

3.3 Recording While Publishing

bash

# Publish and record at the same time (-e finalizes the MP4 on Ctrl-C)
gst-launch-1.0 -e videotestsrc ! \
    tee name=t ! \
    queue ! videoconvert ! x264enc ! flvmux streamable=true ! rtmpsink location="rtmp://localhost/live/stream" \
    t. ! queue ! videoconvert ! x264enc ! mp4mux ! filesink location=recording.mp4

4. Performance Tuning

4.1 Hardware Acceleration

bash

# Publish with VAAPI hardware encoding
gst-launch-1.0 v4l2src ! \
    videoconvert ! \
    vaapih264enc bitrate=2000 ! \
    h264parse ! \
    flvmux ! \
    rtmpsink location="rtmp://localhost/live/stream"

4.2 Zero-Copy Optimization

bash

# Use DMA-BUF-capable elements to reduce memory copies
gst-launch-1.0 v4l2src ! \
    video/x-raw,format=NV12 ! \
    v4l2h264enc ! \
    h264parse ! \
    flvmux ! \
    rtmpsink location="rtmp://localhost/live/stream"

4.3 Network Optimization

bash

# Reduce client-side buffering (sync=false trades smoothness for latency)
gst-launch-1.0 rtmpsrc location="rtmp://localhost/live/stream" ! \
    decodebin ! videoconvert ! autovideosink sync=false

GStreamer WebRTC Server and Client

WebRTC enables real-time communication in browsers; combined with GStreamer it gains powerful media-processing capabilities.

1. WebRTC Server Development

1.1 Signaling Server (based on websocketpp)

cpp

#include <websocketpp/config/asio_no_tls.hpp>
#include <websocketpp/server.hpp>
#include <json/json.h>
#include <iostream>
#include <set>

typedef websocketpp::server<websocketpp::config::asio> server;
using websocketpp::lib::bind;
using websocketpp::lib::placeholders::_1;
using websocketpp::lib::placeholders::_2;

class SignalingServer {
public:
    SignalingServer() {
        m_server.init_asio();
        m_server.set_open_handler(bind(&SignalingServer::on_open, this, _1));
        m_server.set_close_handler(bind(&SignalingServer::on_close, this, _1));
        m_server.set_message_handler(bind(&SignalingServer::on_message, this, _1, _2));
    }

    void run(uint16_t port) {
        m_server.listen(port);
        m_server.start_accept();
        m_server.run();
    }

private:
    void on_open(websocketpp::connection_hdl hdl) {
        std::cout << "New client connected" << std::endl;
        m_connections.insert(hdl);
    }

    void on_close(websocketpp::connection_hdl hdl) {
        std::cout << "Client disconnected" << std::endl;
        m_connections.erase(hdl);
    }

    void on_message(websocketpp::connection_hdl hdl, server::message_ptr msg) {
        Json::Value root;
        Json::Reader reader;
        
        if (!reader.parse(msg->get_payload(), root)) {
            std::cerr << "Failed to parse JSON" << std::endl;
            return;
        }

        std::string type = root["type"].asString();
        
        if (type == "offer") {
            // Forward the offer to all other clients (a real server would route by room)
            for (auto it : m_connections) {
                if (it != hdl) { // don't echo back to the sender
                    m_server.send(it, msg->get_payload(), msg->get_opcode());
                }
            }
        } else if (type == "answer") {
            // Forward the answer
            for (auto it : m_connections) {
                if (it != hdl) {
                    m_server.send(it, msg->get_payload(), msg->get_opcode());
                }
            }
        } else if (type == "ice-candidate") {
            // Forward the ICE candidate
            for (auto it : m_connections) {
                if (it != hdl) {
                    m_server.send(it, msg->get_payload(), msg->get_opcode());
                }
            }
        }
    }

    server m_server;
    std::set<websocketpp::connection_hdl, std::owner_less<websocketpp::connection_hdl>> m_connections;
};

int main() {
    SignalingServer server;
    server.run(9002);
    return 0;
}

1.2 WebRTC Media Server

cpp

#include <gst/gst.h>
#include <gst/webrtc/webrtc.h>
#include <string>
#include <map>

struct PeerConnection {
    GstElement *pipe;
    GstElement *webrtc;
};

std::map<std::string, PeerConnection> peers;

static void on_offer_created(GstPromise *promise, gpointer user_data) {
    GstElement *webrtc = (GstElement *)user_data;
    const GstStructure *reply = gst_promise_get_reply(promise);
    GstWebRTCSessionDescription *offer = NULL;
    
    gst_structure_get(reply, "offer",
        GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &offer, NULL);
    gst_promise_unref(promise);
    
    /* Apply locally; the SDP text is then sent through the signaling server */
    g_signal_emit_by_name(webrtc, "set-local-description", offer, NULL);
    
    gchar *text = gst_sdp_message_as_text(offer->sdp);
    g_print("Generated offer:\n%s\n", text);
    g_free(text);
    gst_webrtc_session_description_free(offer);
}

static void on_negotiation_needed(GstElement *webrtc, gpointer user_data) {
    GstPromise *promise =
        gst_promise_new_with_change_func(on_offer_created, webrtc, NULL);
    g_signal_emit_by_name(webrtc, "create-offer", NULL, promise);
}

static void on_ice_candidate(GstElement *webrtc, guint mline_index, 
                           gchar *candidate, gpointer user_data) {
    /* The candidate is forwarded to the peer via the signaling server */
    g_print("ICE candidate: %s\n", candidate);
}

void create_webrtc_server() {
    gst_init(NULL, NULL);
    
    GMainLoop *loop = g_main_loop_new(NULL, FALSE);
    
    // Build the pipeline
    GstElement *pipeline = gst_pipeline_new("webrtc-server");
    GstElement *src = gst_element_factory_make("videotestsrc", "source");
    GstElement *videoconvert = gst_element_factory_make("videoconvert", "convert");
    GstElement *encoder = gst_element_factory_make("vp8enc", "encoder");
    GstElement *payloader = gst_element_factory_make("rtpvp8pay", "payloader");
    GstElement *webrtcbin = gst_element_factory_make("webrtcbin", "webrtc");
    
    if (!pipeline || !src || !videoconvert || !encoder || !payloader || !webrtcbin) {
        g_printerr("One element could not be created. Exiting.\n");
        return;
    }
    
    // Configure elements
    g_object_set(src, "is-live", TRUE, NULL);
    g_object_set(encoder, "deadline", 1, NULL); // low-latency VP8 encoding
    
    // Add elements to the pipeline
    gst_bin_add_many(GST_BIN(pipeline), src, videoconvert, encoder, payloader, webrtcbin, NULL);
    
    // Link elements (in practice a capsfilter with full RTP caps, e.g.
    // application/x-rtp,media=video,encoding-name=VP8,payload=96, is
    // usually placed before webrtcbin)
    if (!gst_element_link_many(src, videoconvert, encoder, payloader, webrtcbin, NULL)) {
        g_printerr("Elements could not be linked.\n");
        gst_object_unref(pipeline);
        return;
    }
    
    // Connect webrtcbin signals
    g_signal_connect(webrtcbin, "on-negotiation-needed", 
                    G_CALLBACK(on_negotiation_needed), NULL);
    g_signal_connect(webrtcbin, "on-ice-candidate", 
                    G_CALLBACK(on_ice_candidate), NULL);
    
    // Start the pipeline
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    
    // Run the main loop
    g_print("WebRTC server started\n");
    g_main_loop_run(loop);
    
    // Clean up
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    g_main_loop_unref(loop);
}

int main() {
    create_webrtc_server();
    return 0;
}

Compilation:

# Build the signaling server (websocketpp is header-only, so it is not linked)
g++ signaling_server.cpp -o signaling_server -lboost_system -lboost_random -lboost_thread -lpthread -ljsoncpp

# Build the WebRTC media server
g++ webrtc_server.cpp -o webrtc_server `pkg-config --cflags --libs gstreamer-1.0 gstreamer-webrtc-1.0 gstreamer-sdp-1.0`

 

2. WebRTC Client Development

2.1 Browser JavaScript Client

// client.js
const video = document.getElementById('video');
const startButton = document.getElementById('start');
const stopButton = document.getElementById('stop');

let peerConnection;
let ws;

startButton.onclick = start;
stopButton.onclick = stop;

function start() {
    const config = {
        iceServers: [{ urls: "stun:stun.l.google.com:19302" }]
    };

    peerConnection = new RTCPeerConnection(config);
    peerConnection.ontrack = gotRemoteStream;
    peerConnection.onicecandidate = gotIceCandidate;

    ws = new WebSocket('ws://localhost:9002');
    
    ws.onopen = () => {
        console.log('WebSocket connected');
    };

    ws.onmessage = async (event) => {
        const message = JSON.parse(event.data);
        
        if (message.type === 'offer') {
            await peerConnection.setRemoteDescription(
                new RTCSessionDescription(message));
            
            const answer = await peerConnection.createAnswer();
            await peerConnection.setLocalDescription(answer);
            
            ws.send(JSON.stringify({
                type: 'answer',
                sdp: answer.sdp
            }));
        } else if (message.type === 'answer') {
            await peerConnection.setRemoteDescription(
                new RTCSessionDescription(message));
        } else if (message.type === 'ice-candidate') {
            try {
                await peerConnection.addIceCandidate(message.candidate);
            } catch (e) {
                console.error('Error adding ICE candidate', e);
            }
        }
    };
}

function stop() {
    if (peerConnection) {
        peerConnection.close();
        peerConnection = null;
    }
    if (ws) {
        ws.close();
        ws = null;
    }
    video.srcObject = null;
}

function gotRemoteStream(event) {
    if (video.srcObject !== event.streams[0]) {
        video.srcObject = event.streams[0];
        console.log('Received remote stream');
    }
}

function gotIceCandidate(event) {
    if (event.candidate) {
        ws.send(JSON.stringify({
            type: 'ice-candidate',
            candidate: event.candidate
        }));
    }
}
<!DOCTYPE html>
<html>
<head>
    <title>WebRTC GStreamer Client</title>
</head>
<body>
    <video id="video" autoplay playsinline></video>
    <button id="start">Start</button>
    <button id="stop">Stop</button>
    <!-- Load the script after the elements so getElementById finds them -->
    <script src="client.js"></script>
</body>
</html>

2.2 GStreamer WebRTC Client

2.2.1 C Implementation

c

#include <gst/gst.h>
#include <gst/webrtc/webrtc.h>

#define STUN_SERVER "stun://stun.l.google.com:19302"

typedef struct {
    GstElement *pipe;
    GstElement *webrtc;
} ClientData;

static void on_negotiation_needed(GstElement *webrtc, gpointer user_data) {
    g_print("Negotiation needed\n");
}

static void on_ice_candidate(GstElement *webrtc, guint mline_index, 
                           gchar *candidate, gpointer user_data) {
    g_print("ICE candidate: %s\n", candidate);
}

static void on_answer_created(GstPromise *promise, gpointer user_data) {
    ClientData *data = user_data;
    const GstStructure *reply = gst_promise_get_reply(promise);
    GstWebRTCSessionDescription *answer = NULL;
    
    gst_structure_get(reply, "answer", GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &answer, NULL);
    gst_promise_unref(promise);
    
    gchar *text = gst_sdp_message_as_text(answer->sdp);
    g_print("Answer created:\n%s\n", text);
    g_free(text);
    
    GstPromise *promise2 = gst_promise_new();
    g_signal_emit_by_name(data->webrtc, "set-local-description", answer, promise2);
    gst_promise_interrupt(promise2);
    gst_promise_unref(promise2);
    
    gst_webrtc_session_description_free(answer);
}

static void on_offer_set(GstPromise *promise, gpointer user_data) {
    ClientData *data = user_data;
    gst_promise_unref(promise);
    
    GstPromise *promise2 = gst_promise_new_with_change_func(on_answer_created, data, NULL);
    g_signal_emit_by_name(data->webrtc, "create-answer", NULL, promise2);
}

int main(int argc, char *argv[]) {
    GMainLoop *loop;
    ClientData data;
    
    gst_init(&argc, &argv);
    
    loop = g_main_loop_new(NULL, FALSE);
    
    /* Create the pipeline */
    data.pipe = gst_pipeline_new("webrtc-client");
    data.webrtc = gst_element_factory_make("webrtcbin", "webrtc");
    
    /* Create the test source */
    GstElement *src = gst_element_factory_make("videotestsrc", "src");
    GstElement *capsfilter = gst_element_factory_make("capsfilter", "caps");
    GstElement *encoder = gst_element_factory_make("vp8enc", "encoder");
    GstElement *payloader = gst_element_factory_make("rtpvp8pay", "payloader");
    
    /* Configure parameters */
    GstCaps *caps = gst_caps_new_simple("video/x-raw",
        "width", G_TYPE_INT, 640,
        "height", G_TYPE_INT, 480,
        "framerate", GST_TYPE_FRACTION, 30, 1,
        NULL);
    g_object_set(capsfilter, "caps", caps, NULL);
    gst_caps_unref(caps);
    
    g_object_set(data.webrtc, "stun-server", STUN_SERVER, NULL);
    
    /* Add elements to the pipeline */
    gst_bin_add_many(GST_BIN(data.pipe), src, capsfilter, encoder, payloader, data.webrtc, NULL);
    
    /* Link elements */
    if (!gst_element_link_many(src, capsfilter, encoder, payloader, data.webrtc, NULL)) {
        g_error("Failed to link elements");
        return -1;
    }
    
    /* Connect signals */
    g_signal_connect(data.webrtc, "on-negotiation-needed", 
                    G_CALLBACK(on_negotiation_needed), &data);
    g_signal_connect(data.webrtc, "on-ice-candidate", 
                    G_CALLBACK(on_ice_candidate), &data);
    
    /* Start the pipeline */
    gst_element_set_state(data.pipe, GST_STATE_PLAYING);
    
    /* In a real client the peer's offer arrives via the signaling server:
     * apply it with "set-remote-description" using a promise whose change
     * function is on_offer_set(), which then triggers create-answer above.
     * This standalone sketch has no peer, so negotiation stops here. */
    
    g_main_loop_run(loop);
    
    /* Clean up */
    gst_element_set_state(data.pipe, GST_STATE_NULL);
    gst_object_unref(data.pipe);
    g_main_loop_unref(loop);
    
    return 0;
}
2.2.2 Compiling and Running

bash

gcc webrtc_client.c -o webrtc_client `pkg-config --cflags --libs gstreamer-1.0 gstreamer-webrtc-1.0 gstreamer-sdp-1.0`
./webrtc_client

2.3 Running the Demo

  1. Start the signaling server:

    bash

    ./signaling_server
  2. Start the WebRTC media server:

    bash

    ./webrtc_server
  3. Open the HTML page in a browser and click "Start" to establish the connection

2.4 Key Implementation Details

  1. Signaling protocol

    • SDP and ICE candidates are exchanged as JSON

    • Message types: offer / answer / ice-candidate

  2. Media pipeline

    • Server side: videotestsrc → vp8enc → rtpvp8pay → webrtcbin

    • Client side: the browser's native WebRTC API receives the stream

  3. NAT traversal

    • Uses a STUN server (Google's public STUN); a configuration sketch follows this list

    • Complex networks may additionally require a TURN server
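
A configuration sketch for the NAT traversal point above (the TURN URL and credentials are placeholders):

c

/* webrtcbin NAT traversal: STUN discovers the public address, TURN
 * relays media when no direct path exists */
g_object_set(webrtc,
    "stun-server", "stun://stun.l.google.com:19302",
    "turn-server", "turn://user:pass@turn.example.com:3478",
    NULL);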

 

3. Advanced Features

3.1 Bidirectional Audio/Video

c

// Add an audio stream to the client pipeline
GstElement *audiosrc = gst_element_factory_make("audiotestsrc", "audiosrc");
GstElement *audioconv = gst_element_factory_make("audioconvert", "audioconv");
GstElement *opusenc = gst_element_factory_make("opusenc", "opusenc");
GstElement *rtpopuspay = gst_element_factory_make("rtpopuspay", "rtpopuspay");

gst_bin_add_many(GST_BIN(data.pipe), audiosrc, audioconv, opusenc, rtpopuspay, NULL);
gst_element_link_many(audiosrc, audioconv, opusenc, rtpopuspay, data.webrtc, NULL);

// Handle incoming streams from webrtcbin
g_signal_connect(data.webrtc, "pad-added", G_CALLBACK(on_incoming_stream), &data);

static void on_decoded_pad(GstElement *decodebin, GstPad *pad, ClientData *data) {
    GstElement *conv = NULL, *sink = NULL;
    GstCaps *caps = gst_pad_get_current_caps(pad);
    const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
    
    if (g_str_has_prefix(name, "video")) {
        conv = gst_element_factory_make("videoconvert", NULL);
        sink = gst_element_factory_make("autovideosink", NULL);
    } else if (g_str_has_prefix(name, "audio")) {
        conv = gst_element_factory_make("audioconvert", NULL);
        sink = gst_element_factory_make("autoaudiosink", NULL);
    }
    
    if (conv && sink) {
        gst_bin_add_many(GST_BIN(data->pipe), conv, sink, NULL);
        gst_element_link_many(conv, sink, NULL);
        
        GstPad *sinkpad = gst_element_get_static_pad(conv, "sink");
        gst_pad_link(pad, sinkpad);
        gst_object_unref(sinkpad);
        
        gst_element_sync_state_with_parent(conv);
        gst_element_sync_state_with_parent(sink);
    }
    
    gst_caps_unref(caps);
}

static void on_incoming_stream(GstElement *webrtc, GstPad *pad, ClientData *data) {
    /* webrtcbin src pads carry application/x-rtp caps, so feed them into
     * decodebin and pick the sinks once the decoded caps are known */
    if (GST_PAD_DIRECTION(pad) != GST_PAD_SRC)
        return;
    
    GstElement *decodebin = gst_element_factory_make("decodebin", NULL);
    g_signal_connect(decodebin, "pad-added", G_CALLBACK(on_decoded_pad), data);
    gst_bin_add(GST_BIN(data->pipe), decodebin);
    gst_element_sync_state_with_parent(decodebin);
    
    GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
    gst_pad_link(pad, sinkpad);
    gst_object_unref(sinkpad);
}

3.2 Screen Sharing

c

// Linux: use ximagesrc or pipewiresrc
GstElement *src = gst_element_factory_make("ximagesrc", "src");
g_object_set(src, "use-damage", FALSE, "show-pointer", FALSE, NULL);

// Windows: use dx9screencapsrc
// GstElement *src = gst_element_factory_make("dx9screencapsrc", "src");

// macOS: use avfvideosrc
// GstElement *src = gst_element_factory_make("avfvideosrc", "src");
// g_object_set(src, "capture-screen", TRUE, NULL);

3.3 Signaling Server Enhancements

c

// Add room management
typedef struct {
    GHashTable *peers;  // key: connection handle, value: PeerConnection
    gchar *room_id;
} Room;

static GHashTable *rooms = NULL;  // initialize once with g_hash_table_new(g_str_hash, g_str_equal)

// Create a room
Room *create_room(const gchar *room_id) {
    Room *room = g_new0(Room, 1);
    room->room_id = g_strdup(room_id);
    room->peers = g_hash_table_new(g_direct_hash, g_direct_equal);
    g_hash_table_insert(rooms, room->room_id, room);
    return room;
}

// Handle a "join" message
if (g_str_equal(msg_type, "join")) {
    const gchar *room_id = json_object_get_string_member(obj, "room");
    Room *room = g_hash_table_lookup(rooms, room_id);
    
    if (!room) {
        room = create_room(room_id);
    }
    
    g_hash_table_insert(room->peers, conn, pc);
    
    // Notify existing peers that a new user joined
    g_hash_table_foreach(room->peers, notify_new_peer, pc);
}

4. Troubleshooting

4.1 ICE Negotiation Failures

Solutions:

  • Check the STUN/TURN server configuration

  • Make sure the network does not block UDP traffic

  • Enable ICE debugging:

    c

    /* Restrict candidate gathering to UDP while debugging */
    g_object_set(webrtc, "ice-tcp", FALSE, "ice-udp", TRUE, NULL);

    bash

    # Raise the relevant debug levels when running
    GST_DEBUG="webrtcbin:5,ICE:5" ./your_program

4.2 High Latency

Optimizations:

c

// Use a low-latency encoder configuration; string values for enum/flags
// properties must go through gst_util_set_object_arg()
GstElement *enc = gst_element_factory_make("x264enc", "enc");
gst_util_set_object_arg(G_OBJECT(enc), "tune", "zerolatency");
gst_util_set_object_arg(G_OBJECT(enc), "speed-preset", "ultrafast");

// Reduce webrtcbin's jitter-buffer latency
g_object_set(webrtc, "latency", 0, NULL);

4.3 Browser Compatibility Issues

Solutions:

  • Make sure the SDP is compatible with the target browser

  • Use the VP8 codec (the most widely supported)

  • Munge the SDP before applying it (webrtcbin has no dedicated SDP-rewrite signal; edit the description between create-offer/answer and set-local-description):

    c

    /* Sketch: inside the create-answer promise callback, before emitting
     * "set-local-description" */
    gchar *text = gst_sdp_message_as_text(answer->sdp);
    /* ... adjust codec/profile attributes for the target browser ... */
    GstSDPMessage *patched = NULL;
    gst_sdp_message_new_from_text(text, &patched);
    g_free(text);

GStreamer RTP Server and Client

1. RTP Server Development

1.1 Basic Video RTP Server

c

#include <gst/gst.h>

int main(int argc, char *argv[]) {
    GstElement *pipeline, *src, *enc, *pay, *sink;
    GMainLoop *loop;
    
    gst_init(&argc, &argv);
    
    loop = g_main_loop_new(NULL, FALSE);
    
    /* Create the pipeline */
    pipeline = gst_pipeline_new("rtp-server");
    
    /* Create elements */
    src = gst_element_factory_make("videotestsrc", "source");
    enc = gst_element_factory_make("x264enc", "encoder");
    pay = gst_element_factory_make("rtph264pay", "payloader");
    sink = gst_element_factory_make("udpsink", "sink");
    
    /* Configure parameters */
    g_object_set(enc, "bitrate", 2000, "key-int-max", 30, NULL);
    g_object_set(sink, "host", "127.0.0.1", "port", 5000, NULL);
    
    /* Add elements to the pipeline */
    gst_bin_add_many(GST_BIN(pipeline), src, enc, pay, sink, NULL);
    
    /* Link elements */
    if (!gst_element_link_many(src, enc, pay, sink, NULL)) {
        g_error("Failed to link elements");
        return -1;
    }
    
    /* Start the pipeline */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_print("Streaming RTP video to udp://127.0.0.1:5000\n");
    
    g_main_loop_run(loop);
    
    /* Clean up */
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    g_main_loop_unref(loop);
    
    return 0;
}

1.2 RTP Server with Audio and Video

c

#include <gst/gst.h>

int main(int argc, char *argv[]) {
    GstElement *pipeline, *vsrc, *venc, *vpay, *asrc, *aconv, *aenc, *apay, *vsink, *asink;
    GMainLoop *loop;
    
    gst_init(&argc, &argv);
    
    loop = g_main_loop_new(NULL, FALSE);
    
    /* Create the pipeline */
    pipeline = gst_pipeline_new("rtp-server");
    
    /* Video elements */
    vsrc = gst_element_factory_make("videotestsrc", "vsource");
    venc = gst_element_factory_make("x264enc", "vencoder");
    vpay = gst_element_factory_make("rtph264pay", "vpayloader");
    vsink = gst_element_factory_make("udpsink", "vsink");
    
    /* Audio elements */
    asrc = gst_element_factory_make("audiotestsrc", "asource");
    aconv = gst_element_factory_make("audioconvert", "aconverter");
    aenc = gst_element_factory_make("opusenc", "aencoder");
    apay = gst_element_factory_make("rtpopuspay", "apayloader");
    asink = gst_element_factory_make("udpsink", "asink");
    
    /* Configure parameters */
    g_object_set(venc, "bitrate", 2000, "key-int-max", 30, NULL);
    g_object_set(vsink, "host", "127.0.0.1", "port", 5000, NULL);
    g_object_set(asink, "host", "127.0.0.1", "port", 5002, NULL);
    
    /* Add elements to the pipeline */
    gst_bin_add_many(GST_BIN(pipeline), 
                    vsrc, venc, vpay, vsink,
                    asrc, aconv, aenc, apay, asink, NULL);
    
    /* Link video elements */
    if (!gst_element_link_many(vsrc, venc, vpay, vsink, NULL)) {
        g_error("Failed to link video elements");
        return -1;
    }
    
    /* Link audio elements */
    if (!gst_element_link_many(asrc, aconv, aenc, apay, asink, NULL)) {
        g_error("Failed to link audio elements");
        return -1;
    }
    
    /* Start the pipeline */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_print("Streaming RTP:\n  Video: udp://127.0.0.1:5000\n  Audio: udp://127.0.0.1:5002\n");
    
    g_main_loop_run(loop);
    
    /* Clean up */
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    g_main_loop_unref(loop);
    
    return 0;
}

1.3 Compiling and Running the Server

bash

gcc rtp_server.c -o rtp_server `pkg-config --cflags --libs gstreamer-1.0`
./rtp_server

2. RTP Client Development

2.1 Basic Video RTP Client

c

#include <gst/gst.h>

int main(int argc, char *argv[]) {
    GstElement *pipeline, *src, *depay, *dec, *conv, *sink;
    GMainLoop *loop;
    
    gst_init(&argc, &argv);
    
    loop = g_main_loop_new(NULL, FALSE);
    
    /* Create the pipeline */
    pipeline = gst_pipeline_new("rtp-client");
    
    /* Create elements */
    src = gst_element_factory_make("udpsrc", "source");
    depay = gst_element_factory_make("rtph264depay", "depayloader");
    dec = gst_element_factory_make("avdec_h264", "decoder");
    conv = gst_element_factory_make("videoconvert", "converter");
    sink = gst_element_factory_make("autovideosink", "sink");
    
    /* Configure parameters: udpsrc needs the RTP caps spelled out,
     * otherwise the depayloader cannot negotiate */
    GstCaps *caps = gst_caps_from_string(
        "application/x-rtp,media=video,encoding-name=H264,payload=96");
    g_object_set(src, "port", 5000, "caps", caps, NULL);
    gst_caps_unref(caps);
    
    /* Add elements to the pipeline */
    gst_bin_add_many(GST_BIN(pipeline), src, depay, dec, conv, sink, NULL);
    
    /* Link elements */
    if (!gst_element_link_many(src, depay, dec, conv, sink, NULL)) {
        g_error("Failed to link elements");
        return -1;
    }
    
    /* Start the pipeline */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_print("Receiving RTP video from udp://127.0.0.1:5000\n");
    
    g_main_loop_run(loop);
    
    /* Clean up */
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    g_main_loop_unref(loop);
    
    return 0;
}

2.2 RTP Client with Audio and Video

c

#include <gst/gst.h>

int main(int argc, char *argv[]) {
    GstElement *pipeline, *vsrc, *vdepay, *vdec, *vconv, *vsink;
    GstElement *asrc, *adepay, *adec, *aconv, *asink;
    GMainLoop *loop;
    
    gst_init(&argc, &argv);
    
    loop = g_main_loop_new(NULL, FALSE);
    
    /* Create the pipeline */
    pipeline = gst_pipeline_new("rtp-client");
    
    /* Video elements */
    vsrc = gst_element_factory_make("udpsrc", "vsource");
    vdepay = gst_element_factory_make("rtph264depay", "vdepayloader");
    vdec = gst_element_factory_make("avdec_h264", "vdecoder");
    vconv = gst_element_factory_make("videoconvert", "vconverter");
    vsink = gst_element_factory_make("autovideosink", "vsink");
    
    /* Audio elements */
    asrc = gst_element_factory_make("udpsrc", "asource");
    adepay = gst_element_factory_make("rtpopusdepay", "adepayloader");
    adec = gst_element_factory_make("opusdec", "adecoder");
    aconv = gst_element_factory_make("audioconvert", "aconverter");
    asink = gst_element_factory_make("autoaudiosink", "asink");
    
    /* Configure parameters: set explicit RTP caps on each udpsrc */
    GstCaps *vcaps = gst_caps_from_string(
        "application/x-rtp,media=video,encoding-name=H264,payload=96");
    GstCaps *acaps = gst_caps_from_string(
        "application/x-rtp,media=audio,encoding-name=OPUS,payload=96");
    g_object_set(vsrc, "port", 5000, "caps", vcaps, NULL);
    g_object_set(asrc, "port", 5002, "caps", acaps, NULL);
    gst_caps_unref(vcaps);
    gst_caps_unref(acaps);
    
    /* Add elements to the pipeline */
    gst_bin_add_many(GST_BIN(pipeline), 
                    vsrc, vdepay, vdec, vconv, vsink,
                    asrc, adepay, adec, aconv, asink, NULL);
    
    /* Link video elements */
    if (!gst_element_link_many(vsrc, vdepay, vdec, vconv, vsink, NULL)) {
        g_error("Failed to link video elements");
        return -1;
    }
    
    /* Link audio elements */
    if (!gst_element_link_many(asrc, adepay, adec, aconv, asink, NULL)) {
        g_error("Failed to link audio elements");
        return -1;
    }
    
    /* Start the pipeline */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_print("Receiving RTP:\n  Video: udp://127.0.0.1:5000\n  Audio: udp://127.0.0.1:5002\n");
    
    g_main_loop_run(loop);
    
    /* Clean up */
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    g_main_loop_unref(loop);
    
    return 0;
}

2.3 Compiling and Running the Client

bash

gcc rtp_client.c -o rtp_client `pkg-config --cflags --libs gstreamer-1.0`
./rtp_client

3. Advanced Features

3.1 Dynamic Bitrate Adjustment

c

// Configure rate control on the server-side encoder
GstElement *enc = gst_element_factory_make("x264enc", "encoder");
g_object_set(enc, "bitrate", 2000, "pass", 4, "quantizer", 20, NULL);

// Adjust the bitrate at runtime
void adjust_bitrate(GstElement *enc, int new_bitrate) {
    g_object_set(enc, "bitrate", new_bitrate, NULL);
    g_print("Adjusted bitrate to %d kbps\n", new_bitrate);
}
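
For example, driven by a one-shot GLib timer (the 10-second delay and 1500 kbps target are arbitrary):

c

/* Hypothetical timer callback that downshifts the encoder bitrate */
static gboolean lower_bitrate_cb(gpointer user_data) {
    adjust_bitrate(GST_ELEMENT(user_data), 1500);
    return G_SOURCE_REMOVE;  /* fire only once */
}

/* In main(): g_timeout_add_seconds(10, lower_bitrate_cb, enc); */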

3.2 Network Condition Monitoring

c

// Add QoS monitoring and a jitter buffer on the client
GstElement *rtpsrc = gst_element_factory_make("udpsrc", "source");
g_object_set(rtpsrc, "port", 5000, NULL);

GstElement *rtpjitterbuffer = gst_element_factory_make("rtpjitterbuffer", "jitterbuf");
g_object_set(rtpjitterbuffer, "latency", 200, "do-lost", TRUE, NULL);

// Watch the bus for QoS messages
GstBus *bus = gst_element_get_bus(pipeline);
gst_bus_add_watch(bus, on_bus_message, NULL);

static gboolean on_bus_message(GstBus *bus, GstMessage *msg, gpointer data) {
    switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_QOS: {
            gdouble proportion;
            guint64 processed, dropped;
            
            gst_message_parse_qos(msg, &proportion, &processed, &dropped);
            g_print("QOS: proportion=%.2f, processed=%" G_GUINT64_FORMAT ", dropped=%" G_GUINT64_FORMAT "\n",
                   proportion, processed, dropped);
            break;
        }
        default:
            break;
    }
    return TRUE;
}

3.3 Multicast Support

c

// Server: send to a multicast address
g_object_set(sink, "host", "224.1.1.1", "port", 5000, "auto-multicast", TRUE, NULL);

// Client: join the multicast group
g_object_set(src, "address", "224.1.1.1", "port", 5000, "auto-multicast", TRUE, NULL);

4. Command-Line Examples

4.1 Server Command Line

bash

# Video stream
gst-launch-1.0 videotestsrc ! \
    videoconvert ! \
    x264enc bitrate=2000 key-int-max=30 ! \
    rtph264pay ! \
    udpsink host=127.0.0.1 port=5000

# Audio and video streams
gst-launch-1.0 videotestsrc ! \
    videoconvert ! \
    x264enc bitrate=2000 key-int-max=30 ! \
    rtph264pay ! \
    udpsink host=127.0.0.1 port=5000 \
    audiotestsrc ! \
    audioconvert ! \
    opusenc ! \
    rtpopuspay ! \
    udpsink host=127.0.0.1 port=5002

4.2 Client Command Line

bash

# Video stream
gst-launch-1.0 udpsrc port=5000 ! \
    application/x-rtp,encoding-name=H264,payload=96 ! \
    rtph264depay ! \
    avdec_h264 ! \
    videoconvert ! \
    autovideosink

# Audio and video streams
gst-launch-1.0 udpsrc port=5000 ! \
    application/x-rtp,encoding-name=H264,payload=96 ! \
    rtph264depay ! \
    avdec_h264 ! \
    videoconvert ! \
    autovideosink \
    udpsrc port=5002 ! \
    application/x-rtp,encoding-name=OPUS,payload=96 ! \
    rtpopusdepay ! \
    opusdec ! \
    audioconvert ! \
    autoaudiosink

5. Performance Tuning

5.1 Hardware Acceleration

bash

# Server with VAAPI hardware encoding
gst-launch-1.0 videotestsrc ! \
    videoconvert ! \
    vaapih264enc bitrate=2000 ! \
    rtph264pay ! \
    udpsink host=127.0.0.1 port=5000

# Client with VAAPI hardware decoding
gst-launch-1.0 udpsrc port=5000 ! \
    application/x-rtp,encoding-name=H264,payload=96 ! \
    rtph264depay ! \
    vaapih264dec ! \
    videoconvert ! \
    autovideosink

5.2 Low-Latency Configuration

bash

# Server low-latency configuration
gst-launch-1.0 videotestsrc ! \
    videoconvert ! \
    x264enc tune=zerolatency speed-preset=ultrafast bitrate=2000 key-int-max=30 ! \
    rtph264pay config-interval=1 pt=96 ! \
    udpsink host=127.0.0.1 port=5000 sync=false async=false

# Client low-latency configuration
gst-launch-1.0 udpsrc port=5000 ! \
    application/x-rtp,encoding-name=H264,payload=96 ! \
    rtph264depay ! \
    avdec_h264 ! \
    videoconvert ! \
    autovideosink sync=false

5.3 Buffer Tuning

c

// Server: don't wait on the clock
g_object_set(udpsink, "sync", FALSE, "async", FALSE, NULL);

// Client: tune the jitter buffer
GstElement *jitterbuf = gst_element_factory_make("rtpjitterbuffer", "jitterbuf");
g_object_set(jitterbuf, 
    "latency", 100,  // 100 ms of jitter-buffer latency
    "do-lost", TRUE, // emit lost-packet events for downstream handling
    NULL);
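
The jitter buffer sits between udpsrc and the depayloader. A wiring sketch, assuming pipeline, src, and depay from the basic client example:

c

/* Splice the jitter buffer into an already-linked client pipeline */
gst_bin_add(GST_BIN(pipeline), jitterbuf);
gst_element_unlink(src, depay);
gst_element_link_many(src, jitterbuf, depay, NULL);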

 

GStreamer HTTP-FLV Server and Client

1. HTTP Server

Basic Concepts

An HTTP-FLV server needs to:

  1. Receive the media stream and remux it into FLV format

  2. Serve the stream over HTTP

Implementation with GStreamer

Note: the example below uses tcpserversink, which serves the FLV bytes over a raw TCP socket rather than real HTTP. Many players accept this directly; for true HTTP-FLV, put an HTTP layer (e.g. nginx with an FLV module) in front.
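
For reference, the HTTP side of a genuine HTTP-FLV server is small: answer the client's GET with suitable headers, then stream the muxed FLV bytes. A sketch of the response preamble (the exact header set is an assumption):

c

/* Minimal HTTP response written before streaming FLV data */
static const char http_preamble[] =
    "HTTP/1.1 200 OK\r\n"
    "Content-Type: video/x-flv\r\n"
    "Connection: close\r\n"
    "\r\n";
/* e.g. write(client_fd, http_preamble, sizeof(http_preamble) - 1); */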

#include <gst/gst.h>
#include <string>
#include <iostream>

// Pipeline state
struct CustomData {
    GstElement *pipeline;
    GstElement *video_src;
    GstElement *audio_src;
    GstElement *flv_mux;
    GstElement *tcp_sink;
};

// Bus message handler
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data) {
    GMainLoop *loop = (GMainLoop *)data;

    switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_EOS:
            g_print("End of stream\n");
            g_main_loop_quit(loop);
            break;
        case GST_MESSAGE_ERROR: {
            gchar *debug;
            GError *error;
            
            gst_message_parse_error(msg, &error, &debug);
            g_printerr("ERROR: %s\n", error->message);
            g_error_free(error);
            g_free(debug);
            
            g_main_loop_quit(loop);
            break;
        }
        default:
            break;
    }

    return TRUE;
}

// Build the server pipeline
static void create_server_pipeline(CustomData *data) {
    GstElement *video_convert, *video_encoder, *audio_convert, *audio_encoder;
    GstElement *video_queue, *audio_queue;
    GstBus *bus;
    GMainLoop *loop;
    
    // Initialize GStreamer
    gst_init(NULL, NULL);
    
    loop = g_main_loop_new(NULL, FALSE);
    
    // Create pipeline elements
    data->pipeline = gst_pipeline_new("http-flv-server");
    data->video_src = gst_element_factory_make("videotestsrc", "video-source");
    data->audio_src = gst_element_factory_make("audiotestsrc", "audio-source");
    video_convert = gst_element_factory_make("videoconvert", "video-convert");
    video_encoder = gst_element_factory_make("x264enc", "video-encoder");
    audio_convert = gst_element_factory_make("audioconvert", "audio-convert");
    audio_encoder = gst_element_factory_make("voaacenc", "audio-encoder");
    data->flv_mux = gst_element_factory_make("flvmux", "flv-muxer");
    data->tcp_sink = gst_element_factory_make("tcpserversink", "tcp-sink");
    video_queue = gst_element_factory_make("queue", "video-queue");
    audio_queue = gst_element_factory_make("queue", "audio-queue");
    
    if (!data->pipeline || !data->video_src || !data->audio_src || !video_convert ||
        !video_encoder || !audio_convert || !audio_encoder || !data->flv_mux || 
        !data->tcp_sink || !video_queue || !audio_queue) {
        g_printerr("One element could not be created. Exiting.\n");
        return;
    }
    
    // Configure element parameters
    g_object_set(data->video_src, "is-live", TRUE, "pattern", 0, NULL);
    g_object_set(data->audio_src, "is-live", TRUE, "wave", 4, NULL); // 4 = red noise
    g_object_set(video_encoder, "tune", 0x4, "bitrate", 500, NULL); // 0x4 = zerolatency
    g_object_set(data->flv_mux, "streamable", TRUE, NULL);
    g_object_set(data->tcp_sink, "host", "0.0.0.0", "port", 9000, NULL);
    
    // Add elements to the pipeline
    gst_bin_add_many(GST_BIN(data->pipeline), data->video_src, video_convert, 
                    video_encoder, video_queue, data->audio_src, audio_convert,
                    audio_encoder, audio_queue, data->flv_mux, data->tcp_sink, NULL);
    
    // Link elements
    // Video chain
    if (!gst_element_link_many(data->video_src, video_convert, video_encoder, 
                             video_queue, data->flv_mux, NULL)) {
        g_printerr("Video elements could not be linked.\n");
        gst_object_unref(data->pipeline);
        return;
    }
    
    // Audio chain
    if (!gst_element_link_many(data->audio_src, audio_convert, audio_encoder, 
                             audio_queue, data->flv_mux, NULL)) {
        g_printerr("Audio elements could not be linked.\n");
        gst_object_unref(data->pipeline);
        return;
    }
    
    // Link the muxer to the sink
    if (!gst_element_link(data->flv_mux, data->tcp_sink)) {
        g_printerr("Muxer and sink could not be linked.\n");
        gst_object_unref(data->pipeline);
        return;
    }
    
    // Install the bus watch
    bus = gst_pipeline_get_bus(GST_PIPELINE(data->pipeline));
    gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);
    
    // Start playing
    g_print("Starting HTTP-FLV server on port 9000...\n");
    gst_element_set_state(data->pipeline, GST_STATE_PLAYING);
    
    // Run the main loop
    g_main_loop_run(loop);
    
    // Clean up
    gst_element_set_state(data->pipeline, GST_STATE_NULL);
    gst_object_unref(data->pipeline);
    g_main_loop_unref(loop);
}

int main(int argc, char *argv[]) {
    CustomData data;
    
    create_server_pipeline(&data);
    
    return 0;
}

Build command:

g++ http_flv_server.cpp -o http_flv_server `pkg-config --cflags --libs gstreamer-1.0`

Server implementation notes:

  • videotestsrc and audiotestsrc generate test audio and video

  • x264enc and voaacenc handle encoding

  • flvmux packages the streams into FLV

  • tcpserversink serves the result over TCP

Command-Line Version

bash

# HLS variant: hlssink2 writes TS segments plus a playlist that any web
# server can host (note this produces HLS, not HTTP-FLV)
gst-launch-1.0 \
    videotestsrc is-live=true pattern=ball ! \
    video/x-raw,width=640,height=480,framerate=30/1 ! \
    queue ! x264enc tune=zerolatency ! h264parse ! \
    hlssink2 name=sink location=/tmp/segment_%05d.ts playlist-location=/tmp/playlist.m3u8 \
    audiotestsrc is-live=true wave=red-noise ! \
    audioconvert ! audioresample ! voaacenc ! aacparse ! queue ! sink.audio

Or, matching the C example, serve FLV over raw TCP:

bash

# Serve an FLV stream with tcpserversink
gst-launch-1.0 -v flvmux name=mux streamable=true ! tcpserversink host=0.0.0.0 port=9000 \
    videotestsrc is-live=true ! video/x-raw,width=640,height=480,framerate=15/1 ! videoconvert ! x264enc tune=zerolatency ! mux. \
    audiotestsrc is-live=true ! audioconvert ! voaacenc ! mux.

2. HTTP Client

2.1 Code Implementation

#include <gst/gst.h>
#include <glib.h>
#include <iostream>

// Client state
struct ClientData {
    GstElement *pipeline;
    GstElement *source;
    GstElement *demux;
    GstElement *video_queue;
    GstElement *audio_queue;
    GMainLoop *loop;
};

// Handle flvdemux's pad-added signal
static void on_pad_added(GstElement *element, GstPad *pad, gpointer data) {
    ClientData *client_data = (ClientData *)data;
    GstPad *sinkpad = NULL;
    GstCaps *caps;
    gchar *pad_name;
    
    pad_name = gst_pad_get_name(pad);
    caps = gst_pad_get_current_caps(pad);
    gchar *caps_str = gst_caps_to_string(caps);
    g_print("New pad %s with caps %s\n", pad_name, caps_str);
    g_free(caps_str);
    gst_caps_unref(caps);
    
    // Check the pad type and link it to the matching branch
    if (g_strrstr(pad_name, "video") != NULL) {
        sinkpad = gst_element_get_static_pad(client_data->video_queue, "sink");
    } else if (g_strrstr(pad_name, "audio") != NULL) {
        sinkpad = gst_element_get_static_pad(client_data->audio_queue, "sink");
    }
    
    if (sinkpad != NULL) {
        if (gst_pad_link(pad, sinkpad) != GST_PAD_LINK_OK) {
            g_printerr("Failed to link pads!\n");
        } else {
            g_print("Linked pad %s\n", pad_name);
        }
        gst_object_unref(sinkpad);
    }
    g_free(pad_name);
}

// Bus message handler
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data) {
    ClientData *client_data = (ClientData *)data;
    
    switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_EOS:
            g_print("End of stream\n");
            g_main_loop_quit(client_data->loop);
            break;
        case GST_MESSAGE_ERROR: {
            gchar *debug;
            GError *error;
            
            gst_message_parse_error(msg, &error, &debug);
            g_printerr("ERROR: %s\n", error->message);
            g_error_free(error);
            g_free(debug);
            
            g_main_loop_quit(client_data->loop);
            break;
        }
        default:
            break;
    }
    
    return TRUE;
}

// Build the client pipeline
void create_client_pipeline(ClientData *data, const gchar *uri) {
    GstElement *video_parse, *video_decoder, *video_convert, *video_sink;
    GstElement *audio_parse, *audio_decoder, *audio_convert, *audio_sink;
    GstBus *bus;
    
    // Initialize GStreamer
    gst_init(NULL, NULL);
    
    data->loop = g_main_loop_new(NULL, FALSE);
    
    // Create pipeline elements
    data->pipeline = gst_pipeline_new("http-flv-client");
    data->source = gst_element_factory_make("souphttpsrc", "source");
    data->demux = gst_element_factory_make("flvdemux", "demux");
    data->video_queue = gst_element_factory_make("queue", "video-queue");
    data->audio_queue = gst_element_factory_make("queue", "audio-queue");
    video_parse = gst_element_factory_make("h264parse", "h264-parser");
    video_decoder = gst_element_factory_make("avdec_h264", "h264-decoder");
    video_convert = gst_element_factory_make("videoconvert", "video-convert");
    video_sink = gst_element_factory_make("autovideosink", "video-sink");
    audio_parse = gst_element_factory_make("aacparse", "aac-parser");
    audio_decoder = gst_element_factory_make("avdec_aac", "aac-decoder");
    audio_convert = gst_element_factory_make("audioconvert", "audio-convert");
    audio_sink = gst_element_factory_make("autoaudiosink", "audio-sink");
    
    if (!data->pipeline || !data->source || !data->demux || !data->video_queue || 
        !data->audio_queue || !video_parse || !video_decoder || !video_convert || 
        !video_sink || !audio_parse || !audio_decoder || !audio_convert || !audio_sink) {
        g_printerr("One element could not be created. Exiting.\n");
        return;
    }
    
    // Configure the source
    g_object_set(data->source, "location", uri, NULL);
    
    // Add elements to the pipeline
    gst_bin_add_many(GST_BIN(data->pipeline), data->source, data->demux, 
                    data->video_queue, video_parse, video_decoder, video_convert, 
                    video_sink, data->audio_queue, audio_parse, audio_decoder, 
                    audio_convert, audio_sink, NULL);
    
    // Link source to demuxer
    if (!gst_element_link(data->source, data->demux)) {
        g_printerr("Source and demuxer could not be linked.\n");
        gst_object_unref(data->pipeline);
        return;
    }
    
    // Video chain
    if (!gst_element_link_many(data->video_queue, video_parse, video_decoder, 
                             video_convert, video_sink, NULL)) {
        g_printerr("Video elements could not be linked.\n");
        gst_object_unref(data->pipeline);
        return;
    }
    
    // Audio processing chain
    if (!gst_element_link_many(data->audio_queue, audio_parse, audio_decoder, 
                             audio_convert, audio_sink, NULL)) {
        g_printerr("Audio elements could not be linked.\n");
        gst_object_unref(data->pipeline);
        return;
    }
    
    // Connect the demuxer's pad-added signal
    g_signal_connect(data->demux, "pad-added", G_CALLBACK(on_pad_added), data);
    
    // Install the bus message handler
    bus = gst_pipeline_get_bus(GST_PIPELINE(data->pipeline));
    gst_bus_add_watch(bus, bus_call, data);
    gst_object_unref(bus);
    
    // Start playing
    g_print("Starting HTTP-FLV client...\n");
    gst_element_set_state(data->pipeline, GST_STATE_PLAYING);
    
    // Run the main loop
    g_main_loop_run(data->loop);
    
    // Clean up
    gst_element_set_state(data->pipeline, GST_STATE_NULL);
    gst_object_unref(data->pipeline);
    g_main_loop_unref(data->loop);
}

int main(int argc, char *argv[]) {
    ClientData data;
    
    if (argc != 2) {
        g_printerr("Usage: %s <http-flv-url>\n", argv[0]);
        return -1;
    }
    
    create_client_pipeline(&data, argv[1]);
    
    return 0;
}

Compile command:

g++ http_flv_client.cpp -o http_flv_client `pkg-config --cflags --libs gstreamer-1.0`

Client implementation:

  • Uses souphttpsrc to receive the HTTP-FLV stream

  • Demuxes the FLV container with flvdemux

  • Handles the audio and video streams via dynamic pad linking

  • Decodes and renders audio and video on separate branches

2.2 Command-Line Implementation

Playing an HTTP-FLV stream with GStreamer
bash
# Play an HTTP-FLV stream
gst-launch-1.0 -v flvdemux name=demux ! queue ! h264parse ! avdec_h264 ! videoconvert ! autovideosink \
    demux. ! queue ! aacparse ! avdec_aac ! audioconvert ! autoaudiosink \
    urisourcebin uri=http://server:port/live/stream.flv ! demux.
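
If automatic demuxing and decoder selection are acceptable, playbin offers a one-element alternative; a sketch using the same placeholder URL as above:

bash

# playbin auto-selects souphttpsrc and builds the decode chain itself
gst-launch-1.0 playbin uri=http://server:port/live/stream.flv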

Usage

  1. Start the server:

    bash

    ./http_flv_server

    The server serves the HTTP-FLV stream on TCP port 9000

  2. Start the client:

    bash

    ./http_flv_client http://server-ip:9000

    Replace server-ip with the server's actual IP address

 

GStreamer SRT Server and Client

SRT (Secure Reliable Transport) is an open-source transport protocol that delivers secure, reliable video transmission over unpredictable networks.

1. SRT Server Implementation

Server code implementation

cpp

#include <gst/gst.h>
#include <glib.h>
#include <iostream>

struct ServerData {
    GstElement *pipeline;
    GstElement *video_src;
    GstElement *audio_src;
    GstElement *video_enc;
    GstElement *audio_enc;
    GstElement *mux;
    GstElement *srt_sink;
    GMainLoop *loop;
};

static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data) {
    ServerData *server_data = (ServerData *)data;

    switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_EOS:
            g_print("End of stream\n");
            g_main_loop_quit(server_data->loop);
            break;
        case GST_MESSAGE_ERROR: {
            gchar *debug;
            GError *error;
            
            gst_message_parse_error(msg, &error, &debug);
            g_printerr("ERROR: %s\n", error->message);
            g_error_free(error);
            g_free(debug);
            
            g_main_loop_quit(server_data->loop);
            break;
        }
        default:
            break;
    }

    return TRUE;
}

void create_srt_server(ServerData *data, int port) {
    GstElement *video_convert, *audio_convert;
    GstElement *video_queue, *audio_queue;
    GstBus *bus;

    gst_init(NULL, NULL);
    
    data->loop = g_main_loop_new(NULL, FALSE);

    // Create the pipeline elements
    data->pipeline = gst_pipeline_new("srt-server");
    data->video_src = gst_element_factory_make("videotestsrc", "video-source");
    data->audio_src = gst_element_factory_make("audiotestsrc", "audio-source");
    video_convert = gst_element_factory_make("videoconvert", "video-convert");
    data->video_enc = gst_element_factory_make("x264enc", "video-encoder");
    audio_convert = gst_element_factory_make("audioconvert", "audio-convert");
    data->audio_enc = gst_element_factory_make("voaacenc", "audio-encoder");
    data->mux = gst_element_factory_make("mpegtsmux", "mux");
    data->srt_sink = gst_element_factory_make("srtsink", "srt-sink");
    video_queue = gst_element_factory_make("queue", "video-queue");
    audio_queue = gst_element_factory_make("queue", "audio-queue");

    if (!data->pipeline || !data->video_src || !data->audio_src || !video_convert ||
        !data->video_enc || !audio_convert || !data->audio_enc || !data->mux || 
        !data->srt_sink || !video_queue || !audio_queue) {
        g_printerr("One element could not be created. Exiting.\n");
        return;
    }

    // Configure element properties
    g_object_set(data->video_src, "is-live", TRUE, "pattern", 0, NULL);
    g_object_set(data->audio_src, "is-live", TRUE, "wave", 4, NULL); // wave=4 is silence; use 0 for an audible sine tone
    g_object_set(data->video_enc, "tune", 0x4, "bitrate", 500, NULL); // 0x4 = zerolatency
    gchar *srt_uri = g_strdup_printf("srt://:%d?mode=listener", port);
    g_object_set(data->srt_sink, "uri", srt_uri, NULL);
    g_free(srt_uri); // g_object_set() copies the string, so release our copy

    // Add the elements to the pipeline
    gst_bin_add_many(GST_BIN(data->pipeline), data->video_src, video_convert, 
                    video_queue, data->video_enc, data->audio_src, audio_convert,
                    audio_queue, data->audio_enc, data->mux, data->srt_sink, NULL);

    // Link the video processing chain
    if (!gst_element_link_many(data->video_src, video_convert, video_queue, 
                             data->video_enc, NULL)) {
        g_printerr("Video elements could not be linked.\n");
        gst_object_unref(data->pipeline);
        return;
    }

    // Link the audio processing chain
    if (!gst_element_link_many(data->audio_src, audio_convert, audio_queue, 
                             data->audio_enc, NULL)) {
        g_printerr("Audio elements could not be linked.\n");
        gst_object_unref(data->pipeline);
        return;
    }

    // Link the encoders to the muxer. mpegtsmux request pads are named
    // "sink_%d" (not "video_%u"/"audio_%u"); requesting by the template
    // name hands out the next free pad each time.
    GstPad *video_sink_pad = gst_element_get_request_pad(data->mux, "sink_%d");
    GstPad *audio_sink_pad = gst_element_get_request_pad(data->mux, "sink_%d");
    
    GstPad *video_src_pad = gst_element_get_static_pad(data->video_enc, "src");
    GstPad *audio_src_pad = gst_element_get_static_pad(data->audio_enc, "src");
    
    if (gst_pad_link(video_src_pad, video_sink_pad) != GST_PAD_LINK_OK ||
        gst_pad_link(audio_src_pad, audio_sink_pad) != GST_PAD_LINK_OK) {
        g_printerr("Could not link encoders to muxer.\n");
        gst_object_unref(data->pipeline);
        return;
    }
    
    gst_object_unref(video_src_pad);
    gst_object_unref(audio_src_pad);
    gst_object_unref(video_sink_pad);
    gst_object_unref(audio_sink_pad);

    // Link the muxer to the SRT sink
    if (!gst_element_link(data->mux, data->srt_sink)) {
        g_printerr("Muxer and SRT sink could not be linked.\n");
        gst_object_unref(data->pipeline);
        return;
    }

    // Install the bus message handler
    bus = gst_pipeline_get_bus(GST_PIPELINE(data->pipeline));
    gst_bus_add_watch(bus, bus_call, data);
    gst_object_unref(bus);

    // Start playing
    g_print("Starting SRT server on port %d...\n", port);
    gst_element_set_state(data->pipeline, GST_STATE_PLAYING);

    // Run the main loop
    g_main_loop_run(data->loop);

    // Clean up
    gst_element_set_state(data->pipeline, GST_STATE_NULL);
    gst_object_unref(data->pipeline);
    g_main_loop_unref(data->loop);
}

int main(int argc, char *argv[]) {
    ServerData data;
    int port = 1234;

    if (argc > 1) {
        port = atoi(argv[1]);
    }

    create_srt_server(&data, port);

    return 0;
}

Server compile command (only the core gstreamer-1.0 module is needed; the code calls no SRT-specific API directly)

bash

g++ srt_server.cpp -o srt_server `pkg-config --cflags --libs gstreamer-1.0`

SRT server command line (sending a stream)

bash

gst-launch-1.0 \
  videotestsrc is-live=true pattern=ball ! \
  video/x-raw,width=640,height=480,framerate=30/1 ! \
  queue ! x264enc tune=zerolatency bitrate=500 ! \
  mpegtsmux name=mux ! \
  srtsink uri="srt://:1234?mode=listener" \
  audiotestsrc wave=sine ! \
  audioconvert ! voaacenc ! mux.

Key parameters:

  • mode=listener: server mode; waits for a client to connect

  • srtsink: the SRT output element (listening on port 1234 in this example)

  • mpegtsmux: multiplexes audio and video into MPEG-TS (the container commonly used over SRT)

  • Test sources: videotestsrc (video) and audiotestsrc (audio)

Encrypted transmission (server)

bash

gst-launch-1.0 \
  videotestsrc ! x264enc ! mpegtsmux ! \
  srtsink uri="srt://:1234?mode=listener&passphrase=mysecret&pbkeylen=16"

2. SRT Client Implementation

Client code implementation

cpp

#include <gst/gst.h>
#include <glib.h>
#include <iostream>

struct ClientData {
    GstElement *pipeline;
    GstElement *srt_src;
    GstElement *demux;
    GstElement *video_queue;
    GstElement *audio_queue;
    GMainLoop *loop;
};

static void on_pad_added(GstElement *element, GstPad *pad, gpointer data) {
    ClientData *client_data = (ClientData *)data;
    GstPad *sinkpad = NULL;
    GstCaps *caps;
    gchar *caps_str;
    // GST_PAD_NAME() returns the pad's internal name, so nothing has to be
    // freed (gst_pad_get_name() would return a copy that must be g_free'd)
    const gchar *pad_name = GST_PAD_NAME(pad);
    
    caps = gst_pad_get_current_caps(pad);  // may be NULL before negotiation
    caps_str = caps ? gst_caps_to_string(caps) : g_strdup("(none)");
    g_print("New pad %s with caps %s\n", pad_name, caps_str);
    g_free(caps_str);
    if (caps)
        gst_caps_unref(caps);
    
    // Check the pad type and link it to the matching processing chain
    if (g_strrstr(pad_name, "video") != NULL) {
        sinkpad = gst_element_get_static_pad(client_data->video_queue, "sink");
    } else if (g_strrstr(pad_name, "audio") != NULL) {
        sinkpad = gst_element_get_static_pad(client_data->audio_queue, "sink");
    }
    
    if (sinkpad != NULL) {
        if (gst_pad_link(pad, sinkpad) != GST_PAD_LINK_OK) {
            g_printerr("Failed to link pads!\n");
        } else {
            g_print("Linked pad %s\n", pad_name);
        }
        gst_object_unref(sinkpad);
    }
}
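
/* Note: tsdemux source pad names embed the stream type and the MPEG-TS PID
 * (e.g. roughly "video_0_0041" / "audio_0_0042"), so the substring checks
 * above match both kinds of pads. */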

static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data) {
    ClientData *client_data = (ClientData *)data;
    
    switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_EOS:
            g_print("End of stream\n");
            g_main_loop_quit(client_data->loop);
            break;
        case GST_MESSAGE_ERROR: {
            gchar *debug;
            GError *error;
            
            gst_message_parse_error(msg, &error, &debug);
            g_printerr("ERROR: %s\n", error->message);
            g_error_free(error);
            g_free(debug);
            
            g_main_loop_quit(client_data->loop);
            break;
        }
        default:
            break;
    }
    
    return TRUE;
}

void create_srt_client(ClientData *data, const gchar *uri) {
    GstElement *video_parse, *video_decoder, *video_convert, *video_sink;
    GstElement *audio_parse, *audio_decoder, *audio_convert, *audio_sink;
    GstBus *bus;
    
    gst_init(NULL, NULL);
    
    data->loop = g_main_loop_new(NULL, FALSE);
    
    // Create the pipeline elements
    data->pipeline = gst_pipeline_new("srt-client");
    data->srt_src = gst_element_factory_make("srtsrc", "srt-source");
    data->demux = gst_element_factory_make("tsdemux", "demux");
    data->video_queue = gst_element_factory_make("queue", "video-queue");
    data->audio_queue = gst_element_factory_make("queue", "audio-queue");
    video_parse = gst_element_factory_make("h264parse", "h264-parser");
    video_decoder = gst_element_factory_make("avdec_h264", "h264-decoder");
    video_convert = gst_element_factory_make("videoconvert", "video-convert");
    video_sink = gst_element_factory_make("autovideosink", "video-sink");
    audio_parse = gst_element_factory_make("aacparse", "aac-parser");
    audio_decoder = gst_element_factory_make("avdec_aac", "aac-decoder");
    audio_convert = gst_element_factory_make("audioconvert", "audio-convert");
    audio_sink = gst_element_factory_make("autoaudiosink", "audio-sink");
    
    if (!data->pipeline || !data->srt_src || !data->demux || !data->video_queue || 
        !data->audio_queue || !video_parse || !video_decoder || !video_convert || 
        !video_sink || !audio_parse || !audio_decoder || !audio_convert || !audio_sink) {
        g_printerr("One element could not be created. Exiting.\n");
        return;
    }
    
    // Configure the SRT source
    g_object_set(data->srt_src, "uri", uri, NULL);
    
    // Add the elements to the pipeline
    gst_bin_add_many(GST_BIN(data->pipeline), data->srt_src, data->demux, 
                    data->video_queue, video_parse, video_decoder, video_convert, 
                    video_sink, data->audio_queue, audio_parse, audio_decoder, 
                    audio_convert, audio_sink, NULL);
    
    // Link the SRT source to the demuxer
    if (!gst_element_link(data->srt_src, data->demux)) {
        g_printerr("SRT source and demuxer could not be linked.\n");
        gst_object_unref(data->pipeline);
        return;
    }
    
    // Video processing chain
    if (!gst_element_link_many(data->video_queue, video_parse, video_decoder, 
                             video_convert, video_sink, NULL)) {
        g_printerr("Video elements could not be linked.\n");
        gst_object_unref(data->pipeline);
        return;
    }
    
    // Audio processing chain
    if (!gst_element_link_many(data->audio_queue, audio_parse, audio_decoder, 
                             audio_convert, audio_sink, NULL)) {
        g_printerr("Audio elements could not be linked.\n");
        gst_object_unref(data->pipeline);
        return;
    }
    
    // Connect the demuxer's pad-added signal
    g_signal_connect(data->demux, "pad-added", G_CALLBACK(on_pad_added), data);
    
    // Install the bus message handler
    bus = gst_pipeline_get_bus(GST_PIPELINE(data->pipeline));
    gst_bus_add_watch(bus, bus_call, data);
    gst_object_unref(bus);
    
    // Start playing
    g_print("Starting SRT client...\n");
    gst_element_set_state(data->pipeline, GST_STATE_PLAYING);
    
    // Run the main loop
    g_main_loop_run(data->loop);
    
    // Clean up
    gst_element_set_state(data->pipeline, GST_STATE_NULL);
    gst_object_unref(data->pipeline);
    g_main_loop_unref(data->loop);
}

int main(int argc, char *argv[]) {
    ClientData data;
    
    if (argc != 2) {
        g_printerr("Usage: %s <srt-uri>\n", argv[0]);
        g_printerr("Example: %s srt://127.0.0.1:1234\n", argv[0]);
        return -1;
    }
    
    create_srt_client(&data, argv[1]);
    
    return 0;
}

Client compile command (again, only the core gstreamer-1.0 module is needed)

bash

g++ srt_client.cpp -o srt_client `pkg-config --cflags --libs gstreamer-1.0`

SRT client command line (receiving a stream)

bash

gst-launch-1.0 \
  srtsrc uri="srt://127.0.0.1:1234?mode=caller" ! \
  tsdemux name=demux \
  demux. ! queue ! h264parse ! avdec_h264 ! videoconvert ! autovideosink \
  demux. ! queue ! aacparse ! avdec_aac ! audioconvert ! autoaudiosink

Key parameters:

  • mode=caller: client mode; actively connects to the server

  • tsdemux: demultiplexes the MPEG-TS stream

  • The video (H.264) and audio (AAC) streams are handled on separate branches
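
Because srtsrc registers the srt:// URI scheme, playbin can also consume the stream directly; a convenience sketch, assuming the same server as above:

bash

gst-launch-1.0 playbin uri="srt://127.0.0.1:1234?mode=caller"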

Low-latency mode (client)

bash

gst-launch-1.0 \
  srtsrc uri="srt://127.0.0.1:1234?mode=caller&latency=100" ! \
  tsdemux ! ...

Quick reference: common SRT parameters

Parameter    Description                          Example
mode         Connection mode (listener/caller)    mode=listener
latency      Transmission latency (ms)            latency=200
passphrase   Encryption passphrase                passphrase=12345
pbkeylen     Key length (16/24/32)                pbkeylen=16
maxbw        Maximum bandwidth (bytes/sec)        maxbw=10000000
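
Several of these can be combined in one URI; for example, a listener that raises the latency window and caps bandwidth:

bash

gst-launch-1.0 videotestsrc ! x264enc ! mpegtsmux ! \
  srtsink uri="srt://:1234?mode=listener&latency=200&maxbw=10000000"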

 

3. Usage

  1. Start the server:

    bash

    ./srt_server [port]

    The default port is 1234

  2. Start the client:

    bash

    ./srt_client srt://server-ip:port

    Replace server-ip and port with the server's actual address and port

4. SRT Key Parameter Configuration

SRT parameters can be appended to the URI:

cpp

// Server configuration example
g_object_set(data->srt_sink, "uri", "srt://:1234?mode=listener&latency=200&passphrase=mysecret", NULL);

// Client configuration example
g_object_set(data->srt_src, "uri", "srt://127.0.0.1:1234?mode=caller&latency=200&passphrase=mysecret", NULL);

Common SRT parameters:

  • mode=listener/caller/rendezvous - connection mode

  • latency=ms - latency setting (milliseconds)

  • passphrase - encryption passphrase

  • pbkeylen - encryption key length (16/24/32)

  • maxbw - maximum bandwidth
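
The same options are also exposed as properties on srtsrc/srtsink, which avoids building URI strings; a sketch, assuming the property names listed by gst-inspect-1.0 srtsrc:

cpp

/* Equivalent to appending ?latency=200&passphrase=mysecret&pbkeylen=16
 * to the srt:// URI. */
g_object_set(data->srt_src,
             "latency", 200,            /* receive latency in ms */
             "passphrase", "mysecret",  /* must match the sender */
             "pbkeylen", 16,            /* AES-128 */
             NULL);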

5. Extension Suggestions

  1. Encrypted transport:

    • Enable encryption with the passphrase and pbkeylen parameters

  2. Adaptive bitrate:

    • Adjust encoding parameters dynamically based on network conditions

  3. Multiple clients:

    • Allow several clients to connect to the same stream simultaneously

  4. Statistics:

    • Monitor transmission quality via SRT's statistics (see the sketch after this list)

  5. Error recovery:

    • Implement automatic reconnection and error-recovery mechanisms
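
For item 4, a minimal sketch of polling transfer statistics, assuming the srt_sink element from the server code above; srtsrc and srtsink expose a readable "stats" property that returns a GstStructure:

cpp

/* Poll SRT statistics once per second; install with:
 *   g_timeout_add_seconds(1, print_srt_stats, data->srt_sink); */
static gboolean print_srt_stats(gpointer user_data) {
    GstElement *srt_element = GST_ELEMENT(user_data);
    GstStructure *stats = NULL;

    g_object_get(srt_element, "stats", &stats, NULL);
    if (stats != NULL) {
        gchar *str = gst_structure_to_string(stats);
        g_print("SRT stats: %s\n", str);
        g_free(str);
        gst_structure_free(stats);  /* g_object_get returned a copy */
    }
    return G_SOURCE_CONTINUE;  /* keep the periodic timer alive */
}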
