我們使用gstreamer-rtsp-server ,實作了USB相機轉RTSP網絡流的基本功能,但是效果很一般,接下來我們需要做優化,以提升其整體性能。
我自己的測試硬體環境在文章《【GStreamer 】USB相機 Jeston TX1平台測試環境說明_機器人虎哥的部落格-CSDN部落格》有說明,請必要的時候參考。
1、test-launch 測試,使用videotestsrc 為視訊源
一直以來我們測試的指令為:使用 videotestsrc 作為視訊源,可以同時並發連接多路用戶端
./test-launch "( videotestsrc ! x264enc ! rtph264pay name=pay0 pt=96 )"

2、test-launch 測試,使用v4l2src device=/dev/video0 為視訊源
./test-launch --gst-debug-level=3 "( v4l2src device=/dev/video0 ! videoconvert! videoscale ! video/x-raw, width=640, height=480, framerate=25/1 ! queue ! x264enc bitrate=2048 ! rtph264pay name=pay0 pt=96 )"
發現使用USB相機作為視訊源時,連接第二路用戶端就會出現報錯。
錯誤資訊為:
Call to S_FMT failed for YUYV @ 640x480: Device or resource busy)
0:00:45.254816029 21038 0x55b0381590 WARN rtspmedia rtsp-media.c:2573:wait_preroll: failed to preroll pipeline
0:00:45.254838113 21038 0x55b0381590 WARN rtspmedia rtsp-media.c:2877:gst_rtsp_media_prepare: failed to preroll pipeline
0:00:45.259095810 21038 0x55b0381590 ERROR rtspclient rtsp-client.c:767:find_media: client 0x55b0566d60: can't prepare media
0:00:45.259610808 21038 0x55b0381590 ERROR rtspclient rtsp-client.c:2041:handle_setup_request: client 0x55b0566d60: media '/test' not found
所以,我們其實可以理解為:這個測試代碼,每來一個用戶端連接,都會把從視訊源到最後封裝的整條管道重新建立一次。第1節中我們使用 videotestsrc 作為測試源,它支援被多個管道同時開啟;第2節中的USB裝置則不支援被多個管道同時開啟,所以報了 Device or resource busy 的錯誤。
3、修改代碼,USB相機源時支援多路複用轉發
上面的測試發現,使用USB源時不支援並發複用,無法同時服務多個用戶端。如何修改才能實現呢?仔細閱讀官方給的示例代碼後,發現有這樣的描述:
預設情況下,工廠将為每個客戶機建立一個新管道。如果要在用戶端之間共享管道,請使用 `gst_rtsp_media_factory_set_shared()`。
仔細閱讀test-readme.c 代碼:
/* GStreamer
*/
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
/* Entry point of test-readme.c: starts a GstRTSPServer that serves a
 * videotestsrc-based H.264 stream at rtsp://127.0.0.1:8554/test.
 * g_main_loop_run() blocks forever, so the final return is only reached
 * if the loop is quit externally. */
int
main (int argc, char *argv[])
{
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
/* initialize GStreamer; also strips GStreamer options from argc/argv */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* create a server instance */
server = gst_rtsp_server_new ();
/* get the mount points for this server, every server has a default object
 * that be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points (server);
/* make a media factory for a test stream. The default media factory can use
 * gst-launch syntax to create pipelines.
 * any launch line works as long as it contains elements named pay%d. Each
 * element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory,
"( videotestsrc is-live=1 ! x264enc ! rtph264pay name=pay0 pt=96 )");
/* share ONE pipeline among all clients instead of creating a new pipeline
 * per client — this is the call that test-launch.c is missing */
gst_rtsp_media_factory_set_shared (factory, TRUE);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory (mounts, "/test", factory);
/* don't need the ref to the mapper anymore */
g_object_unref (mounts);
/* attach the server to the default maincontext */
gst_rtsp_server_attach (server, NULL);
/* start serving */
g_print ("stream ready at rtsp://127.0.0.1:8554/test\n");
g_main_loop_run (loop);
return 0;
}
然後再閱讀test-launch.c 代碼:
/* GStreamer
*/
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#define DEFAULT_RTSP_PORT "8554"
static char *port = (char *) DEFAULT_RTSP_PORT;
static GOptionEntry entries[] = {
{"port", 'p', 0, G_OPTION_ARG_STRING, &port,
"Port to listen on (default: " DEFAULT_RTSP_PORT ")", "PORT"},
{NULL}
};
/* Entry point of test-launch.c: builds an RTSP server from the gst-launch
 * pipeline description supplied in argv[1] and serves it at
 * rtsp://<host>:<port>/test (port configurable via --port).
 * Returns 0 on success, -1 on option-parsing failure.
 * NOTE(review): argv[1] is used without checking that a launch line was
 * actually given — running with no arguments passes NULL to
 * gst_rtsp_media_factory_set_launch(). */
int
main (int argc, char *argv[])
{
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
GOptionContext *optctx;
GError *error = NULL;
/* parse --port plus the standard GStreamer options (gst_init is done
 * implicitly by the GStreamer option group) */
optctx = g_option_context_new ("<launch line> - Test RTSP Server, Launch\n\n"
"Example: \"( videotestsrc ! x264enc ! rtph264pay name=pay0 pt=96 )\"");
g_option_context_add_main_entries (optctx, entries, NULL);
g_option_context_add_group (optctx, gst_init_get_option_group ());
if (!g_option_context_parse (optctx, &argc, &argv, &error)) {
g_printerr ("Error parsing options: %s\n", error->message);
g_option_context_free (optctx);
g_clear_error (&error);
return -1;
}
g_option_context_free (optctx);
loop = g_main_loop_new (NULL, FALSE);
/* create a server instance */
server = gst_rtsp_server_new ();
/* listen on the requested port (a string service name, default 8554) */
g_object_set (server, "service", port, NULL);
/* get the mount points for this server, every server has a default object
 * that be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points (server);
/* make a media factory for a test stream. The default media factory can use
 * gst-launch syntax to create pipelines.
 * any launch line works as long as it contains elements named pay%d. Each
 * element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory, argv[1]);
/* NOTE(review): no gst_rtsp_media_factory_set_shared() here, so every
 * client gets its own pipeline — exclusive sources such as v4l2src then
 * fail with "Device or resource busy" on the second connection */
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory (mounts, "/test", factory);
/* don't need the ref to the mapper anymore */
g_object_unref (mounts);
/* attach the server to the default maincontext */
gst_rtsp_server_attach (server, NULL);
/* start serving */
g_print ("stream ready at rtsp://127.0.0.1:%s/test\n", port);
g_main_loop_run (loop);
return 0;
}
發現差别和描述是一緻的:
/* test-readme.c */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory,
"( videotestsrc is-live=1 ! x264enc ! rtph264pay name=pay0 pt=96 )");
gst_rtsp_media_factory_set_shared (factory, TRUE);
gst_rtsp_mount_points_add_factory (mounts, "/test", factory);
/* test-launch.c */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory, argv[1]);
gst_rtsp_mount_points_add_factory (mounts, "/test", factory);
發現這個差別和描述,我們再看看這個描述
預設情況下,工廠将為每個客戶機建立一個新管道。如果要在用戶端之間共享管道,請使用 `gst_rtsp_media_factory_set_shared()`。
我們修改test-launch.c 代碼,重新命名一個新檔案:test-launch2.c 修改後的代碼:
/* GStreamer
*/
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#define DEFAULT_RTSP_PORT "8554"
static char *port = (char *) DEFAULT_RTSP_PORT;
static GOptionEntry entries[] = {
{"port", 'p', 0, G_OPTION_ARG_STRING, &port,
"Port to listen on (default: " DEFAULT_RTSP_PORT ")", "PORT"},
{NULL}
};
/* Entry point of test-launch2.c: builds an RTSP server from the gst-launch
 * pipeline description supplied in argv[1] and serves it at
 * rtsp://<host>:<port>/test (port configurable via --port).
 * The media factory is marked shared, so all clients reuse a single
 * pipeline; this is what lets an exclusive source such as v4l2src serve
 * multiple concurrent clients.
 * Returns 0 on success, -1 on bad options or a missing launch line. */
int
main (int argc, char *argv[])
{
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
GOptionContext *optctx;
GError *error = NULL;
/* parse --port plus the standard GStreamer options (gst_init is done
 * implicitly by the GStreamer option group) */
optctx = g_option_context_new ("<launch line> - Test RTSP Server, Launch\n\n"
"Example: \"( videotestsrc ! x264enc ! rtph264pay name=pay0 pt=96 )\"");
g_option_context_add_main_entries (optctx, entries, NULL);
g_option_context_add_group (optctx, gst_init_get_option_group ());
if (!g_option_context_parse (optctx, &argc, &argv, &error)) {
g_printerr ("Error parsing options: %s\n", error->message);
g_option_context_free (optctx);
g_clear_error (&error);
return -1;
}
g_option_context_free (optctx);
/* a launch line is mandatory; without this guard argv[1] would be NULL
 * and gst_rtsp_media_factory_set_launch() would receive a NULL string */
if (argc < 2) {
g_printerr ("Usage: %s [OPTION...] \"( <launch line> )\"\n", argv[0]);
return -1;
}
loop = g_main_loop_new (NULL, FALSE);
/* create a server instance */
server = gst_rtsp_server_new ();
/* listen on the requested port (a string service name, default 8554) */
g_object_set (server, "service", port, NULL);
/* get the mount points for this server, every server has a default object
 * that be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points (server);
/* make a media factory for a test stream. The default media factory can use
 * gst-launch syntax to create pipelines.
 * any launch line works as long as it contains elements named pay%d. Each
 * element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory, argv[1]);
/* newly added: share one pipeline among all clients instead of rebuilding
 * it per connection */
gst_rtsp_media_factory_set_shared (factory, TRUE);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory (mounts, "/test", factory);
/* don't need the ref to the mapper anymore */
g_object_unref (mounts);
/* attach the server to the default maincontext */
gst_rtsp_server_attach (server, NULL);
/* start serving */
g_print ("stream ready at rtsp://127.0.0.1:%s/test\n", port);
g_main_loop_run (loop);
return 0;
}
編譯:
gcc test-launch2.c -o test-launch2 $(pkg-config --cflags --libs gstreamer-rtsp-server-1.0 gstreamer-1.0)
測試:
./test-launch2 --gst-debug-level=3 "( v4l2src device=/dev/video0 ! videoconvert! videoscale ! video/x-raw, width=640, height=480, framerate=25/1 ! queue ! x264enc bitrate=2048 ! rtph264pay name=pay0 pt=96 )"
這樣修改就實作了一個可以多路複用的USB轉RTSP伺服器
4、測試硬體加速的效果
./test-launch2 --gst-debug-level=3 "( v4l2src device=/dev/video0 ! video/x-raw,format=YUY2,width=640, height=480, framerate=25/1 ! nvvidconv ! video/x-raw(memory:NVMM), format=(string)I420, width=640, height=480, framerate=25/1 ! queue ! omxh264enc bitrate=2048 ! rtph264pay name=pay0 pt=96 )"
雖然感覺圖像品質變差了,但是資源占用真心有很大優化。而且延時也有一定的優化。