发布时间:2023-10-17 13:30
gcc 版本7.5.0
g++ 版本7.5.0
ubuntu 版本18.04
gst-rtsp-server 版本1.8.0
sudo apt-get install gtk-doc-tools
sudo apt-get install libgstreamer1.0-0 gstreamer1.0-plugins-base
sudo apt-get install gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly
sudo apt-get install gstreamer1.0-libav gstreamer1.0-doc gstreamer1.0-tools
sudo apt-get install gstreamer1.0-x gstreamer1.0-alsa gstreamer1.0-gl gstreamer1.0-gtk3 gstreamer1.0-qt5 gstreamer1.0-pulseaudio
sudo apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev
git clone git://anongit.freedesktop.org/gstreamer/gst-rtsp-server
或者
wget https://github.com/GStreamer/gst-rtsp-server/archive/1.8.zip
cd gst-rtsp-server
git checkout remotes/origin/1.8 or git clone https://github.com/GStreamer/common.git
./autogen.sh
make -j4
sudo make install
//进入demo
cd examples
1、切换到examples目录:
cd examples
2、搭建Rtsp Server:
./test-launch "( videotestsrc ! x264enc ! rtph264pay name=pay0 pt=96 )"
直接读取摄像头(笔记本电脑一般自带摄像头,台式机请插入USB摄像头)视频的命令就是它了:
$ ./test-launch "( v4l2src ! video/x-raw-yuv,format=(fourcc)YUY2,width=640,height=480 ! ffmpegcolorspace ! x264enc ! rtph264pay name=pay0 pt=96 )"
直接读取CSI摄像头
./test-launch "nvarguscamerasrc ! nvvidconv ! clockoverlay ! omxh264enc ! rtph264pay pt=96 name=pay0"---成功
./test-launch "( mfw_v4lsrc device=/dev/video0 ! queue ! vpuenc codec=6 ! rtph264pay name=pay0 pt=96 )"---失败
3、播放rtsp流:
gst-launch-1.0 playbin uri=rtsp://127.0.0.1:8554/test
或者直接通过VLC打开以及通过opencv也可以打开
# 发送1
gst-launch-1.0 videotestsrc ! video/x-raw,format=I420 ! omxh264enc ! video/x-h264,stream-format=byte-stream ! rtph264pay mtu=1400 ! udpsink host=127.0.0.1 port=5000
# 接收1
gst-launch-1.0 udpsrc port=5000 ! 'application/x-rtp,encoding-name=(string)H264' ! rtph264depay ! h264parse ! omxh264dec ! nvoverlaysink sync=false async=false
# 发送2
./test-launch "(videotestsrc ! video/x-raw,format=I420,framerate=25/1 ! x264enc ! video/x-h264,stream-format=byte-stream ! rtph264pay name=pay0 pt=96)"
# 接收2
gst-launch-1.0 rtspsrc location=rtsp://127.0.0.1:8554/test ! rtph264depay ! h264parse ! omxh264dec ! nvoverlaysink sync=false async=false
test-readme.c
#include
#include
int main (int argc, char *argv[]) {
//声明相关对象
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
//构建 rtsp 服务器
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE); // 创建 rtsp 服务器的主消息循环,也是默认的消息循环。
server = gst_rtsp_server_new (); // 创建 rtsp 服务器对象
mounts = gst_rtsp_server_get_mount_points (server); // 获取 rtsp 服务器的装载点集合的引用
// 装载点集合 mounts 是服务器 server 的属性
factory = gst_rtsp_media_factory_new (); // 创建媒体工厂,用来产生媒体数据流
gst_rtsp_media_factory_set_launch (factory, \"( videotestsrc is-live=1 ! x264enc ! rtph264pay name=pay0 pt=96 )\");
gst_rtsp_media_factory_set_shared (factory, TRUE);
gst_rtsp_mount_points_add_factory (mounts, \"/test\", factory); // 把媒体工厂添加到装载点集合
g_object_unref (mounts);
gst_rtsp_server_attach (server, NULL); // 把服务器附加到默认的消息循环。
//运行 rtsp 服务器
g_print (\"stream ready at rtsp://127.0.0.1:8554/test\\n\");
g_main_loop_run (loop);
return 0;
}
服务器管理另外四个对象: GstRTSPSessionPool、GstRTSPMountPoints、 GstRTSPAuth 和 GstRTSPThreadPool。
GstRTSPSessionPool 是一个跟踪服务器中所有活动会话的对象。通常会为每个为某个媒体流执行设置请求的客户机保留一个会话。它包含客户端与服务器协商以接收特定流的配置,即UDP使用的传输和端口对以及流的状态。会话池的默认实现通常就足够了,但服务器可以使用替代实现。
GstRTSPMountPoints 对象更有趣,在服务器对象有用之前需要更多的配置。此对象管理从请求URL到特定流的映射及其配置。我们将在下一个主题中解释如何配置此对象。
GstRTSPAuth是对用户进行身份验证并授权用户执行的操作的对象。默认情况下,服务器没有 GstRTSPAuth 对象,因此不会尝试执行任何身份验证或授权。
GstRTSPThreadPool 管理用于客户端连接和媒体管道的线程。服务器有一个线程池的默认实现,在大多数情况下应该足够了。
类似example里面的C文件,将你所命名的文件名(C文件),加在Makefile,Makefile.am,Makefile.in对应位置,直接通过sudo make -j8即可完成修改,将生成对应可执行文件直接./运行即可。
代码1
/* GStreamer
* Copyright (C) 2008 Wim Taymans <wim.taymans at gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include
#include
int
main (int argc, char *argv[])
{
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* create a server instance */
server = gst_rtsp_server_new ();
/* get the mount points for this server, every server has a default object
* that be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points (server);
/* make a media factory for a test stream. The default media factory can use
* gst-launch syntax to create pipelines.
* any launch line works as long as it contains elements named pay%d. Each
* element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory,
\"( nvarguscamerasrc ! video/x-raw(memory:NVMM),width=1280,height=720,framerate=60/1 ! nvvidconv ! clockoverlay halignment=left valignment=top time-format=\'%Y/%m/%d %H:%M:%S\' ! x264enc ! rtph264pay name=pay0 pt=96 )\");
gst_rtsp_media_factory_set_shared (factory, TRUE);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory (mounts, \"/test\", factory);
/* don\'t need the ref to the mapper anymore */
g_object_unref (mounts);
/* attach the server to the default maincontext */
gst_rtsp_server_attach (server, NULL);
/* start serving */
g_print (\"stream ready at rtsp://127.0.0.1:8554/test\\n\");
g_main_loop_run (loop);
return 0;
}
代码2
/* GStreamer
* Copyright (C) 2008 Wim Taymans <wim.taymans at gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include
#include
typedef struct
{
gboolean white;
GstClockTime timestamp;
} MyContext;
/* called when we need to give data to appsrc */
static void
need_data (GstElement * appsrc, guint unused, MyContext * ctx)
{
GstBuffer *buffer;
guint size;
GstFlowReturn ret;
size = 385 * 288 * 2;
buffer = gst_buffer_new_allocate (NULL, size, NULL);
/* this makes the image black/white */
gst_buffer_memset (buffer, 0, ctx->white ? 0xff : 0x0, size);
ctx->white = !ctx->white;
/* increment the timestamp every 1/2 second */
GST_BUFFER_PTS (buffer) = ctx->timestamp;
GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);
ctx->timestamp += GST_BUFFER_DURATION (buffer);
g_signal_emit_by_name (appsrc, \"push-buffer\", buffer, &ret);
}
/* called when a new media pipeline is constructed. We can query the
* pipeline and configure our appsrc */
/* "media-configure" handler: called when a new media pipeline is
 * constructed for a client. Finds the appsrc named "mysrc" and configures
 * it for timed 384x288 RGB16 buffers fed by need_data(). */
static void
media_configure (GstRTSPMediaFactory * factory, GstRTSPMedia * media,
    gpointer user_data)
{
  GstElement *element, *appsrc;
  MyContext *ctx;

  /* get the element used for providing the streams of the media */
  element = gst_rtsp_media_get_element (media);

  /* get our appsrc, we named it 'mysrc' with the name property */
  appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "mysrc");
  if (appsrc == NULL) {
    /* The launch line contains no element named "mysrc" (e.g. a camera
     * source is used instead); without this guard every call below would
     * emit GLib criticals on a NULL instance. */
    g_printerr ("media_configure: no appsrc named 'mysrc' in pipeline\n");
    gst_object_unref (element);
    return;
  }

  /* this instructs appsrc that we will be dealing with timed buffers */
  gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");

  /* configure the caps of the video; framerate 0/1 means variable rate */
  g_object_set (G_OBJECT (appsrc), "caps",
      gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "RGB16",
          "width", G_TYPE_INT, 384,
          "height", G_TYPE_INT, 288,
          "framerate", GST_TYPE_FRACTION, 0, 1, NULL), NULL);

  ctx = g_new0 (MyContext, 1);
  ctx->white = FALSE;
  ctx->timestamp = 0;
  /* make sure the data is freed when the media is gone */
  g_object_set_data_full (G_OBJECT (media), "my-extra-data", ctx,
      (GDestroyNotify) g_free);

  /* install the callback that will be called when a buffer is needed */
  g_signal_connect (appsrc, "need-data", (GCallback) need_data, ctx);

  gst_object_unref (appsrc);
  gst_object_unref (element);
}
int
main (int argc, char *argv[])
{
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* create a server instance */
server = gst_rtsp_server_new ();
/* get the mount points for this server, every server has a default object
* that be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points (server);
/* make a media factory for a test stream. The default media factory can use
* gst-launch syntax to create pipelines.
* any launch line works as long as it contains elements named pay%d. Each
* element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory,
\"( nvarguscamerasrc ! video/x-raw(memory:NVMM),width=1280,height=720,framerate=60/1 ! nvvidconv ! clockoverlay halignment=left valignment=top time-format=\'%Y/%m/%d %H:%M:%S\' ! x264enc ! rtph264pay name=pay0 pt=96 )\");
/* notify when our media is ready, This is called whenever someone asks for
* the media and a new pipeline with our appsrc is created */
g_signal_connect (factory, \"media-configure\", (GCallback) media_configure,
NULL);
gst_rtsp_media_factory_set_shared (factory, TRUE);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory (mounts, \"/test\", factory);
/* don\'t need the ref to the mapper anymore */
g_object_unref (mounts);
/* attach the server to the default maincontext */
gst_rtsp_server_attach (server, NULL);
/* start serving */
g_print (\"stream ready at rtsp://127.0.0.1:8554/test\\n\");
g_main_loop_run (loop);
return 0;
}
代码3
/* GStreamer
* Copyright (C) 2008 Wim Taymans <wim.taymans at gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include
#include
typedef struct
{
gboolean white;
GstClockTime timestamp;
} MyContext;
/* called when we need to give data to appsrc */
static void
need_data (GstElement * appsrc, guint unused, MyContext * ctx)
{
GstBuffer *buffer;
guint size;
GstFlowReturn ret;
size = 385 * 288 * 2;
buffer = gst_buffer_new_allocate (NULL, size, NULL);
/* this makes the image black/white */
gst_buffer_memset (buffer, 0, ctx->white ? 0xff : 0x0, size);
ctx->white = !ctx->white;
/* increment the timestamp every 1/2 second */
GST_BUFFER_PTS (buffer) = ctx->timestamp;
GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);
ctx->timestamp += GST_BUFFER_DURATION (buffer);
g_signal_emit_by_name (appsrc, \"push-buffer\", buffer, &ret);
}
/* called when a new media pipeline is constructed. We can query the
* pipeline and configure our appsrc */
/* "media-configure" handler: called when a new media pipeline is
 * constructed for a client. Finds the appsrc named "mysrc" and configures
 * it for timed 384x288 RGB16 buffers fed by need_data(). */
static void
media_configure (GstRTSPMediaFactory * factory, GstRTSPMedia * media,
    gpointer user_data)
{
  GstElement *element, *appsrc;
  MyContext *ctx;

  /* get the element used for providing the streams of the media */
  element = gst_rtsp_media_get_element (media);

  /* get our appsrc, we named it 'mysrc' with the name property */
  appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "mysrc");
  if (appsrc == NULL) {
    /* The launch line contains no element named "mysrc" (e.g. a camera
     * source is used instead); without this guard every call below would
     * emit GLib criticals on a NULL instance. */
    g_printerr ("media_configure: no appsrc named 'mysrc' in pipeline\n");
    gst_object_unref (element);
    return;
  }

  /* this instructs appsrc that we will be dealing with timed buffers */
  gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");

  /* configure the caps of the video; framerate 0/1 means variable rate */
  g_object_set (G_OBJECT (appsrc), "caps",
      gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "RGB16",
          "width", G_TYPE_INT, 384,
          "height", G_TYPE_INT, 288,
          "framerate", GST_TYPE_FRACTION, 0, 1, NULL), NULL);

  ctx = g_new0 (MyContext, 1);
  ctx->white = FALSE;
  ctx->timestamp = 0;
  /* make sure the data is freed when the media is gone */
  g_object_set_data_full (G_OBJECT (media), "my-extra-data", ctx,
      (GDestroyNotify) g_free);

  /* install the callback that will be called when a buffer is needed */
  g_signal_connect (appsrc, "need-data", (GCallback) need_data, ctx);

  gst_object_unref (appsrc);
  gst_object_unref (element);
}
int
main (int argc, char *argv[])
{
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* create a server instance */
server = gst_rtsp_server_new ();
/* get the mount points for this server, every server has a default object
* that be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points (server);
/* make a media factory for a test stream. The default media factory can use
* gst-launch syntax to create pipelines.
* any launch line works as long as it contains elements named pay%d. Each
* element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory,
\"( nvarguscamerasrc ! video/x-raw(memory:NVMM),width=1280,height=720,framerate=60/1 ! nvvidconv ! clockoverlay halignment=left valignment=top time-format=\'%Y/%m/%d %H:%M:%S\' ! x264enc ! rtph264pay name=pay0 pt=96 )\");
/* notify when our media is ready, This is called whenever someone asks for
* the media and a new pipeline with our appsrc is created */
g_signal_connect (factory, \"media-configure\", (GCallback) media_configure,
NULL);
gst_rtsp_media_factory_set_shared (factory, TRUE);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory (mounts, \"/test\", factory);
/* don\'t need the ref to the mapper anymore */
g_object_unref (mounts);
/* attach the server to the default maincontext */
gst_rtsp_server_attach (server, NULL);
/* start serving */
g_print (\"stream ready at rtsp://127.0.0.1:8554/test\\n\");
g_main_loop_run (loop);
return 0;
}
获取系统绝对时间
gst_rtsp_media_factory_set_launch (factory,
"( nvarguscamerasrc ! video/x-raw(memory:NVMM),width=1280,height=720,framerate=60/1 ! nvvidconv ! clockoverlay halignment=left valignment=top time-format='%Y/%m/%d %H:%M:%S' ! x264enc ! rtph264pay name=pay0 pt=96 )");
获取系统年月日(仅日期,不含时分秒)
gst_rtsp_media_factory_set_launch (factory,
"( nvarguscamerasrc ! video/x-raw(memory:NVMM),width=1280,height=720,framerate=60/1 ! nvvidconv ! clockoverlay halignment=left valignment=top time-format='%Y/%m/%d' ! x264enc ! rtph264pay name=pay0 pt=96 )");
sudo systemctl restart nvargus-daemon
(可以重启nvargus-daemon,重启后程序可以正常运行)
基于树莓派板子
基于jetson nano
有关%E4%BD%A0%E6%98%AF%E4相关类型解码问题解决
Counterfactual Attention Learning for Fine-Grained Visual Categorization and Re-identification 论文学习
PyTorch中 nn.Conv2d与nn.ConvTranspose2d函数的用法
neovis.js+vue实现知识图谱前端展示,踩坑后的经验分享!!!
istio 服务网格_如何在Kubernetes上使用Istio服务网格设置JHipster微服务
apache flink目录遍历漏洞(CVE-2020-17518复现)