Working with multiple appsrcs/cameras

Sam Armstrong 2018-09-27 14:34:48 +10:00
parent 6e795a070a
commit 4937b1d078
5 changed files with 63 additions and 36 deletions

View File

@@ -1,19 +1,40 @@
# Set up your streams to rtsp here.
#
# "type": ["cam" , "topic"]
#
# "source":
# if type=cam, the GStreamer pipeline head for the camera (see the commented examples below)
# if type=topic, the ROS Image topic to subscribe to.
#
# Notes:
# Use a low framerate such as 10/1 on ROS topics.
# The appsrc can raise 'media not prepared' errors if the framerate is too high.
streams:
stream1:
type: cam
source: /dev/video1
mountpoint: /back
caps: video/x-raw,framerate=30/1,width=1280,height=720
bitrate: 500
stream2:
# stream-x:
# type: cam
# source: "v4l2src device=/dev/video0 ! videoconvert ! videoscale ! video/x-raw,framerate=15/1,width=1280,height=720"
# mountpoint: /front
# bitrate: 500
# stream1:
# type: cam
# source: "v4l2src device=/dev/video0 ! videoconvert ! videoscale ! video/x-raw,framerate=15/1,width=1280,height=720"
# mountpoint: /back
# bitrate: 500
stream-a:
type: topic
source: /usb_cam/image_raw
source: /usb_cam0/image_raw
mountpoint: /front
caps: video/x-raw,framerate=10/1,width=640,height=480
bitrate: 500
stream-yay:
type: topic
source: /usb_cam1/image_raw
mountpoint: /back
caps: video/x-raw,framerate=10/1,width=640,height=480
bitrate: 500
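
Because a "cam" source is now a full GStreamer pipeline head rather than a bare device path (see the nodelet change below), it can be smoke-tested outside the nodelet. A minimal sketch, assuming GStreamer 1.x, with fakesink standing in for the x264enc/RTSP tail the nodelet appends:

    #include <gst/gst.h>

    int main(int argc, char *argv[]) {
        gst_init(&argc, &argv);
        GError *err = nullptr;
        // Same head as the commented stream-x example above.
        GstElement *pipe = gst_parse_launch(
            "v4l2src device=/dev/video0 ! videoconvert ! videoscale ! "
            "video/x-raw,framerate=15/1,width=1280,height=720 ! fakesink",
            &err);
        if (err != nullptr) {
            g_printerr("parse failed: %s\n", err->message);
            g_error_free(err);
            return 1;
        }
        gst_element_set_state(pipe, GST_STATE_PLAYING);
        // Block until an error or end-of-stream is posted on the bus.
        GstBus *bus = gst_element_get_bus(pipe);
        GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
            (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
        if (msg) gst_message_unref(msg);
        gst_object_unref(bus);
        gst_element_set_state(pipe, GST_STATE_NULL);
        gst_object_unref(pipe);
        return 0;
    }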

View File

@@ -12,11 +12,11 @@ namespace image2rtsp {
void print_error(char *s);
private:
ros::Subscriber sub;
GstAppSrc *appsrc;
std::map<std::string, ros::Subscriber> subs;
std::map<std::string, GstAppSrc*> appsrc;
std::map<std::string, int> num_of_clients;
GstCaps* gst_caps_new_from_image(const sensor_msgs::Image::ConstPtr &msg);
void imageCallback(const sensor_msgs::Image::ConstPtr& msg);
void imageCallback(const sensor_msgs::Image::ConstPtr& msg, const std::string& topic);
void video_mainloop_start();
void rtsp_server_add_url(const char *url, const char *sPipeline, GstElement **appsrc);
GstRTSPServer *rtsp_server_create();
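
The single subscriber and appsrc pointer become maps keyed by mountpoint. One C++ detail the callback relies on: std::map::operator[] value-initialises a missing entry, so looking up a stream that has never had a client yields a null pointer instead of undefined behaviour. A toy illustration (hypothetical stand-in types):

    #include <cstdio>
    #include <map>
    #include <string>

    int main() {
        std::map<std::string, int*> appsrc;  // stands in for std::map<std::string, GstAppSrc*>
        // operator[] inserts a value-initialised (null) entry on first access,
        // so a guard like `if (appsrc[topic] != NULL)` is safe even before
        // any client has connected.
        if (appsrc["/front"] == nullptr)
            std::puts("no appsrc yet for /front; frame would be dropped");
        return 0;
    }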

View File

@@ -1,6 +1,6 @@
<launch>
<!-- Start the image publisher -->
<node name="usb_cam" pkg="usb_cam" type="usb_cam_node" output="screen" >
<!-- Start the image publishers if required -->
<!-- <node name="usb_cam0" pkg="usb_cam" type="usb_cam_node" output="screen" >
<param name="video_device" value="/dev/video0" />
<param name="image_width" value="640" />
<param name="image_height" value="480" />
@@ -9,6 +9,16 @@
<param name="io_method" value="mmap"/>
<param name="autofocus" value="true"/>
</node>
<node name="usb_cam1" pkg="usb_cam" type="usb_cam_node" output="screen" >
<param name="video_device" value="/dev/video1" />
<param name="image_width" value="640" />
<param name="image_height" value="480" />
<param name="pixel_format" value="yuyv" />
<param name="camera_frame_id" value="yuyv" />
<param name="io_method" value="mmap"/>
<param name="autofocus" value="true"/>
</node> -->
<!-- <node pkg="ros_rtsp" name="camera_publisher" type="camera_publisher" args="0"/> -->
<node pkg="nodelet" type="nodelet" name="standalone_nodelet" args="manager" output="screen"/>

View File

@@ -26,7 +26,6 @@ void Image2RTSPNodelet::onInit() {
putenv((char*)"GST_DEBUG=*:2");
}
appsrc = NULL;
ros::NodeHandle& nh = getPrivateNodeHandle();
video_mainloop_start();
@@ -48,12 +47,11 @@ void Image2RTSPNodelet::onInit() {
// Convert to string for ease of use
mountpoint = static_cast<std::string>(stream["mountpoint"]);
bitrate = std::to_string(static_cast<int>(stream["bitrate"]));
caps = static_cast<std::string>(stream["caps"]);
// uvc camera?
if (stream["type"]=="cam")
{
pipeline = "( v4l2src device=" + static_cast<std::string>(stream["source"]) + " ! videoconvert ! videoscale ! " + caps + " ! x264enc tune=zerolatency bitrate=" + bitrate + pipeline_tail;
pipeline = "( " + static_cast<std::string>(stream["source"]) + " ! x264enc tune=zerolatency bitrate=" + bitrate + pipeline_tail;
rtsp_server_add_url(mountpoint.c_str(), pipeline.c_str(), NULL);
}
@@ -63,12 +61,14 @@ void Image2RTSPNodelet::onInit() {
/* Keep a record of the number of clients connected to each topic,
* so we know to stop subscribing when no-one is connected. */
num_of_clients[mountpoint] = 0;
appsrc[mountpoint] = NULL;
caps = static_cast<std::string>(stream["caps"]);
// Setup the full pipeline
pipeline = "( appsrc name=imagesrc do-timestamp=true min-latency=0 max-latency=0 max-bytes=1000 is-live=true ! videoconvert ! videoscale ! " + caps + " ! x264enc tune=zerolatency bitrate=" + bitrate + pipeline_tail;
// Add the pipeline to the rtsp server
rtsp_server_add_url(mountpoint.c_str(), pipeline.c_str(), (GstElement **)&appsrc);
rtsp_server_add_url(mountpoint.c_str(), pipeline.c_str(), (GstElement **)&(appsrc[mountpoint]));
}
else
{
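
rtsp_server_add_url is handed the address of the map slot itself, so the code leans on a std::map guarantee: insertions never invalidate pointers or references to existing mapped values. A small check of that property (toy value type):

    #include <cassert>
    #include <map>
    #include <string>

    int main() {
        std::map<std::string, void*> appsrc;
        appsrc["/front"] = nullptr;
        void **slot = &appsrc["/front"];   // like the pointer handed to rtsp_server_add_url
        appsrc["/back"] = nullptr;         // later streams are added...
        assert(slot == &appsrc["/front"]); // ...without moving the earlier slot
        return 0;
    }
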
@@ -109,35 +109,33 @@ GstCaps* Image2RTSPNodelet::gst_caps_new_from_image(const sensor_msgs::Image::Co
"format", G_TYPE_STRING, format->second.c_str(),
"width", G_TYPE_INT, msg->width,
"height", G_TYPE_INT, msg->height,
"framerate", GST_TYPE_FRACTION, 10, 1, // 0/1 = dynamic
"framerate", GST_TYPE_FRACTION, 10, 1,
nullptr);
}
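
The "format" field above implies a lookup from ROS image encodings to GStreamer video format names; the `format->second` in the caps call suggests a map find. The real table lives elsewhere in this file; a sketch of its likely shape for the common encodings:

    #include <map>
    #include <string>

    // Sketch only: a plausible encoding-to-format table for the lookup used
    // above. The actual mapping is defined elsewhere in this file.
    static const std::map<std::string, std::string> ros_to_gst_format = {
        {"rgb8",   "RGB"},
        {"bgr8",   "BGR"},
        {"mono8",  "GRAY8"},
        {"yuv422", "UYVY"},
    };
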
void Image2RTSPNodelet::imageCallback(const sensor_msgs::Image::ConstPtr& msg) {
void Image2RTSPNodelet::imageCallback(const sensor_msgs::Image::ConstPtr& msg, const std::string& topic) {
GstBuffer *buf;
GstCaps *caps;
char *gst_type, *gst_format=(char *)"";
// g_print("Image encoding: %s\n", msg->encoding.c_str());
if (appsrc != NULL) {
if (appsrc[topic] != NULL) {
// Set caps from message
caps = gst_caps_new_from_image(msg);
gst_app_src_set_caps(appsrc, caps);
gst_app_src_set_caps(appsrc[topic], caps);
buf = gst_buffer_new_allocate(nullptr, msg->data.size(), nullptr);
gst_buffer_fill(buf, 0, msg->data.data(), msg->data.size());
GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_LIVE);
gst_app_src_push_buffer(appsrc, buf);
gst_app_src_push_buffer(appsrc[topic], buf);
}
}
void Image2RTSPNodelet::url_connected(string url) {
std::string mountpoint, source;
std::string mountpoint, source, type;
NODELET_INFO("Client connected: %s", url.c_str());
ros::NodeHandle& nh = getPrivateNodeHandle();
@@ -151,15 +149,16 @@ void Image2RTSPNodelet::url_connected(string url) {
for(XmlRpc::XmlRpcValue::ValueStruct::const_iterator it = streams.begin(); it != streams.end(); ++it)
{
XmlRpc::XmlRpcValue stream = streams[it->first];
type = static_cast<std::string>(stream["type"]);
mountpoint = static_cast<std::string>(stream["mountpoint"]);
source = static_cast<std::string>(stream["source"]);
// Check which stream the client has connected to
if (url==mountpoint) {
if (type=="topic" && url==mountpoint) {
if (num_of_clients[url]==0) {
// Subscribe to the ROS topic
sub = nh.subscribe(source, 10, &Image2RTSPNodelet::imageCallback, this);
subs[url] = nh.subscribe<sensor_msgs::Image>(source, 1, boost::bind(&Image2RTSPNodelet::imageCallback, this, _1, url));
}
num_of_clients[url]++;
}
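
The plain subscribe overload used before cannot tell the callback which stream a frame belongs to, hence the switch to the functor overload with boost::bind, which bakes the mountpoint in as a second argument. The pattern in isolation (hypothetical Demo class, a sketch):

    #include <ros/ros.h>
    #include <sensor_msgs/Image.h>
    #include <boost/bind.hpp>

    class Demo {
    public:
        void imageCallback(const sensor_msgs::Image::ConstPtr &msg,
                           const std::string &topic) {
            ROS_INFO("frame %ux%u for stream %s", msg->width, msg->height, topic.c_str());
        }
        ros::Subscriber subscribeFor(ros::NodeHandle &nh, const std::string &source,
                                     const std::string &url) {
            // _1 receives the message; url is baked in at subscribe time.
            return nh.subscribe<sensor_msgs::Image>(
                source, 1, boost::bind(&Demo::imageCallback, this, _1, url));
        }
    };
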
@@ -188,8 +187,8 @@ void Image2RTSPNodelet::url_disconnected(string url) {
if (num_of_clients[url] > 0) num_of_clients[url]--;
if (num_of_clients[url] == 0) {
// No-one else is connected. Stop the subscription.
sub.shutdown();
appsrc = NULL;
subs[url].shutdown();
appsrc[url] = NULL;
}
}
}
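
Once a stream is up, any RTSP client can pull it. A minimal playbin-based viewer, assuming the server runs on localhost with gst-rtsp-server's default port 8554 and the /front mountpoint from the config:

    #include <gst/gst.h>

    int main(int argc, char *argv[]) {
        gst_init(&argc, &argv);
        GstElement *play = gst_element_factory_make("playbin", "player");
        // Assumed address: default gst-rtsp-server port, /front mountpoint.
        g_object_set(play, "uri", "rtsp://127.0.0.1:8554/front", NULL);
        gst_element_set_state(play, GST_STATE_PLAYING);

        // Run until an error or end-of-stream is posted on the bus.
        GstBus *bus = gst_element_get_bus(play);
        GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
            (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
        if (msg) gst_message_unref(msg);
        gst_object_unref(bus);
        gst_element_set_state(play, GST_STATE_NULL);
        gst_object_unref(play);
        return 0;
    }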

View File

@@ -94,11 +94,11 @@ GstRTSPServer *Image2RTSPNodelet::rtsp_server_create() {
/* called when a new media pipeline is constructed. We can query the
* pipeline and configure our appsrc */
static void media_configure(GstRTSPMediaFactory *factory, GstRTSPMedia *media, GstElement **appsrc)
{
if (appsrc) {
{ if (appsrc) {
GstElement *pipeline = gst_rtsp_media_get_element(media);
*appsrc = gst_bin_get_by_name(GST_BIN(pipeline), "imagesrc");
/* this instructs appsrc that we will be dealing with timed buffers */
gst_util_set_object_arg(G_OBJECT(*appsrc), "format", "time");
@@ -137,9 +137,6 @@ void Image2RTSPNodelet::rtsp_server_add_url(const char *url, const char *sPipeli
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
ROS_INFO("%s", url);
ROS_INFO("%s", sPipeline);
/* get the mount points for this server, every server has a default object
* that is used to map URI mount points to media factories */
mounts = gst_rtsp_server_get_mount_points(rtsp_server);
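
The rest of rtsp_server_add_url is cut off by the hunk, but judging from its signature and the stock gst-rtsp-server appsrc example it plausibly builds one factory per mountpoint and forwards the appsrc slot to media_configure via the "media-configure" signal. A sketch under that assumption:

    #include <gst/gst.h>
    #include <gst/rtsp-server/rtsp-server.h>

    static void media_configure(GstRTSPMediaFactory *factory, GstRTSPMedia *media,
                                GstElement **appsrc);  // defined above in this file

    // Hypothetical free-function version of the remainder of rtsp_server_add_url.
    static void add_url_sketch(GstRTSPServer *server, const char *url,
                               const char *sPipeline, GstElement **appsrc) {
        GstRTSPMountPoints *mounts = gst_rtsp_server_get_mount_points(server);
        GstRTSPMediaFactory *factory = gst_rtsp_media_factory_new();
        gst_rtsp_media_factory_set_launch(factory, sPipeline);
        gst_rtsp_media_factory_set_shared(factory, TRUE);
        if (appsrc)
            g_signal_connect(factory, "media-configure",
                             (GCallback)media_configure, appsrc);
        // Attach the factory to its mountpoint, e.g. rtsp://host:8554/front.
        gst_rtsp_mount_points_add_factory(mounts, url, factory);
        g_object_unref(mounts);
    }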