Buffer writing to appsrc working now

Sam Armstrong 2018-09-24 16:15:17 +10:00
parent a00ed1da91
commit 9acf1b3528
12 changed files with 380 additions and 597 deletions

CMakeLists.txt (modified)

@@ -17,11 +17,9 @@ pkg_check_modules(GST REQUIRED
 find_package(catkin REQUIRED COMPONENTS
-  cv_bridge
-  image_transport
   roscpp
   sensor_msgs
-  std_msgs
+  nodelet
 )
## System dependencies are found with CMake's conventions
@@ -116,7 +114,7 @@ find_package(catkin REQUIRED COMPONENTS
 catkin_package(
 #  INCLUDE_DIRS include
 #  LIBRARIES ros_rtsp
-  CATKIN_DEPENDS cv_bridge image_transport roscpp sensor_msgs std_msgs
+  CATKIN_DEPENDS roscpp sensor_msgs
 #  DEPENDS system_lib
 )
@@ -136,6 +134,8 @@ include_directories(
 # add_library(${PROJECT_NAME}
 #   src/${PROJECT_NAME}/ros_rtsp.cpp
 # )
+add_library(image_to_rtsp_nodelet src/image2rtsp.cpp src/video.cpp)
 ## Add cmake target dependencies of the library
 ## as an example, code may need to be generated before libraries
@@ -145,9 +145,6 @@ include_directories(
 ## Declare a C++ executable
 ## With catkin_make all packages are built within a single CMake context
 ## The recommended prefix ensures that target names across packages don't collide
-add_executable(camera_publisher src/camera_publisher.cpp)
-add_executable(camera_subscriber src/camera_subscriber.cpp)
-add_executable(rtsp_server src/test-appsrc.cpp)
 ## Rename C++ executable without prefix
 ## The above recommended prefix causes long target names, the following renames the
@@ -157,22 +154,11 @@ add_executable(rtsp_server src/test-appsrc.cpp)
 ## Add cmake target dependencies of the executable
 ## same as for the library above
-add_dependencies(camera_publisher ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
-add_dependencies(camera_subscriber ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
-add_dependencies(rtsp_server ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
 ## Specify libraries to link a library or executable target against
-target_link_libraries(camera_publisher
-  ${catkin_LIBRARIES}
-  ${OpenCV_LIBRARIES}
-)
-target_link_libraries(camera_subscriber
-  ${catkin_LIBRARIES}
-  ${OpenCV_LIBRARIES}
-)
-target_link_libraries(rtsp_server
-  ${catkin_LIBRARIES}
-  ${GST_LIBRARIES}
+target_link_libraries(image_to_rtsp_nodelet
+  ${catkin_LIBRARIES}
+  ${GST_LIBRARIES}
 )

include/image2rtsp.h (new file)

@@ -0,0 +1,26 @@
#ifndef IMAGE_TO_RTSP_H
#define IMAGE_TO_RTSP_H

// Includes so the header is self-contained (the .cpp pulls these in first anyway)
#include <string>
#include <ros/ros.h>
#include <nodelet/nodelet.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <sensor_msgs/Image.h>

namespace image2rtsp {
	class Image2RTSPNodelet : public nodelet::Nodelet {
	public:
		GstRTSPServer *rtsp_server;

		void onInit();
		void url_connected(std::string url);
		void url_disconnected(std::string url);
		void print_info(char *s);
		void print_error(char *s);

	private:
		ros::Subscriber sub;
		GstAppSrc *appsrc;
		int num; // number of clients connected to the mountpoint

		GstCaps *gst_caps_new_from_image(const sensor_msgs::Image::ConstPtr &msg);
		void imageCallback(const sensor_msgs::Image::ConstPtr &msg);
		void video_mainloop_start();
		void rtsp_server_add_url(const char *url, const char *sPipeline, GstElement **appsrc);
		GstRTSPServer *rtsp_server_create();
	};
}

#endif

launch/test_rtsp.launch (new file)

@@ -0,0 +1,21 @@
<launch>
<!-- Start the image publisher -->
<node name="usb_cam" pkg="usb_cam" type="usb_cam_node" output="screen" >
<param name="video_device" value="/dev/video0" />
<param name="image_width" value="640" />
<param name="image_height" value="480" />
<param name="pixel_format" value="yuyv" />
<param name="camera_frame_id" value="yuyv" />
<param name="io_method" value="mmap"/>
<param name="autofocus" value="true"/>
</node>
<!-- <node pkg="ros_rtsp" name="camera_publisher" type="camera_publisher" args="0"/> -->
<node pkg="nodelet" type="nodelet" name="standalone_nodelet" args="manager" output="screen"/>
<node pkg="nodelet" type="nodelet" name="Image2RTSPNodelet" args="load image2rtsp/Image2RTSPNodelet standalone_nodelet" output="screen">
<param name="topic_1" type="str" value="/usb_cam/image_raw" />
<param name="mountpoint_1" type="str" value="/front" />
<param name="pipeline_1" type="str" value="( appsrc name=imagesrc do-timestamp=true max-latency=50 max-bytes=1000 is-live=true ! videoconvert ! videoscale ! video/x-raw,width=800,height=640 ! videoconvert ! videorate ! video/x-raw,framerate=10/1 ! queue ! x264enc tune=zerolatency bitrate=1000 key-int-max=90 ! video/x-h264, profile=baseline ! rtph264pay name=pay0 pt=96 )" />
</node>
</launch>
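For reference, once the nodelet is up, the stream can be pulled by any RTSP client. A minimal C++ playback sketch (not part of the commit; the 127.0.0.1:8554 address assumes GStreamer's default RTSP port on the local machine):

#include <gst/gst.h>

int main(int argc, char *argv[]) {
	gst_init(&argc, &argv);
	// playbin builds the whole receive/decode pipeline from the URI
	GstElement *play = gst_parse_launch(
			"playbin uri=rtsp://127.0.0.1:8554/front", NULL);
	gst_element_set_state(play, GST_STATE_PLAYING);
	// Block until an error or end-of-stream message is posted on the bus
	GstBus *bus = gst_element_get_bus(play);
	GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
			(GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
	if (msg) gst_message_unref(msg);
	gst_object_unref(bus);
	gst_element_set_state(play, GST_STATE_NULL);
	gst_object_unref(play);
	return 0;
}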

nodelet_plugins.xml (new file)

@@ -0,0 +1,7 @@
<library path="lib/libimage_to_rtsp_nodelet">
<class name="image2rtsp/Image2RTSPNodelet" type="image2rtsp::Image2RTSPNodelet" base_class_type="nodelet::Nodelet">
<description>
Nodelet to transfer the /camera/image topic from openni2_camera over RTSP.
</description>
</class>
</library>
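The library path and class name here have to agree with the add_library() target in CMakeLists.txt and the PLUGINLIB_EXPORT_CLASS macro in src/image2rtsp.cpp. A sketch of checking the export the way the nodelet manager would, using pluginlib directly (standalone test program, not part of the commit):

#include <pluginlib/class_loader.h>
#include <nodelet/nodelet.h>

int main() {
	// Throws a pluginlib exception if the lookup name is not exported
	pluginlib::ClassLoader<nodelet::Nodelet> loader("nodelet", "nodelet::Nodelet");
	boost::shared_ptr<nodelet::Nodelet> n =
			loader.createInstance("image2rtsp/Image2RTSPNodelet");
	return n ? 0 : 1;
}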

package.xml (modified)

@@ -49,26 +49,20 @@
   <!-- Use doc_depend for packages you need only for building documentation: -->
   <!-- <doc_depend>doxygen</doc_depend> -->
   <buildtool_depend>catkin</buildtool_depend>
-  <build_depend>cv_bridge</build_depend>
-  <build_depend>image_transport</build_depend>
   <build_depend>roscpp</build_depend>
   <build_depend>sensor_msgs</build_depend>
-  <build_depend>std_msgs</build_depend>
-  <build_export_depend>cv_bridge</build_export_depend>
-  <build_export_depend>image_transport</build_export_depend>
+  <build_depend>nodelet</build_depend>
   <build_export_depend>roscpp</build_export_depend>
   <build_export_depend>sensor_msgs</build_export_depend>
-  <build_export_depend>std_msgs</build_export_depend>
-  <exec_depend>cv_bridge</exec_depend>
-  <exec_depend>image_transport</exec_depend>
   <exec_depend>roscpp</exec_depend>
   <exec_depend>sensor_msgs</exec_depend>
-  <exec_depend>std_msgs</exec_depend>
+  <exec_depend>nodelet</exec_depend>
   <!-- The export tag contains other, unspecified, tags -->
   <export>
     <!-- Other tools can request additional information be placed here -->
+    <nodelet plugin="${prefix}/nodelet_plugins.xml" />
   </export>
 </package>

src/camera_publisher.cpp (deleted)

@@ -1,42 +0,0 @@
#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sstream> // for converting the command line parameter to integer
int main(int argc, char** argv)
{
// Check if video source has been passed as a parameter
if(argv[1] == NULL) return 1;
ros::init(argc, argv, "image_publisher");
ros::NodeHandle nh;
image_transport::ImageTransport it(nh);
image_transport::Publisher pub = it.advertise("camera/image", 1);
// Convert the passed as command line parameter index for the video device to an integer
std::istringstream video_sourceCmd(argv[1]);
int video_source;
// Check if it is indeed a number
if(!(video_sourceCmd >> video_source)) return 1;
cv::VideoCapture cap(video_source);
// Check if video device can be opened with the given index
if(!cap.isOpened()) return 1;
cv::Mat frame;
sensor_msgs::ImagePtr msg;
ros::Rate loop_rate(30);
while (nh.ok()) {
cap >> frame;
// Check if grabbed frame is actually full with some content
if(!frame.empty()) {
msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", frame).toImageMsg();
pub.publish(msg);
cv::waitKey(1);
}
ros::spinOnce();
loop_rate.sleep();
}
}

src/camera_subscriber.cpp (deleted)

@@ -1,29 +0,0 @@
#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
void imageCallback(const sensor_msgs::ImageConstPtr& msg)
{
try
{
cv::imshow("view", cv_bridge::toCvShare(msg, "bgr8")->image);
cv::waitKey(30);
}
catch (cv_bridge::Exception& e)
{
ROS_ERROR("Could not convert from '%s' to 'bgr8'.", msg->encoding.c_str());
}
}
int main(int argc, char **argv)
{
ros::init(argc, argv, "image_listener");
ros::NodeHandle nh;
cv::namedWindow("view");
cv::startWindowThread();
image_transport::ImageTransport it(nh);
image_transport::Subscriber sub = it.subscribe("camera/image", 1, imageCallback);
ros::spin();
cv::destroyWindow("view");
}

src/image2rtsp.cpp (new file)

@@ -0,0 +1,182 @@
#include <string>
#include <stdio.h>
#include <nodelet/nodelet.h>
#include <pluginlib/class_list_macros.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <ros/ros.h>
#include "sensor_msgs/Image.h"
#include <sensor_msgs/image_encodings.h>
#include <image2rtsp.h>
using namespace std;
using namespace image2rtsp;
void Image2RTSPNodelet::onInit() {
string mountpoint_1;
string pipeline_1;
NODELET_DEBUG("Initializing image2rtsp nodelet...");
if (getenv((char*)"GST_DEBUG") == NULL) {
// set GST_DEBUG to warning if unset
putenv((char*)"GST_DEBUG=*:2");
}
num = 0;
appsrc = NULL;
ros::NodeHandle& node = getPrivateNodeHandle();
video_mainloop_start();
rtsp_server = rtsp_server_create();
node.getParam("mountpoint_1", mountpoint_1);
node.getParam("pipeline_1", pipeline_1);
rtsp_server_add_url(mountpoint_1.c_str(), pipeline_1.c_str(), (GstElement **)&appsrc);
}
/* Borrowed from https://github.com/ProjectArtemis/gst_video_server/blob/master/src/server_nodelet.cpp */
GstCaps* Image2RTSPNodelet::gst_caps_new_from_image(const sensor_msgs::Image::ConstPtr &msg)
{
// http://gstreamer.freedesktop.org/data/doc/gstreamer/head/pwg/html/section-types-definitions.html
static const ros::M_string known_formats = {{
{sensor_msgs::image_encodings::RGB8, "RGB"},
{sensor_msgs::image_encodings::RGB16, "RGB16"},
{sensor_msgs::image_encodings::RGBA8, "RGBA"},
{sensor_msgs::image_encodings::RGBA16, "RGBA16"},
{sensor_msgs::image_encodings::BGR8, "BGR"},
{sensor_msgs::image_encodings::BGR16, "BGR16"},
{sensor_msgs::image_encodings::BGRA8, "BGRA"},
{sensor_msgs::image_encodings::BGRA16, "BGRA16"},
{sensor_msgs::image_encodings::MONO8, "GRAY8"},
{sensor_msgs::image_encodings::MONO16, "GRAY16_LE"},
}};
if (msg->is_bigendian) {
ROS_ERROR("GST: big endian image format is not supported");
return nullptr;
}
auto format = known_formats.find(msg->encoding);
if (format == known_formats.end()) {
ROS_ERROR("GST: image format '%s' unknown", msg->encoding.c_str());
return nullptr;
}
	return gst_caps_new_simple("video/x-raw",
			"format", G_TYPE_STRING, format->second.c_str(),
			"width", G_TYPE_INT, msg->width,
			"height", G_TYPE_INT, msg->height,
			"framerate", GST_TYPE_FRACTION, 10, 1, // fixed 10 fps (0/1 would mean dynamic)
			nullptr);
}
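For illustration, the caps the call above builds for a hypothetical 640x480 BGR8 frame can be printed with a few lines of GStreamer (standalone sketch, not part of the commit):

#include <gst/gst.h>

int main(int argc, char *argv[]) {
	gst_init(&argc, &argv);
	// Mirrors the gst_caps_new_simple() call above for a 640x480 BGR8 frame
	GstCaps *caps = gst_caps_new_simple("video/x-raw",
			"format", G_TYPE_STRING, "BGR",
			"width", G_TYPE_INT, 640,
			"height", G_TYPE_INT, 480,
			"framerate", GST_TYPE_FRACTION, 10, 1,
			NULL);
	gchar *s = gst_caps_to_string(caps);
	g_print("%s\n", s); // video/x-raw, format=(string)BGR, width=(int)640, ...
	g_free(s);
	gst_caps_unref(caps);
	return 0;
}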
void Image2RTSPNodelet::imageCallback(const sensor_msgs::Image::ConstPtr& msg) {
	GstBuffer *buf;
	GstCaps *caps;

	if (appsrc != NULL) {
		// Set caps from the message; skip the frame if the encoding is unsupported
		caps = gst_caps_new_from_image(msg);
		if (caps == NULL) return;
		gst_app_src_set_caps(appsrc, caps);
		gst_caps_unref(caps); // appsrc now holds its own reference

		// The ROS message is immutable, so the image data must be copied
		buf = gst_buffer_new_allocate(nullptr, msg->data.size(), nullptr);
		gst_buffer_fill(buf, 0, msg->data.data(), msg->data.size());
		GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_LIVE);

		/* NOTE(vooon): with do-timestamp=true set on the appsrc there is no
		 * need to stamp PTS/duration on each buffer by hand. An alternative
		 * is to bundle the buffer and caps into a GstSample and push it with
		 * gst_app_src_push_sample() instead. */
		gst_app_src_push_buffer(appsrc, buf); // takes ownership of buf
	}
}
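If do-timestamp=true were ever dropped from the appsrc, each buffer would need explicit timestamps. A sketch of the manual stamping the NOTE above alludes to (the 15 fps value is an assumption):

#include <gst/gst.h>

// Hypothetical helper: stamp a buffer with a synthetic running timestamp
static void stamp_buffer(GstBuffer *buf, int fps) {
	static GstClockTime timestamp = 0;
	GST_BUFFER_PTS(buf) = timestamp;
	GST_BUFFER_DURATION(buf) = gst_util_uint64_scale_int(1, GST_SECOND, fps);
	timestamp += GST_BUFFER_DURATION(buf);
}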
void Image2RTSPNodelet::url_connected(string url) {
string topic;
NODELET_INFO("Client connected: %s", url.c_str());
if (url == "/front") {
if (num == 0) {
ros::NodeHandle& node = getPrivateNodeHandle();
node.getParam("topic_1", topic);
sub = node.subscribe(topic, 10, &Image2RTSPNodelet::imageCallback, this);
}
num++;
}
}
void Image2RTSPNodelet::url_disconnected(string url) {
NODELET_INFO("Client disconnected: %s", url.c_str());
if (url == "/front") {
if (num > 0) num--;
if (num == 0) {
sub.shutdown();
appsrc = NULL;
}
}
}
void Image2RTSPNodelet::print_info(char *s) {
	NODELET_INFO("%s", s);
}

void Image2RTSPNodelet::print_error(char *s) {
	NODELET_ERROR("%s", s);
}
PLUGINLIB_EXPORT_CLASS(image2rtsp::Image2RTSPNodelet, nodelet::Nodelet)

src/test-appsrc.cpp (deleted)

@@ -1,268 +0,0 @@
/* GStreamer
* Copyright (C) 2008 Wim Taymans <wim.taymans at gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include <string>
#include <thread>
#include "test-appsrc.h"
GstElement *appsrc;
GstBuffer *buffer;
static gboolean
timeout(GstRTSPServer *server)
{
GstRTSPSessionPool *pool;
pool = gst_rtsp_server_get_session_pool(server);
gst_rtsp_session_pool_cleanup(pool);
g_object_unref(pool);
return TRUE;
}
/* called when we need to give data to appsrc */
static void
need_data (GstElement * appsrc, guint unused)
{
g_print("NEED DATA. Source is dry.");
}
/* called when a new media pipeline is constructed. We can query the
* pipeline and configure our appsrc */
static void
media_constructed(GstRTSPMediaFactory *factory, GstRTSPMedia *media,
gpointer user_data)
{
GstElement *element;
g_print ("YAY MEDIA BEING CONSTRUCTED!!!\n");
/* Setup for multicasting */
guint i, n_streams;
n_streams = gst_rtsp_media_n_streams(media);
for (i = 0; i < n_streams; i++)
{
GstRTSPAddressPool *pool;
GstRTSPStream *stream;
gchar *min, *max;
stream = gst_rtsp_media_get_stream(media, i);
/* make a new address pool */
pool = gst_rtsp_address_pool_new();
min = g_strdup_printf("224.3.0.%d", (2 * i) + 1);
max = g_strdup_printf("224.3.0.%d", (2 * i) + 2);
gst_rtsp_address_pool_add_range(pool, min, max,
5000 + (10 * i), 5010 + (10 * i), 1);
g_free(min);
g_free(max);
gst_rtsp_stream_set_address_pool(stream, pool);
g_object_unref(pool);
}
/* get the element used for providing the streams of the media */
element = gst_rtsp_media_get_element (media);
/* get our appsrc, we named it 'mysrc' with the name property */
appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "mysrc");
/* this instructs appsrc that we will be dealing with timed buffer */
gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");
g_signal_connect (appsrc, "need-data", (GCallback) need_data, NULL);
}
/* Borrowed from https://github.com/ProjectArtemis/gst_video_server/blob/master/src/server_nodelet.cpp */
namespace enc = sensor_msgs::image_encodings;
GstCaps* gst_caps_new_from_image(const sensor_msgs::Image::ConstPtr &msg)
{
// http://gstreamer.freedesktop.org/data/doc/gstreamer/head/pwg/html/section-types-definitions.html
static const ros::M_string known_formats = {{
{enc::RGB8, "RGB"},
{enc::RGB16, "RGB16"},
{enc::RGBA8, "RGBA"},
{enc::RGBA16, "RGBA16"},
{enc::BGR8, "BGR"},
{enc::BGR16, "BGR16"},
{enc::BGRA8, "BGRA"},
{enc::BGRA16, "BGRA16"},
{enc::MONO8, "GRAY8"},
{enc::MONO16, "GRAY16_LE"},
}};
if (msg->is_bigendian) {
ROS_ERROR("GST: big endian image format is not supported");
return nullptr;
}
auto format = known_formats.find(msg->encoding);
g_print("Format found = %s\n", format);
if (format == known_formats.end()) {
ROS_ERROR("GST: image format '%s' unknown", msg->encoding.c_str());
return nullptr;
}
return gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, format->second.c_str(),
"width", G_TYPE_INT, msg->width,
"height", G_TYPE_INT, msg->height,
"framerate", GST_TYPE_FRACTION, 0, 1, // 0/1 = dynamic
nullptr);
}
/* Borrowed from https://github.com/ProjectArtemis/gst_video_server/blob/master/src/server_nodelet.cpp */
GstSample* gst_sample_new_from_image(const sensor_msgs::Image::ConstPtr &msg)
{
g_print("Image details: %s\n", msg->encoding.c_str());
// unfortunately we may not move image data because it is immutable. copying.
auto buffer = gst_buffer_new_allocate(nullptr, msg->data.size(), nullptr);
g_assert(buffer);
/* NOTE(vooon):
* I found that best is to use `do-timestamp=true` parameter
* and forgot about stamping stamp problem (PTS).
*/
//auto ts = gst_time_from_stamp(msg->header.stamp);
//NODELET_INFO("TS: %lld, %lld", ts, gst_util_uint64_scale_int(1, GST_SECOND, 40));
gst_buffer_fill(buffer, 0, msg->data.data(), msg->data.size());
GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_LIVE);
//GST_BUFFER_PTS(buffer) = ts;
auto caps = gst_caps_new_from_image(msg);
g_print("Got caps from msg\n");
if (caps == nullptr) {
gst_object_unref(GST_OBJECT(buffer));
return nullptr;
}
auto sample = gst_sample_new(buffer, caps, nullptr, nullptr);
g_print("Assigned a sample\n");
gst_buffer_unref(buffer);
gst_caps_unref(caps);
return sample;
}
void imageCallback(const sensor_msgs::ImageConstPtr& msg)
{
GstSample *sample;
GstFlowReturn ret;
g_print("Got new ROS image callback\n");
/* Create the sample from the ros msg */
sample = gst_sample_new_from_image(msg);
/* Get the pipeline to push to */
ret = gst_app_src_push_sample (GST_APP_SRC (appsrc), sample);
gst_object_unref (sample);
}
int main(int argc, char *argv[])
{
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
/* create a server instance */
server = gst_rtsp_server_new();
/* get the mount points for this server, every server has a default object
* that be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points(server);
/* make a media factory for a test stream. The default media factory can use
* gst-launch syntax to create pipelines.
* any launch line works as long as it contains elements named pay%d. Each
* element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new();
gst_rtsp_media_factory_set_launch(factory, "( "
"appsrc name=mysrc ! videoconvert ! videoscale ! video/x-raw,framerate=15/1,width=1280,height=720 ! videoconvert ! "
"x264enc tune=zerolatency bitrate=1000 key-int-max=90 ! "
"rtph264pay name=pay0 pt=96"
" )");
gst_rtsp_media_factory_set_shared(factory, TRUE);
/* notify when our media is ready, This is called whenever someone asks for
* the media and a new pipeline with our appsrc is created */
g_signal_connect(factory, "media-constructed", (GCallback)media_constructed,
NULL);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory(mounts, "/test", factory);
/* don't need the ref to the mounts anymore */
g_object_unref(mounts);
/* attach the server to the default maincontext */
if (gst_rtsp_server_attach(server, NULL) == 0)
{
g_print("failed to attach the server\n");
return -1;
}
g_timeout_add_seconds(2, (GSourceFunc)timeout, server);
/* start serving */
g_print("stream ready at rtsp://127.0.0.1:8554/test\n");
/* Initialise the ros subscriber node */
ros::init(argc, argv, "image_listener");
ros::NodeHandle nh;
image_transport::ImageTransport it(nh);
image_transport::Subscriber sub = it.subscribe("camera/image", 1, imageCallback);
/* Spin up ROS, but use the non-blocking spinner so
* our g_main_loop can still listen for RTSP connections */
ros::AsyncSpinner spinner(1);
spinner.start();
g_main_loop_run(loop);
spinner.stop();
return 0;
}

src/test-appsrc.h (deleted)

@@ -1,24 +0,0 @@
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <gst/app/gstappsrc.h>
#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <sensor_msgs/image_encodings.h>
static gboolean
timeout(GstRTSPServer *server);
static void
media_constructed(GstRTSPMediaFactory *factory, GstRTSPMedia *media,
gpointer user_data);
GstCaps*
gst_caps_new_from_image(const sensor_msgs::Image::ConstPtr &msg);
GstSample*
gst_sample_new_from_image(const sensor_msgs::Image::ConstPtr &msg);
int
ros_main(int argc, char *argv[]);

(deleted file)

@@ -1,204 +0,0 @@
/* GStreamer
* Copyright (C) 2008 Wim Taymans <wim.taymans at gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/rtsp-server/rtsp-server.h>
static gboolean
timeout(GstRTSPServer *server)
{
GstRTSPSessionPool *pool;
pool = gst_rtsp_server_get_session_pool(server);
gst_rtsp_session_pool_cleanup(pool);
g_object_unref(pool);
return TRUE;
}
static void
media_constructed(GstRTSPMediaFactory *factory, GstRTSPMedia *media)
{
guint i, n_streams;
n_streams = gst_rtsp_media_n_streams(media);
for (i = 0; i < n_streams; i++)
{
GstRTSPAddressPool *pool;
GstRTSPStream *stream;
gchar *min, *max;
stream = gst_rtsp_media_get_stream(media, i);
/* make a new address pool */
pool = gst_rtsp_address_pool_new();
min = g_strdup_printf("224.3.0.%d", (2 * i) + 1);
max = g_strdup_printf("224.3.0.%d", (2 * i) + 2);
gst_rtsp_address_pool_add_range(pool, min, max,
5000 + (10 * i), 5010 + (10 * i), 1);
g_free(min);
g_free(max);
gst_rtsp_stream_set_address_pool(stream, pool);
g_object_unref(pool);
}
}
/* called when there is a new buffer ready for
* processing */
static void
wrapper_imageCallback(void* pt2Object, const sensor_msgs::ImageConstPtr& msg)
{
myImages* mySelf = (myImages*) pt2Object; // explicitly cast to a pointer to class myImages
mySelf->imageCallback(msg ); // call member
}
static GstFlowReturn
imageCallback(const sensor_msgs::ImageConstPtr& msg)
{
GstSample *sample;
GstFlowReturn ret;
GstCaps *caps;
char *gst_type, *gst_format = (char *)"";
GstBuffer *buf;
void *imgdata;
GstMapInfo map;
static GstClockTime timestamp=0;
appsrc_front = gst_bin_get_by_name(GST_BIN(data->sink), "appsrc_front");
if (appsrc_front != NULL)
{
// Set caps from message
get_format(msg->encoding.c_str(), &gst_type, &gst_format);
caps = gst_caps_new_simple (gst_type,
"format", G_TYPE_STRING, gst_format,
"width", G_TYPE_INT, msg->width,
"height", G_TYPE_INT, msg->height,
NULL);
gst_app_src_set_caps(appsrc_front, caps);
buf = gst_buffer_new_and_alloc(msg->step*msg->height);
gst_buffer_map(buf, &map, GST_MAP_READ);
imgdata = map.data;
GST_BUFFER_PTS(buf) = timestamp;
GST_BUFFER_DURATION(buf) = gst_util_uint64_scale_int(1, GST_SECOND, 15);
timestamp += GST_BUFFER_DURATION(buf);
memcpy(imgdata, &msg->data[0], msg->step*msg->height);
gst_buffer_unmap(buf, &map);
gst_app_src_push_buffer(appsrc_rgb, buf);
}
/* get the sample from appsink */
sample = msg; //Get from ROS
/* get source an push new sample */
source = gst_bin_get_by_name(GST_BIN(source), "appsrc_front");
ret = gst_app_src_push_sample(GST_APP_SRC(source), sample);
gst_object_unref(source);
return ret;
}
int main(int argc, char **argv)
{
std::thread rosthr(ros_main)
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory_front, *factory_back;
gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
/* create a server instance */
server = gst_rtsp_server_new();
/* get the mount points for this server, every server has a default object
* that be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points(server);
/* make a media factory for a test stream. The default media factory can use
* gst-launch syntax to create pipelines.
* any launch line works as long as it contains elements named pay%d. Each
* element with pay%d names will be a stream */
factory_front = gst_rtsp_media_factory_new();
gst_rtsp_media_factory_set_launch(factory_front, "( "
"appsrc name=appsrc_front ! videoconvert ! videoscale ! video/x-raw,framerate=15/1,width=1280,height=720 ! "
"x264enc tune=zerolatency bitrate=1000 key-int-max=90 ! "
"rtph264pay name=pay0 pt=96 "
")");
gst_rtsp_media_factory_set_shared(factory_front, TRUE);
g_signal_connect(factory_front, "media-constructed", (GCallback)media_constructed, NULL);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory(mounts, "/front", factory_front);
/* don't need the ref to the mapper anymore */
g_object_unref(mounts);
/* attach the server to the default maincontext */
if (gst_rtsp_server_attach(server, NULL) == 0)
{
g_print("failed to attach the server\n");
return -1;
}
g_timeout_add_seconds(2, (GSourceFunc)timeout, server);
/* start serving */
g_print("stream ready at rtsp://127.0.0.1:8554/front\n");
// Use the ros::spin main loop instead.
// g_main_loop_run(loop);
ros::init(argc, argv, "image_listener");
ros::NodeHandle nh;
image_transport::ImageTransport it(nh);
image_transport::Subscriber sub = it.subscribe("camera/image", 1, imageCallback);
ros::spin();
return 0;
}

src/video.cpp (new file)

@@ -0,0 +1,134 @@
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <string>
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <gst/app/gstappsrc.h>
#include <ros/ros.h>
#include <nodelet/nodelet.h>
#include "sensor_msgs/Image.h"
#include <image2rtsp.h>
using namespace std;
using namespace image2rtsp;
static void *mainloop(void *arg) {
	GMainLoop *loop = g_main_loop_new(NULL, FALSE);
	g_main_loop_run(loop);
	g_main_loop_unref(loop); // only reached if the loop is ever quit
	return NULL;
}
void Image2RTSPNodelet::video_mainloop_start() {
pthread_t tloop;
gst_init(NULL, NULL);
pthread_create(&tloop, NULL, &mainloop, NULL);
}
static void client_options(GstRTSPClient *client, GstRTSPContext *state, Image2RTSPNodelet *nodelet) {
if (state->uri) {
nodelet->url_connected(state->uri->abspath);
}
}
static void client_teardown(GstRTSPClient *client, GstRTSPContext *state, Image2RTSPNodelet *nodelet) {
if (state->uri) {
nodelet->url_disconnected(state->uri->abspath);
}
}
static void new_client(GstRTSPServer *server, GstRTSPClient *client, Image2RTSPNodelet *nodelet) {
nodelet->print_info((char *)"New RTSP client");
g_signal_connect(client, "options-request", G_CALLBACK(client_options), nodelet);
g_signal_connect(client, "teardown-request", G_CALLBACK(client_teardown), nodelet);
}
/* this function is periodically run to clean up the expired sessions from the pool. */
static gboolean session_cleanup(Image2RTSPNodelet *nodelet, gboolean ignored)
{
GstRTSPServer *server = nodelet->rtsp_server;
GstRTSPSessionPool *pool;
int num;
pool = gst_rtsp_server_get_session_pool(server);
num = gst_rtsp_session_pool_cleanup(pool);
g_object_unref(pool);
if (num > 0) {
char s[32];
snprintf(s, 32, (char *)"Sessions cleaned: %d", num);
nodelet->print_info(s);
}
return TRUE;
}
GstRTSPServer *Image2RTSPNodelet::rtsp_server_create() {
GstRTSPServer *server;
/* create a server instance */
server = gst_rtsp_server_new();
/* attach the server to the default maincontext */
gst_rtsp_server_attach(server, NULL);
g_signal_connect(server, "client-connected", G_CALLBACK(new_client), this);
/* add a timeout for the session cleanup */
g_timeout_add_seconds(2, (GSourceFunc)session_cleanup, this);
return server;
}
/* called when a new media pipeline is constructed. We can query the
* pipeline and configure our appsrc */
static void media_configure(GstRTSPMediaFactory *factory, GstRTSPMedia *media, GstElement **appsrc)
{
GstElement *pipeline = gst_rtsp_media_get_element(media);
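	/* NB: "imagesrc" must match the appsrc name in the pipeline parameter
	 * (appsrc name=imagesrc in launch/test_rtsp.launch) */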
*appsrc = gst_bin_get_by_name(GST_BIN(pipeline), "imagesrc");
	/* this instructs appsrc that we will be dealing with timed buffers */
gst_util_set_object_arg(G_OBJECT(*appsrc), "format", "time");
gst_object_unref(pipeline);
}
void Image2RTSPNodelet::rtsp_server_add_url(const char *url, const char *sPipeline, GstElement **appsrc) {
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
	/* get the mount points for this server; every server has a default object
	 * that can be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points(rtsp_server);
	/* make a media factory for the stream. The default media factory can use
	 * gst-launch syntax to create pipelines; any launch line works as long
	 * as it contains elements named pay%d, and each such element becomes a
	 * stream */
factory = gst_rtsp_media_factory_new();
gst_rtsp_media_factory_set_launch(factory, sPipeline);
	/* notify when our media is ready. This is called whenever someone asks for
	 * the media and a new pipeline is created */
g_signal_connect(factory, "media-configure", (GCallback)media_configure, appsrc);
gst_rtsp_media_factory_set_shared(factory, TRUE);
/* attach the factory to the url */
gst_rtsp_mount_points_add_factory(mounts, url, factory);
/* don't need the ref to the mounts anymore */
g_object_unref(mounts);
}
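onInit() currently reads only the _1 parameters, though the suffix suggests more streams are intended. A sketch of how the lookup might be generalized (the mountpoint_2, pipeline_2, ... names are assumptions, and each stream would also need its own appsrc member rather than the single pointer used above):

// Hypothetical replacement for the parameter lookup in onInit()
for (int i = 1; ; i++) {
	std::string suffix = "_" + std::to_string(i);
	std::string mount, pipeline;
	if (!node.getParam("mountpoint" + suffix, mount) ||
	    !node.getParam("pipeline" + suffix, pipeline)) {
		break; // no more streams configured
	}
	rtsp_server_add_url(mount.c_str(), pipeline.c_str(), (GstElement **)&appsrc);
}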