They work perfectly if I view them separately, but when I try to watch both at the same time, rviz or rqt crashes.
Do these RTP/etc. options let you see the image as a topic? All I want is to have the topic so I can run image_pipeline on the PC instead of the Jetson.
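To make the question concrete, this is roughly what I imagine running on the PC side to turn the stream back into a topic (just a rough sketch, assuming ROS1, an H.264 RTP stream arriving on UDP port 1234, and OpenCV built with GStreamer support; the node name, topic name and pipeline string are placeholders):

#include <ros/ros.h>
#include <sensor_msgs/Image.h>
#include <cv_bridge/cv_bridge.h>
#include <opencv2/opencv.hpp>

int main(int argc, char** argv)
{
    ros::init(argc, argv, "rtp_receiver");
    ros::NodeHandle nh;
    ros::Publisher pub = nh.advertise<sensor_msgs::Image>("left/image_raw", 2);

    // GStreamer pipeline that receives, depacketizes and decodes the RTP stream
    const std::string pipeline =
        "udpsrc port=1234 caps=\"application/x-rtp, media=video, encoding-name=H264\" ! "
        "rtph264depay ! avdec_h264 ! videoconvert ! appsink";

    cv::VideoCapture cap(pipeline, cv::CAP_GSTREAMER);

    if( !cap.isOpened() )
    {
        ROS_ERROR("failed to open RTP stream");
        return 0;
    }

    cv::Mat frame;

    while( ros::ok() && cap.read(frame) )
    {
        // wrap the decoded frame in a sensor_msgs::Image and publish it
        sensor_msgs::ImagePtr msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", frame).toImageMsg();
        msg->header.stamp = ros::Time::now();
        pub.publish(msg);
        ros::spinOnce();
    }

    return 0;
}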
By the way, I modified your video_source code to stream both cameras from the same node, and it seems to work well; not perfectly, but fluid enough to be usable.
This is how the code ended up. If you are interested in reading it, just let me know about any bugs you find:
#include "ros_compat.h"
#include "image_converter.h"
#include <jetson-utils/videoSource.h>
// globals
videoSource* stream1 = NULL;
videoSource* stream2 = NULL;
imageConverter* image_cvt1 = NULL;
imageConverter* image_cvt2 = NULL;
Publisher<sensor_msgs::Image> image_pub1 = NULL;
Publisher<sensor_msgs::Image> image_pub2 = NULL;
// acquire and publish the camera frames
bool acquireFrame()
{
    imageConverter::PixelType* nextFrame1 = NULL;
    imageConverter::PixelType* nextFrame2 = NULL;

    // get the latest frame from each camera
    if( !stream1->Capture(&nextFrame1, 1000) )
    {
        ROS_ERROR("failed to capture next frame1");
        return false;
    }

    if( !stream2->Capture(&nextFrame2, 1000) )
    {
        ROS_ERROR("failed to capture next frame2");
        return false;
    }

    // assure correct image size
    if( !image_cvt1->Resize(stream1->GetWidth(), stream1->GetHeight(), imageConverter::ROSOutputFormat) )
    {
        ROS_ERROR("failed to resize camera image converter 1");
        return false;
    }

    if( !image_cvt2->Resize(stream2->GetWidth(), stream2->GetHeight(), imageConverter::ROSOutputFormat) )
    {
        ROS_ERROR("failed to resize camera image converter 2");
        return false;
    }

    // populate the messages
    sensor_msgs::Image msg1;
    sensor_msgs::Image msg2;

    if( !image_cvt1->Convert(msg1, imageConverter::ROSOutputFormat, nextFrame1) )
    {
        ROS_ERROR("failed to convert video stream1 frame to sensor_msgs::Image");
        return false;
    }

    if( !image_cvt2->Convert(msg2, imageConverter::ROSOutputFormat, nextFrame2) )
    {
        ROS_ERROR("failed to convert video stream2 frame to sensor_msgs::Image");
        return false;
    }

    // populate the timestamp and frame_id in the headers
    // (both messages share the same stamp so the pair can be synchronized downstream)
    msg1.header.stamp    = ROS_TIME_NOW();
    msg1.header.frame_id = "camera_frame";
    msg2.header.stamp    = msg1.header.stamp;
    msg2.header.frame_id = msg1.header.frame_id;

    // publish the messages
    image_pub1->publish(msg1);
    image_pub2->publish(msg2);

    ROS_DEBUG("published %ux%u video frame", stream1->GetWidth(), stream1->GetHeight());
    return true;
}
// node main loop
int main(int argc, char **argv)
{
    /*
     * create node instance
     */
    ROS_CREATE_NODE("video_source");

    /*
     * declare parameters
     */
    videoOptions video_options;

    std::string resource_left_str;
    std::string resource_right_str;
    std::string codec_str;
    std::string flip_str;
    std::string frame_id;
    std::string topic;

    int video_width = video_options.width;
    int video_height = video_options.height;
    int latency = video_options.latency;

    ROS_DECLARE_PARAMETER("resource_left", resource_left_str);
    ROS_DECLARE_PARAMETER("resource_right", resource_right_str);
    ROS_DECLARE_PARAMETER("codec", codec_str);
    ROS_DECLARE_PARAMETER("width", video_width);
    ROS_DECLARE_PARAMETER("height", video_height);
    ROS_DECLARE_PARAMETER("framerate", video_options.frameRate);
    ROS_DECLARE_PARAMETER("loop", video_options.loop);
    ROS_DECLARE_PARAMETER("flip", flip_str);
    ROS_DECLARE_PARAMETER("latency", latency);
    ROS_DECLARE_PARAMETER("frame_id", frame_id);
    ROS_DECLARE_PARAMETER("topic", topic);

    /*
     * retrieve parameters
     */
    ROS_GET_PARAMETER("resource_left", resource_left_str);
    ROS_GET_PARAMETER("resource_right", resource_right_str);
    ROS_GET_PARAMETER("codec", codec_str);
    ROS_GET_PARAMETER("width", video_width);
    ROS_GET_PARAMETER("height", video_height);
    ROS_GET_PARAMETER("framerate", video_options.frameRate);
    ROS_GET_PARAMETER("loop", video_options.loop);
    ROS_GET_PARAMETER("flip", flip_str);
    ROS_GET_PARAMETER("latency", latency);
    ROS_GET_PARAMETER("frame_id", frame_id);
    ROS_GET_PARAMETER("topic", topic);

    if( resource_left_str.size() == 0 )
    {
        ROS_ERROR("resource_left param wasn't set - please set it to the input device/filename/URL of the left camera");
        return 0;
    }

    if( resource_right_str.size() == 0 )
    {
        ROS_ERROR("resource_right param wasn't set - please set it to the input device/filename/URL of the right camera");
        return 0;
    }

    if( codec_str.size() != 0 )
        video_options.codec = videoOptions::CodecFromStr(codec_str.c_str());

    if( flip_str.size() != 0 )
        video_options.flipMethod = videoOptions::FlipMethodFromStr(flip_str.c_str());

    video_options.width = video_width;
    video_options.height = video_height;
    video_options.latency = latency;

    ROS_INFO("opening left video source: %s", resource_left_str.c_str());
    ROS_INFO("opening right video source: %s", resource_right_str.c_str());
    /*
     * open video sources
     */
    stream1 = videoSource::Create(resource_left_str.c_str(), video_options);
    stream2 = videoSource::Create(resource_right_str.c_str(), video_options);

    if( !stream1 )
    {
        ROS_ERROR("failed to open video source 1");
        return 0;
    }

    if( !stream2 )
    {
        ROS_ERROR("failed to open video source 2");
        return 0;
    }

    /*
     * create image converters
     */
    image_cvt1 = new imageConverter();
    image_cvt2 = new imageConverter();

    if( !image_cvt1 )
    {
        ROS_ERROR("failed to create imageConverter 1");
        return 0;
    }

    if( !image_cvt2 )
    {
        ROS_ERROR("failed to create imageConverter 2");
        return 0;
    }

    /*
     * advertise publisher topics
     * (note: the 'topic' and 'frame_id' parameters are currently unused - the topic names and frame_id are hardcoded)
     */
    ROS_CREATE_PUBLISHER(sensor_msgs::Image, "/left/image_raw", 2, image_pub1);
    ROS_CREATE_PUBLISHER(sensor_msgs::Image, "/right/image_raw", 2, image_pub2);
    /*
     * start the camera streams
     */
    if( !stream1->Open() )
    {
        ROS_ERROR("failed to start streaming video source 1");
        return 0;
    }

    if( !stream2->Open() )
    {
        ROS_ERROR("failed to start streaming video source 2");
        return 0;
    }

    /*
     * start publishing video frames
     */
    while( ROS_OK() )
    {
        if( !acquireFrame() )
        {
            if( !stream1->IsStreaming() )
            {
                ROS_INFO("stream 1 is closed or reached EOS, exiting node...");
                break;
            }

            if( !stream2->IsStreaming() )
            {
                ROS_INFO("stream 2 is closed or reached EOS, exiting node...");
                break;
            }
        }

        if( ROS_OK() )
            ROS_SPIN_ONCE();
    }
    /*
     * free resources
     */
    delete stream1;
    delete stream2;
    delete image_cvt1;
    delete image_cvt2;

    return 0;
}
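For reference, the node can then be started with the two resources passed as private parameters, e.g. (assuming ROS1 and two CSI cameras; adjust the resources and sizes to your setup):

rosrun ros_deep_learning video_source _resource_left:=csi://0 _resource_right:=csi://1 _width:=1280 _height:=720 _framerate:=30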