@@ -36,6 +36,9 @@
 #include <chrono>
+#include <sensor_msgs/point_cloud2_iterator.hpp>
+#include <pcl_conversions/pcl_conversions.h>
+
 //#include "modulecomm.h"
 using namespace std;
@@ -44,9 +47,239 @@ using namespace std;
 adc_perception_lidar_cnn::adc_perception_lidar_cnn() : Node("adc_perception_lidar_cnn")
 {
+  using std::placeholders::_1;
+
+  // CNN segmentation model files (Caffe prototxt + weights).
+  std::string protofile = "/home/nvidia";
+  protofile += "/models/lidar/model.prototxt";
+  std::string weightfile = "/home/nvidia";
+  weightfile += "/models/lidar/model.caffemodel";
+
+  // Detection range and width/height parameters handed to the CNN segmenter.
+  double range, width, height;
+  range = 60.0;
+  width = 1024;
+  height = 1024;
+
+  mcnn.init(protofile, weightfile, range, 0.6, width, height, false, true, 0);
+
+  pointcloud_sub_ = this->create_subscription<sensor_msgs::msg::PointCloud2>(
+    "input/pointcloud", rclcpp::SensorDataQoS().keep_last(1),
+    std::bind(&adc_perception_lidar_cnn::pointCloudCallback, this, _1));
+  dynamic_objects_pub_ =
+    this->create_publisher<tier4_perception_msgs::msg::DetectedObjectsWithFeature>(
+      "output/labeled_clusters", rclcpp::QoS{1});
+}
+
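+// Plain quaternion/Euler helper types with the standard yaw-pitch-roll (ZYX)
+// conversion formulas; ToQuaternion() is used below to fill box orientations.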
+struct Quaternion {
+  double w, x, y, z;
+};
+
+struct EulerAngles {
+  double roll, pitch, yaw;
+};
+
+EulerAngles ToEulerAngles(Quaternion q) {
+  EulerAngles angles;
+
+  // roll (x-axis rotation)
+  double sinr_cosp = 2 * (q.w * q.x + q.y * q.z);
+  double cosr_cosp = 1 - 2 * (q.x * q.x + q.y * q.y);
+  angles.roll = std::atan2(sinr_cosp, cosr_cosp);
+
+  // pitch (y-axis rotation)
+  double sinp = 2 * (q.w * q.y - q.z * q.x);
+  if (std::abs(sinp) >= 1)
+    angles.pitch = std::copysign(M_PI / 2, sinp);  // use 90 degrees if out of range
+  else
+    angles.pitch = std::asin(sinp);
+
+  // yaw (z-axis rotation)
+  double siny_cosp = 2 * (q.w * q.z + q.x * q.y);
+  double cosy_cosp = 1 - 2 * (q.y * q.y + q.z * q.z);
+  angles.yaw = std::atan2(siny_cosp, cosy_cosp);
+
+  return angles;
+}
+
+double steear_now = 0;
+
+Quaternion ToQuaternion(double yaw, double pitch, double roll)  // yaw (Z), pitch (Y), roll (X)
+{
+  // Abbreviations for the various angular functions
+  double cy = cos(yaw * 0.5);
+  double sy = sin(yaw * 0.5);
+  double cp = cos(pitch * 0.5);
+  double sp = sin(pitch * 0.5);
+  double cr = cos(roll * 0.5);
+  double sr = sin(roll * 0.5);
+
+  Quaternion q;
+  q.w = cy * cp * cr + sy * sp * sr;
+  q.x = cy * cp * sr - sy * sp * cr;
+  q.y = sy * cp * sr + cy * sp * cr;
+  q.z = sy * cp * cr - cy * sp * sr;
+
+  return q;
+}
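+
+// Run CNN segmentation on one point cloud and convert every returned Obstacle
+// cluster into a tier4 DetectedObjectWithFeature (axis-aligned box + class label).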
+bool adc_perception_lidar_cnn::detectDynamicObjects(
+  const sensor_msgs::msg::PointCloud2 & input,
+  tier4_perception_msgs::msg::DetectedObjectsWithFeature & output)
+{
+  pcl::PointCloud<pcl::PointXYZI> pcl_input;
+  pcl::fromROSMsg(input, pcl_input);
+
+  pcl::PointCloud<pcl::PointXYZI>::Ptr pc_ptr(new pcl::PointCloud<pcl::PointXYZI>);
+  *pc_ptr = pcl_input;
+
+  // Mark every input point as a valid candidate for the segmenter.
+  pcl::PointIndices valid_idx;
+  auto & indices = valid_idx.indices;
+  indices.resize(pc_ptr->size());
+  std::iota(indices.begin(), indices.end(), 0);
+
+  std::vector<Obstacle> objvec;
+  mcnn.segment(pc_ptr, valid_idx, objvec);
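+
+  // For each segmented obstacle: compute an axis-aligned bounding box from the
+  // cluster extents and attach the class label reported by the CNN.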
+  for (size_t i = 0; i < objvec.size(); i++)
+  {
+    Obstacle x = objvec.at(i);
+    pcl::PointCloud<pcl::PointXYZI> in_cluster = *x.cloud_ptr;
+
+    float min_x = std::numeric_limits<float>::max();
+    float max_x = -std::numeric_limits<float>::max();
+    float min_y = std::numeric_limits<float>::max();
+    float max_y = -std::numeric_limits<float>::max();
+    float min_z = std::numeric_limits<float>::max();
+    float max_z = -std::numeric_limits<float>::max();
+    float average_x = 0, average_y = 0, average_z = 0;
+    float length, width, height;
+
+    pcl::PointXYZ min_point;
+    pcl::PointXYZ max_point;
+    // pcl::PointXYZ average_point;
+    pcl::PointXYZ centroid;
+
+    centroid.x = 0;
+    centroid.y = 0;
+    centroid.z = 0;
+
+    for (auto pit = in_cluster.points.begin(); pit != in_cluster.points.end(); ++pit)
+    {
+      average_x += pit->x;
+      average_y += pit->y;
+      average_z += pit->z;
+      centroid.x += pit->x;
+      centroid.y += pit->y;
+      centroid.z += pit->z;
+
+      if (pit->x < min_x)
+        min_x = pit->x;
+      if (pit->y < min_y)
+        min_y = pit->y;
+      if (pit->z < min_z)
+        min_z = pit->z;
+      if (pit->x > max_x)
+        max_x = pit->x;
+      if (pit->y > max_y)
+        max_y = pit->y;
+      if (pit->z > max_z)
+        max_z = pit->z;
+    }
+
+    // min, max points
+    min_point.x = min_x;
+    min_point.y = min_y;
+    min_point.z = min_z;
+    max_point.x = max_x;
+    max_point.y = max_y;
+    max_point.z = max_z;
+
+    if (in_cluster.points.size() > 0)
+    {
+      centroid.x /= in_cluster.points.size();
+      centroid.y /= in_cluster.points.size();
+      centroid.z /= in_cluster.points.size();
+    }
+    length = max_point.x - min_point.x;
+    width = max_point.y - min_point.y;
+    height = max_point.z - min_point.z;
+
+    geometry_msgs::msg::PoseWithCovariance pose;
+    geometry_msgs::msg::TwistWithCovariance twist;
+
+    // Box center; note the axis swap (x <- y, y <- -x), matching the fixed
+    // -90 degree yaw applied to the orientation below.
+    pose.pose.position.set__x(min_point.y + width / 2);
+    pose.pose.position.set__y((min_point.x + length / 2) * (-1.0));
+    pose.pose.position.set__z(min_point.z + height / 2);
+
+    double pitch, roll, yaw;
+    pitch = 0;
+    roll = 0;
+    yaw = 0 - M_PI / 2.0;
+    Quaternion quat = ToQuaternion(yaw, pitch, roll);
+
+    pose.pose.orientation.set__w(quat.w);
+    pose.pose.orientation.set__x(quat.x);
+    pose.pose.orientation.set__y(quat.y);
+    pose.pose.orientation.set__z(quat.z);
+
+    twist.twist.linear.set__x(0);
+
+    tier4_perception_msgs::msg::DetectedObjectWithFeature feature_object;
+    autoware_perception_msgs::msg::ObjectClassification xclass;
+    xclass.set__label(autoware_perception_msgs::msg::ObjectClassification::UNKNOWN);
+    if (x.GetTypeString() == "pedestrian")
+      xclass.set__label(autoware_perception_msgs::msg::ObjectClassification::PEDESTRIAN);
+    if (x.GetTypeString() == "car")
+      xclass.set__label(autoware_perception_msgs::msg::ObjectClassification::CAR);
+    if (x.GetTypeString() == "bike")
+      xclass.set__label(autoware_perception_msgs::msg::ObjectClassification::BICYCLE);
+    xclass.set__probability(1.0);
+    feature_object.object.classification.push_back(xclass);
+    feature_object.object.kinematics.pose_with_covariance = pose;
+    feature_object.object.kinematics.twist_with_covariance = twist;
+    feature_object.object.kinematics.orientation_availability =
+      autoware_perception_msgs::msg::DetectedObjectKinematics::UNAVAILABLE;
+    feature_object.object.kinematics.has_twist = false;
+
+    // shape
+    autoware_perception_msgs::msg::Shape shape;
+    shape.type = autoware_perception_msgs::msg::Shape::BOUNDING_BOX;
+    shape.dimensions.x = length;
+    shape.dimensions.y = width;
+    shape.dimensions.z = height;
+    feature_object.object.set__shape(shape);
+
+    // Fill the caller's output message; publishing is done by the callback.
+    output.feature_objects.push_back(feature_object);
+  }
+
+  rclcpp::Time current_time = this->now();
+  output.header.frame_id = "base_link";
+  output.header.stamp = current_time;
+
+  return true;
 }
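+
+// Subscription callback: run detection on the incoming cloud and publish the result.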
+void adc_perception_lidar_cnn::pointCloudCallback(
+  const sensor_msgs::msg::PointCloud2::ConstSharedPtr msg)
+{
+  tier4_perception_msgs::msg::DetectedObjectsWithFeature output_msg;
+  detectDynamicObjects(*msg, output_msg);
+  dynamic_objects_pub_->publish(output_msg);
+}