-
-
-## Acknowledgements
-
-Expand
-
-* [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet)
-* [https://github.com/WongKinYiu/yolor](https://github.com/WongKinYiu/yolor)
-* [https://github.com/WongKinYiu/PyTorch_YOLOv4](https://github.com/WongKinYiu/PyTorch_YOLOv4)
-* [https://github.com/WongKinYiu/ScaledYOLOv4](https://github.com/WongKinYiu/ScaledYOLOv4)
-* [https://github.com/Megvii-BaseDetection/YOLOX](https://github.com/Megvii-BaseDetection/YOLOX)
-* [https://github.com/ultralytics/yolov3](https://github.com/ultralytics/yolov3)
-* [https://github.com/ultralytics/yolov5](https://github.com/ultralytics/yolov5)
-* [https://github.com/DingXiaoH/RepVGG](https://github.com/DingXiaoH/RepVGG)
-* [https://github.com/JUGGHM/OREPA_CVPR2022](https://github.com/JUGGHM/OREPA_CVPR2022)
-* [https://github.com/TexasInstruments/edgeai-yolov5/tree/yolo-pose](https://github.com/TexasInstruments/edgeai-yolov5/tree/yolo-pose)
-
-
diff --git a/air_moveit_config/scripts/yolov7/requirements.txt b/air_moveit_config/scripts/yolov7/requirements.txt
deleted file mode 100644
index f4d2182..0000000
--- a/air_moveit_config/scripts/yolov7/requirements.txt
+++ /dev/null
@@ -1,39 +0,0 @@
-# Usage: pip install -r requirements.txt
-
-# Base ----------------------------------------
-matplotlib>=3.2.2
-numpy>=1.18.5,<1.24.0
-opencv-python>=4.1.1
-Pillow>=7.1.2
-PyYAML>=5.3.1
-requests>=2.23.0
-scipy>=1.4.1
-torch>=1.7.0,!=1.12.0
-torchvision>=0.8.1,!=0.13.0
-tqdm>=4.41.0
-protobuf<4.21.3
-
-# Logging -------------------------------------
-tensorboard>=2.4.1
-# wandb
-
-# Plotting ------------------------------------
-pandas>=1.1.4
-seaborn>=0.11.0
-
-# Export --------------------------------------
-# coremltools>=4.1 # CoreML export
-# onnx>=1.9.0 # ONNX export
-# onnx-simplifier>=0.3.6 # ONNX simplifier
-# scikit-learn==0.19.2 # CoreML quantization
-# tensorflow>=2.4.1 # TFLite export
-# tensorflowjs>=3.9.0 # TF.js export
-# openvino-dev # OpenVINO export
-
-# Extras --------------------------------------
-ipython # interactive notebook
-psutil # system utilization
-thop # FLOPs computation
-# albumentations>=1.0.3
-# pycocotools>=2.0 # COCO mAP
-# roboflow
diff --git a/air_moveit_config/src/add_cylinder.cpp b/air_moveit_config/src/add_cylinder.cpp
new file mode 100644
index 0000000..25e748e
--- /dev/null
+++ b/air_moveit_config/src/add_cylinder.cpp
@@ -0,0 +1,105 @@
+#include <ros/ros.h>
+#include <moveit/planning_scene_interface/planning_scene_interface.h>
+#include <moveit_msgs/CollisionObject.h>
+#include <shape_msgs/SolidPrimitive.h>
+#include <geometry_msgs/Vector3.h>
+#include <sensor_msgs/PointCloud2.h>
+#include <pcl/point_types.h>
+#include <pcl_conversions/pcl_conversions.h>
+
+moveit::planning_interface::PlanningSceneInterface planning_scene_interface;
+
+ros::Subscriber sub;
+
+// Callback function for the subscriber
+void cylinderCallback(const geometry_msgs::Vector3::ConstPtr& dimensions)
+{
+ // Get the cylinder dimensions from the message
+ float radius = dimensions->x;
+ float height = dimensions->y;
+ float x = dimensions->z;
+
+ // Create the cylinder shape
+ shape_msgs::SolidPrimitive primitive;
+ primitive.type = primitive.CYLINDER;
+ primitive.dimensions.resize(2);
+ primitive.dimensions[0] = height;
+ primitive.dimensions[1] = radius;
+
+ // Specify the pose of the cylinder
+ geometry_msgs::Pose pose;
+ pose.position.x = x;
+ pose.position.y = 0.0;
+ pose.position.z = 0.5;
+
+ moveit_msgs::CollisionObject collision_object;
+ collision_object.header.frame_id = "base_link";
+ collision_object.id = "cylinder";
+
+ collision_object.primitives.push_back(primitive);
+ collision_object.primitive_poses.push_back(pose);
+ collision_object.operation = collision_object.ADD;
+
+ std::vector<moveit_msgs::CollisionObject> collision_objects;
+ collision_objects.push_back(collision_object);
+
+ // Add the cylinder to the planning scene
+ planning_scene_interface.addCollisionObjects(collision_objects);
+
+ // Unsubscribe after receiving the first message
+ sub.shutdown();
+}
+
+int x_offset, y_offset, z_offset;
+
+void callback(const sensor_msgs::PointCloud2ConstPtr& msg)
+{
+ // Check if the XYZ fields have been set
+ if (x_offset == -1)
+ {
+ for (int i = 0; i < msg->fields.size(); i++)
+ {
+ if (msg->fields[i].name == "x")
+ {
+ x_offset = i;
+ }
+ else if (msg->fields[i].name == "y")
+ {
+ y_offset = i;
+ }
+ else if (msg->fields[i].name == "z")
+ {
+ z_offset = i;
+ }
+ }
+ }
+
+ // Convert the PointCloud2 message to PCL data type
+ pcl::PointCloud<pcl::PointXYZ> cloud;
+ pcl::fromROSMsg(*msg, cloud);
+
+ // Get the XYZ data for a specific pixel
+ int r = 100; // specify the row of the pixel you are interested in
+ int c = 200; // specify the column of the pixel you are interested in
+
+ pcl::PointXYZ p = cloud.at(c, r);
+ float x = p.x;
+ float y = p.y;
+ float z = p.z;
+
+ ROS_INFO("The XYZ data for pixel at (%d, %d) is: (%f, %f, %f)", r, c, x, y, z);
+ return;
+}
+
+int main(int argc, char** argv)
+{
+ ros::init(argc, argv, "detect_and_add_cylinder_collision_object_demo");
+ ros::NodeHandle node_handle;
+
+ // Subscribe to the cylinder dimensions topic
+ sub = node_handle.subscribe("cylinder_dimensions", 10, cylinderCallback);
+
+ ros::spin();
+
+ return 0;
+}
\ No newline at end of file
diff --git a/air_moveit_config/src/add_scene.cpp b/air_moveit_config/src/add_scene.cpp
new file mode 100644
index 0000000..10f783b
--- /dev/null
+++ b/air_moveit_config/src/add_scene.cpp
@@ -0,0 +1,170 @@
+/*********************************************************************
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2012, Willow Garage, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of Willow Garage nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *********************************************************************/
+
+/* Author: Ioan Sucan, Ridhwan Luthra*/
+
+// ROS
+#include <ros/ros.h>
+
+// MoveIt
+#include <moveit/planning_scene_interface/planning_scene_interface.h>
+#include <moveit/move_group_interface/move_group_interface.h>
+
+// TF2
+#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
+
+// The circle constant tau = 2*pi. One tau is one rotation in radians.
+const double tau = 2 * M_PI;
+
+
+void addCollisionObjects(moveit::planning_interface::PlanningSceneInterface& planning_scene_interface)
+{
+ // BEGIN_SUB_TUTORIAL table1
+ //
+ // Creating Environment
+ // ^^^^^^^^^^^^^^^^^^^^
+ // Create vector to hold 3 collision objects.
+ std::vector<moveit_msgs::CollisionObject> collision_objects;
+ collision_objects.resize(3);
+
+ // Add the first table where the cube will originally be kept.
+ collision_objects[0].id = "table1";
+ collision_objects[0].header.frame_id = "base_link";
+
+ /* Define the primitive and its dimensions. */
+ collision_objects[0].primitives.resize(1);
+ collision_objects[0].primitives[0].type = collision_objects[0].primitives[0].BOX;
+ collision_objects[0].primitives[0].dimensions.resize(3);
+ collision_objects[0].primitives[0].dimensions[0] = 0.2;
+ collision_objects[0].primitives[0].dimensions[1] = 0.4;
+ collision_objects[0].primitives[0].dimensions[2] = 0.4;
+
+ /* Define the pose of the table. */
+ collision_objects[0].primitive_poses.resize(1);
+ collision_objects[0].primitive_poses[0].position.x = 0.5;
+ collision_objects[0].primitive_poses[0].position.y = 0;
+ collision_objects[0].primitive_poses[0].position.z = 0.2;
+ collision_objects[0].primitive_poses[0].orientation.w = 1.0;
+ // END_SUB_TUTORIAL
+
+ collision_objects[0].operation = collision_objects[0].ADD;
+
+ // BEGIN_SUB_TUTORIAL table2
+ // Add the second table where we will be placing the cube.
+ collision_objects[1].id = "table2";
+ collision_objects[1].header.frame_id = "base_link";
+
+ /* Define the primitive and its dimensions. */
+ collision_objects[1].primitives.resize(1);
+ collision_objects[1].primitives[0].type = collision_objects[1].primitives[0].BOX;
+ collision_objects[1].primitives[0].dimensions.resize(3);
+ collision_objects[1].primitives[0].dimensions[0] = 0.4;
+ collision_objects[1].primitives[0].dimensions[1] = 0.2;
+ collision_objects[1].primitives[0].dimensions[2] = 0.4;
+
+ /* Define the pose of the table. */
+ collision_objects[1].primitive_poses.resize(1);
+ collision_objects[1].primitive_poses[0].position.x = 0;
+ collision_objects[1].primitive_poses[0].position.y = 0.5;
+ collision_objects[1].primitive_poses[0].position.z = 0.2;
+ collision_objects[1].primitive_poses[0].orientation.w = 1.0;
+ // END_SUB_TUTORIAL
+
+ collision_objects[1].operation = collision_objects[1].ADD;
+
+ // BEGIN_SUB_TUTORIAL object
+ // Define the object that we will be manipulating
+ collision_objects[2].header.frame_id = "base_link";
+ collision_objects[2].id = "object";
+
+ /* Define the primitive and its dimensions. */
+ collision_objects[2].primitives.resize(1);
+ collision_objects[2].primitives[0].type = collision_objects[2].primitives[0].BOX;
+ collision_objects[2].primitives[0].dimensions.resize(3);
+ collision_objects[2].primitives[0].dimensions[0] = 0.02;
+ collision_objects[2].primitives[0].dimensions[1] = 0.02;
+ collision_objects[2].primitives[0].dimensions[2] = 0.2;
+
+ /* Define the pose of the object. */
+ collision_objects[2].primitive_poses.resize(1);
+ collision_objects[2].primitive_poses[0].position.x = 0.5;
+ collision_objects[2].primitive_poses[0].position.y = 0;
+ collision_objects[2].primitive_poses[0].position.z = 0.5;
+ collision_objects[2].primitive_poses[0].orientation.w = 1.0;
+ // END_SUB_TUTORIAL
+
+ collision_objects[2].operation = collision_objects[2].ADD;
+
+ planning_scene_interface.applyCollisionObjects(collision_objects);
+}
+
+int main(int argc, char** argv)
+{
+ ros::init(argc, argv, "add_object");
+ ros::NodeHandle nh;
+ ros::AsyncSpinner spinner(1);
+ spinner.start();
+
+ ros::WallDuration(1.0).sleep();
+ moveit::planning_interface::PlanningSceneInterface planning_scene_interface;
+
+ addCollisionObjects(planning_scene_interface);
+
+ // Wait a bit for ROS things to initialize
+ ros::WallDuration(1.0).sleep();
+ ROS_INFO("Object added!!!!!");
+ return 0;
+}
+
+// BEGIN_TUTORIAL
+// CALL_SUB_TUTORIAL table1
+// CALL_SUB_TUTORIAL table2
+// CALL_SUB_TUTORIAL object
+//
+// Pick Pipeline
+// ^^^^^^^^^^^^^
+// CALL_SUB_TUTORIAL pick1
+// openGripper function
+// """"""""""""""""""""
+// CALL_SUB_TUTORIAL open_gripper
+// CALL_SUB_TUTORIAL pick2
+// closedGripper function
+// """"""""""""""""""""""
+// CALL_SUB_TUTORIAL closed_gripper
+// CALL_SUB_TUTORIAL pick3
+//
+// Place Pipeline
+// ^^^^^^^^^^^^^^
+// CALL_SUB_TUTORIAL place
+// END_TUTORIAL
\ No newline at end of file
diff --git a/air_moveit_config/src/add_sphere.cpp b/air_moveit_config/src/add_sphere.cpp
new file mode 100644
index 0000000..8a74ec1
--- /dev/null
+++ b/air_moveit_config/src/add_sphere.cpp
@@ -0,0 +1,57 @@
+#include <ros/ros.h>
+#include <moveit/planning_scene_interface/planning_scene_interface.h>
+#include <moveit_msgs/CollisionObject.h>
+#include <shape_msgs/SolidPrimitive.h>
+#include <std_msgs/Float32MultiArray.h>
+
+ros::Subscriber sub;
+
+
+// Callback function for the subscriber
+void sphereCallback(const std_msgs::Float32MultiArray::ConstPtr xyz)
+{
+ moveit::planning_interface::PlanningSceneInterface planning_scene_interface;
+ // Get the sphere dimensions from the message
+ float radius = 0.05;
+ float x = xyz->data[0];
+ float y = xyz->data[1];
+ float z = xyz->data[2];
+ ROS_INFO("Object added to planning scene: X: %f Y: %f Z: %f",x,y,z);
+ // Create the sphere shape
+ shape_msgs::SolidPrimitive primitive;
+ primitive.type = primitive.SPHERE;
+ primitive.dimensions.resize(1);
+ primitive.dimensions[0] = radius;
+
+ // Specify the pose of the sphere
+ geometry_msgs::Pose pose;
+ pose.position.x = z; // x to y
+ pose.position.y = -x; // y to -z
+ pose.position.z = -y; //z to x
+
+ moveit_msgs::CollisionObject collision_object;
+ collision_object.header.frame_id = "camera_link";
+ collision_object.id = "sphere";
+
+ collision_object.primitives.push_back(primitive);
+ collision_object.primitive_poses.push_back(pose);
+ collision_object.operation = collision_object.ADD;
+
+ std::vector<moveit_msgs::CollisionObject> collision_objects;
+ collision_objects.push_back(collision_object);
+
+ // Add the sphere to the planning scene
+ planning_scene_interface.applyCollisionObjects(collision_objects);
+
+ // Unsubscribe after receiving the first message
+}
+
+int main(int argc, char** argv)
+{
+ ros::init(argc, argv, "detect_and_add_sphere_collision_object_demo");
+ ros::NodeHandle node_handle;
+ // Subscribe to the sphere dimensions topic
+ sub = node_handle.subscribe("/coordinates_of_object", 10, sphereCallback);
+ ros::spin();
+ return 0;
+}
\ No newline at end of file
diff --git a/air_moveit_config/src/detect_and_add.cpp b/air_moveit_config/src/detect_and_add.cpp
new file mode 100644
index 0000000..ba25c9a
--- /dev/null
+++ b/air_moveit_config/src/detect_and_add.cpp
@@ -0,0 +1,89 @@
+#include <ros/ros.h>
+#include <vision_msgs/Detection2DArray.h>
+#include <sensor_msgs/PointCloud2.h>
+#include <std_msgs/Float32MultiArray.h>
+#include <geometry_msgs/Pose.h>
+#include <pcl/point_cloud.h>
+#include <pcl/point_types.h>
+#include <pcl_conversions/pcl_conversions.h>
+#include "../include/detect_and_add.hpp"
+
+air_object::air_object(ros::NodeHandle& nh)
+ : m_NodeHandle(nh)
+{
+ m_center_x = 0;
+ m_center_y = 0;
+ m_center_y_buffer = 0;
+ m_center_x_buffer = 0;
+ m_yolo_sub = m_NodeHandle.subscribe("/yolov7/yolov7", 1000, &air_object::yolo_callback, this);
+ m_pcl_sub = m_NodeHandle.subscribe("/camera/depth/color/points",1000, &air_object::pcl_callback, this); // Subscribe to the cylinder dimensions topic
+ m_coordinate_pub = nh.advertise<std_msgs::Float32MultiArray>("/coordinates_of_object", 1000);
+ ROS_INFO("INITIALIZING COMPLETED!!!!!!!!");
+ }
+
+void air_object::yolo_callback(const vision_msgs::Detection2DArrayConstPtr& d2d_arr_msg)
+{
+ if(d2d_arr_msg->detections.empty())
+ {
+ ROS_WARN("NO OBJECT DETECTED!!!");
+ return;
+ }
+ vision_msgs::Detection2DArray d2dArr = *d2d_arr_msg;
+ auto detection = d2dArr.detections.at(0);
+ ROS_INFO("Object Detected...");
+ m_center_x = (int) detection.bbox.center.x;
+ m_center_y = (int) detection.bbox.center.y;
+ m_center_x_buffer = m_center_x;
+ m_center_y_buffer = m_center_y;
+
+ ROS_INFO("The center of detected object is (%d, %d) !!", m_center_x, m_center_y);
+}
+
+
+void air_object::pcl_callback(const sensor_msgs::PointCloud2ConstPtr& msg)
+{
+ // Convert the PointCloud2 message to PCL data type
+ if(m_center_x == 0 && m_center_y == 0){
+ ROS_ERROR("PCL ERROR");
+ return;
+ }
+ pcl::PointCloud<pcl::PointXYZ> cloud;
+ pcl::fromROSMsg(*msg, cloud);
+
+ if(cloud.empty())
+ {
+ return;
+ }
+ // Get the XYZ data for a specific pixel
+ int r = m_center_y; // specify the row of the pixel you are interested in
+ int c = m_center_x; // specify the column of the pixel you are interested in
+
+ pcl::PointXYZ p = cloud.at(c, r);
+ m_x = p.x;
+ m_y = p.y;
+ m_z = p.z;
+
+ if(m_x == 0 && m_y == 0 && m_z == 0){
+ ROS_ERROR("OBJECT DETECTED BUT CANNOT ACCESS DEPTH!!!!!!!!");
+ return;
+ }
+
+ ROS_INFO("The XYZ data for pixel at (%d, %d) is: (%f, %f, %f)", c, r, m_x, m_y, m_z);
+
+ m_xyz.data.resize(3);
+ m_xyz.data[0] = m_x;
+ m_xyz.data[1] = m_y;
+ m_xyz.data[2] = m_z;
+ m_coordinate_pub.publish(m_xyz);
+}
+
+int main(int argc, char** argv)
+{
+ ros::init(argc, argv, "detect_and_add_cylinder_collision_object_demo");
+ ros::NodeHandle nh;
+ air_object obj(nh);
+
+ ros::spin();
+
+ return 0;
+}
diff --git a/air_moveit_config/src/detect_and_add_cylinder_collision_object_demo.cpp b/air_moveit_config/src/detect_and_add_cylinder_collision_object_demo.cpp
new file mode 100644
index 0000000..6a1bccb
--- /dev/null
+++ b/air_moveit_config/src/detect_and_add_cylinder_collision_object_demo.cpp
@@ -0,0 +1,381 @@
+/*********************************************************************
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2018, Ridhwan Luthra.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of Ridhwan Luthra nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *********************************************************************/
+
+/* Author: Ridhwan Luthra */
+
+#include <ros/ros.h>
+#include <sensor_msgs/PointCloud2.h>
+#include <moveit/planning_scene_interface/planning_scene_interface.h>
+#include <pcl/point_types.h>
+#include <pcl/filters/passthrough.h>
+#include <pcl/features/normal_3d.h>
+#include <pcl/segmentation/sac_segmentation.h>
+
+#include <pcl/filters/extract_indices.h>
+#include <pcl_conversions/pcl_conversions.h>
+
+class CylinderSegment
+{
+ ros::NodeHandle nh_;
+ moveit::planning_interface::PlanningSceneInterface planning_scene_interface_;
+ ros::Subscriber cloud_subscriber_;
+ ros::Timer dummy;
+public:
+ CylinderSegment(ros::NodeHandle& nh) : nh_(nh)
+ {
+
+ cloud_subscriber_ = nh_.subscribe(
+ "/camera/depth/color/points",
+ 1,
+ &CylinderSegment::cloudCB,
+ this
+ );
+
+ dummy = nh_.createTimer(
+ ros::Duration(0.1),
+ [](const ros::TimerEvent& evt){ROS_INFO("TIMER??");}
+ );
+
+ }
+
+ /** \brief Given the parameters of the cylinder add the cylinder to the planning scene. */
+ void addCylinder()
+ {
+ // BEGIN_SUB_TUTORIAL add_cylinder
+ //
+ // Adding Cylinder to Planning Scene
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ // Define a collision object ROS message.
+ moveit_msgs::CollisionObject collision_object;
+ collision_object.header.frame_id = "camera_link";
+ collision_object.id = "cylinder";
+
+ // Define a cylinder which will be added to the world.
+ shape_msgs::SolidPrimitive primitive;
+ primitive.type = primitive.CYLINDER;
+ primitive.dimensions.resize(2);
+ /* Setting height of cylinder. */
+ primitive.dimensions[0] = cylinder_params.height;
+ /* Setting radius of cylinder. */
+ primitive.dimensions[1] = cylinder_params.radius;
+
+ // Define a pose for the cylinder (specified relative to frame_id).
+ geometry_msgs::Pose cylinder_pose;
+ /* Computing and setting quaternion from axis angle representation. */
+ Eigen::Vector3d cylinder_z_direction(cylinder_params.direction_vec[0], cylinder_params.direction_vec[1],
+ cylinder_params.direction_vec[2]);
+ Eigen::Vector3d origin_z_direction(0., 0., 1.);
+ Eigen::Vector3d axis;
+ axis = origin_z_direction.cross(cylinder_z_direction);
+ axis.normalize();
+ double angle = acos(cylinder_z_direction.dot(origin_z_direction));
+ cylinder_pose.orientation.z = axis.x() * sin(angle / 2);
+ cylinder_pose.orientation.y = axis.y() * sin(angle / 2);
+ cylinder_pose.orientation.x = axis.z() * sin(angle / 2);
+ cylinder_pose.orientation.w = cos(angle / 2);
+
+ // Setting the position of cylinder.
+ cylinder_pose.position.y = cylinder_params.center_pt[0];
+ cylinder_pose.position.z = cylinder_params.center_pt[1];
+ cylinder_pose.position.x = cylinder_params.center_pt[2];
+
+ // Add cylinder as collision object
+ collision_object.primitives.push_back(primitive);
+ collision_object.primitive_poses.push_back(cylinder_pose);
+ collision_object.operation = collision_object.ADD;
+ planning_scene_interface_.applyCollisionObject(collision_object);
+ // END_SUB_TUTORIAL
+ }
+
+ /** \brief Given the pointcloud containing just the cylinder,
+ compute its center point and its height and store in cylinder_params.
+ @param cloud - point cloud containing just the cylinder. */
+ void extractLocationHeight(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud)
+ {
+ double max_angle_y = -std::numeric_limits<double>::infinity();
+ double min_angle_y = std::numeric_limits<double>::infinity();
+
+ double lowest_point[3] = { 0.0, 0.0, 0.0 };
+ double highest_point[3] = { 0.0, 0.0, 0.0 };
+ // BEGIN_SUB_TUTORIAL extract_location_height
+ // Consider a point inside the point cloud and imagine that point is formed on a XY plane where the perpendicular
+ // distance from the plane to the camera is Z. |br|
+ // The perpendicular drawn from the camera to the plane hits at center of the XY plane. |br|
+ // We have the x and y coordinate of the point which is formed on the XY plane. |br|
+ // X is the horizontal axis and Y is the vertical axis. |br|
+ // C is the center of the plane which is Z meter away from the center of camera and A is any point on the plane.
+ // |br|
+ // Now we know Z is the perpendicular distance from the point to the camera. |br|
+ // If you need to find the actual distance d from the point to the camera, you should calculate the hypotenuse-
+ // |code_start| hypot(point.z, point.x);\ |code_end| |br|
+ // angle the point made horizontally- |code_start| atan2(point.z,point.x);\ |code_end| |br|
+ // angle the point made Vertically- |code_start| atan2(point.z, point.y);\ |code_end| |br|
+ // Loop over the entire pointcloud.
+ for (auto const point : cloud->points)
+ {
+ const double angle = atan2(point.z, point.y);
+ /* Find the coordinates of the highest point */
+ if (angle < min_angle_y)
+ {
+ min_angle_y = angle;
+ lowest_point[0] = point.x;
+ lowest_point[1] = point.y;
+ lowest_point[2] = point.z;
+ }
+ /* Find the coordinates of the lowest point */
+ else if (angle > max_angle_y)
+ {
+ max_angle_y = angle;
+ highest_point[0] = point.x;
+ highest_point[1] = point.y;
+ highest_point[2] = point.z;
+ }
+ }
+ /* Store the center point of cylinder */
+ cylinder_params.center_pt[0] = (highest_point[0] + lowest_point[0]) / 2;
+ cylinder_params.center_pt[1] = (highest_point[1] + lowest_point[1]) / 2;
+ cylinder_params.center_pt[2] = (highest_point[2] + lowest_point[2]) / 2;
+ /* Store the height of cylinder */
+ cylinder_params.height =
+ sqrt(pow((lowest_point[0] - highest_point[0]), 2) + pow((lowest_point[1] - highest_point[1]), 2) +
+ pow((lowest_point[2] - highest_point[2]), 2));
+ // END_SUB_TUTORIAL
+ }
+
+ /** \brief Given a pointcloud extract the ROI defined by the user.
+ @param cloud - Pointcloud whose ROI needs to be extracted. */
+ void passThroughFilter(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud)
+ {
+ pcl::PassThrough<pcl::PointXYZ> pass;
+ pass.setInputCloud(cloud);
+ pass.setFilterFieldName("z");
+ // min and max values in z axis to keep
+ pass.setFilterLimits(0.3, 1.1);
+ pass.filter(*cloud);
+ }
+
+ /** \brief Given the pointcloud and pointer cloud_normals compute the point normals and store in cloud_normals.
+ @param cloud - Pointcloud.
+ @param cloud_normals - The point normals once computer will be stored in this. */
+ void computeNormals(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud,
+ const pcl::PointCloud<pcl::Normal>::Ptr& cloud_normals)
+ {
+ pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>());
+ pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
+ ne.setSearchMethod(tree);
+ ne.setInputCloud(cloud);
+ // Set the number of k nearest neighbors to use for the feature estimation.
+ ne.setKSearch(50);
+ std::cout << cloud_normals->size() << std::endl;
+ ne.compute(*cloud_normals);
+ }
+
+ /** \brief Given the point normals and point indices, extract the normals for the indices.
+ @param cloud_normals - Point normals.
+ @param inliers_plane - Indices whose normals need to be extracted. */
+ void extractNormals(const pcl::PointCloud<pcl::Normal>::Ptr& cloud_normals,
+ const pcl::PointIndices::Ptr& inliers_plane)
+ {
+ pcl::ExtractIndices<pcl::Normal> extract_normals;
+ extract_normals.setNegative(true);
+ extract_normals.setInputCloud(cloud_normals);
+ extract_normals.setIndices(inliers_plane);
+ extract_normals.filter(*cloud_normals);
+ }
+
+ /** \brief Given the pointcloud and indices of the plane, remove the planar region from the pointcloud.
+ @param cloud - Pointcloud.
+ @param inliers_plane - Indices representing the plane. */
+ void removePlaneSurface(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud, const pcl::PointIndices::Ptr& inliers_plane)
+ {
+ // create a SAC segmenter without using normals
+ pcl::SACSegmentation<pcl::PointXYZ> segmentor;
+ segmentor.setOptimizeCoefficients(true);
+ segmentor.setModelType(pcl::SACMODEL_PLANE);
+ segmentor.setMethodType(pcl::SAC_RANSAC);
+ /* run at max 1000 iterations before giving up */
+ segmentor.setMaxIterations(1000);
+ /* tolerance for variation from model */
+ segmentor.setDistanceThreshold(0.01);
+ segmentor.setInputCloud(cloud);
+ /* Create the segmentation object for the planar model and set all the parameters */
+ pcl::ModelCoefficients::Ptr coefficients_plane(new pcl::ModelCoefficients);
+ segmentor.segment(*inliers_plane, *coefficients_plane);
+ /* Extract the planar inliers from the input cloud */
+ pcl::ExtractIndices<pcl::PointXYZ> extract_indices;
+ extract_indices.setInputCloud(cloud);
+ extract_indices.setIndices(inliers_plane);
+ /* Remove the planar inliers, extract the rest */
+ extract_indices.setNegative(true);
+ extract_indices.filter(*cloud);
+ }
+
+ /** \brief Given the pointcloud, pointer to pcl::ModelCoefficients and point normals extract the cylinder from the
+ pointcloud and store the cylinder parameters in coefficients_cylinder.
+ @param cloud - Pointcloud whose plane is removed.
+ @param coefficients_cylinder - Cylinder parameters used to define an infinite cylinder will be stored here.
+ @param cloud_normals - Point normals corresponding to the plane on which cylinder is kept */
+ void extractCylinder(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud,
+ const pcl::ModelCoefficients::Ptr& coefficients_cylinder,
+ const pcl::PointCloud<pcl::Normal>::Ptr& cloud_normals)
+ {
+ // Create the segmentation object for cylinder segmentation and set all the parameters
+ pcl::SACSegmentationFromNormals<pcl::PointXYZ, pcl::Normal> segmentor;
+ pcl::PointIndices::Ptr inliers_cylinder(new pcl::PointIndices);
+ segmentor.setOptimizeCoefficients(true);
+ segmentor.setModelType(pcl::SACMODEL_CYLINDER);
+ segmentor.setMethodType(pcl::SAC_RANSAC);
+ // Set the normal angular distance weight
+ segmentor.setNormalDistanceWeight(0.1);
+ // run at max 1000 iterations before giving up
+ segmentor.setMaxIterations(1000);
+ // tolerance for variation from model
+ segmentor.setDistanceThreshold(0.008);
+ // min max values of radius in meters to consider
+ segmentor.setRadiusLimits(0.01, 0.1);
+ segmentor.setInputCloud(cloud);
+ segmentor.setInputNormals(cloud_normals);
+
+ // Obtain the cylinder inliers and coefficients
+ segmentor.segment(*inliers_cylinder, *coefficients_cylinder);
+
+ // Extract the cylinder inliers from the input cloud
+ pcl::ExtractIndices<pcl::PointXYZ> extract;
+ extract.setInputCloud(cloud);
+ extract.setIndices(inliers_cylinder);
+ extract.setNegative(false);
+ extract.filter(*cloud);
+ }
+
+ void cloudCB(const sensor_msgs::PointCloud2ConstPtr& input)
+ {
+ // BEGIN_SUB_TUTORIAL callback
+ //
+ // Perception Related
+ // ^^^^^^^^^^^^^^^^^^
+ // This section uses a standard PCL-based processing pipeline to estimate a cylinder's pose in the point cloud.
+ //
+ // First, we convert from sensor_msgs to pcl::PointXYZ which is needed for most of the processing.
+ ROS_INFO("DENIYORUM");
+ pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
+ pcl::fromROSMsg(*input, *cloud);
+ // Use a passthrough filter to get the region of interest.
+ // The filter removes points outside the specified range.
+ passThroughFilter(cloud);
+ // Compute point normals for later sample consensus step.
+ pcl::PointCloud<pcl::Normal>::Ptr cloud_normals(new pcl::PointCloud<pcl::Normal>);
+ computeNormals(cloud, cloud_normals);
+ // inliers_plane will hold the indices of the point cloud that correspond to a plane.
+ pcl::PointIndices::Ptr inliers_plane(new pcl::PointIndices);
+ // Detect and remove points on the (planar) surface on which the cylinder is resting.
+ removePlaneSurface(cloud, inliers_plane);
+ // Remove surface points from normals as well
+ extractNormals(cloud_normals, inliers_plane);
+ // ModelCoefficients will hold the parameters using which we can define a cylinder of infinite length.
+ // It has a public attribute |code_start| values\ |code_end| of type |code_start| std::vector\ |code_end|\ .
+ // |br|
+ // |code_start| values[0-2]\ |code_end| hold a point on the center line of the cylinder. |br|
+ // |code_start| values[3-5]\ |code_end| hold direction vector of the z-axis. |br|
+ // |code_start| values[6]\ |code_end| is the radius of the cylinder.
+ pcl::ModelCoefficients::Ptr coefficients_cylinder(new pcl::ModelCoefficients);
+ /* Extract the cylinder using SACSegmentation. */
+ extractCylinder(cloud, coefficients_cylinder, cloud_normals);
+ // END_SUB_TUTORIAL
+ if (cloud->points.empty() || coefficients_cylinder->values.size() != 7)
+ {
+ ROS_ERROR_STREAM_NAMED("cylinder_segment", "Can't find the cylindrical component.");
+ return;
+ }
+
+ ROS_INFO("Detected Cylinder - Adding CollisionObject to PlanningScene");
+
+ // BEGIN_TUTORIAL
+ // CALL_SUB_TUTORIAL callback
+ //
+ // Storing Relevant Cylinder Values
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ // The information that we have in |code_start| coefficients_cylinder\ |code_end| is not enough to define our
+ // cylinder.
+ // It does not have the actual location of the cylinder nor the actual height. |br|
+ // We define a struct to hold the parameters that are actually needed for defining a collision object completely.
+ // |br|
+ // CALL_SUB_TUTORIAL param_struct
+ /* Store the radius of the cylinder. */
+ cylinder_params.radius = coefficients_cylinder->values[6];
+ /* Store direction vector of z-axis of cylinder. */
+ cylinder_params.direction_vec[0] = coefficients_cylinder->values[3];
+ cylinder_params.direction_vec[1] = coefficients_cylinder->values[4];
+ cylinder_params.direction_vec[2] = coefficients_cylinder->values[5];
+ //
+ // Extracting Location and Height
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ // Compute the center point of the cylinder using standard geometry
+ extractLocationHeight(cloud);
+ // CALL_SUB_TUTORIAL extract_location_height
+ // Use the parameters extracted to add the cylinder to the planning scene as a collision object.
+ addCylinder();
+ // CALL_SUB_TUTORIAL add_cylinder
+ // END_TUTORIAL
+ }
+
+private:
+ // BEGIN_SUB_TUTORIAL param_struct
+ // There are 4 fields and a total of 7 parameters used to define this.
+ struct AddCylinderParams
+ {
+ /* Radius of the cylinder. */
+ double radius;
+ /* Direction vector along the z-axis of the cylinder. */
+ double direction_vec[3];
+ /* Center point of the cylinder. */
+ double center_pt[3];
+ /* Height of the cylinder. */
+ double height;
+ };
+ // Declare a variable of type AddCylinderParams and store relevant values from ModelCoefficients.
+ AddCylinderParams cylinder_params;
+ // END_SUB_TUTORIAL
+};
+
+int main(int argc, char** argv)
+{
+ // Initialize ROS
+ ros::init(argc, argv, "cylinder_segment");
+ ros::NodeHandle nh;
+ // Start the segmentor
+ CylinderSegment segmentor(nh);
+
+ // Spin
+ ros::spin();
+}
diff --git a/air_moveit_config/src/sub_col.cpp b/air_moveit_config/src/sub_col.cpp
new file mode 100644
index 0000000..4aaaae6
--- /dev/null
+++ b/air_moveit_config/src/sub_col.cpp
@@ -0,0 +1,27 @@
+#include
+#include
+#include
+
+void planningSceneWorldCallback(const moveit_msgs::PlanningScene::ConstPtr& msg)
+{
+ if (msg->world.collision_objects.size() > 0) {
+ geometry_msgs::Point position = msg->world.collision_objects[0].pose.position;
+ ROS_INFO("Received position: (%f, %f, %f)", position.x, position.y, position.z);
+ }
+ else{
+ ROS_INFO("No collision object detected in planning scene!");
+ }
+}
+
+int main(int argc, char** argv)
+{
+ ros::init(argc, argv, "planning_scene_world_subscriber");
+ ros::NodeHandle nh;
+
+ // Subscribe to the PlanningSceneWorld topic
+ ros::Subscriber sub = nh.subscribe("/move_group/monitored_planning_scene", 10, planningSceneWorldCallback);
+
+ ros::spin();
+
+ return 0;
+}
\ No newline at end of file
diff --git a/qb_hand_moveit_config b/qb_hand_moveit_config
new file mode 160000
index 0000000..d4d0222
--- /dev/null
+++ b/qb_hand_moveit_config
@@ -0,0 +1 @@
+Subproject commit d4d02227b5e6194b9fe326eeab4712d2c3a7968e
diff --git a/qbdevice-ros/qb_device_bringup/launch/robot_control_node_bringup.launch b/qbdevice-ros/qb_device_bringup/launch/robot_control_node_bringup.launch
index 2e0c067..2425799 100644
--- a/qbdevice-ros/qb_device_bringup/launch/robot_control_node_bringup.launch
+++ b/qbdevice-ros/qb_device_bringup/launch/robot_control_node_bringup.launch
@@ -2,8 +2,9 @@
+
-
+
robot_activate_control: $(arg robot_activate_control)
diff --git a/qbdevice-ros/qb_device_bringup/launch/robot_opt_bringup.launch b/qbdevice-ros/qb_device_bringup/launch/robot_opt_bringup.launch
index adb508c..ecba05e 100644
--- a/qbdevice-ros/qb_device_bringup/launch/robot_opt_bringup.launch
+++ b/qbdevice-ros/qb_device_bringup/launch/robot_opt_bringup.launch
@@ -1,7 +1,7 @@
-
+
diff --git a/qbhand-ros/qb_hand_control/launch/control_qbhand.launch b/qbhand-ros/qb_hand_control/launch/control_qbhand.launch
index 079c40d..39a1136 100644
--- a/qbhand-ros/qb_hand_control/launch/control_qbhand.launch
+++ b/qbhand-ros/qb_hand_control/launch/control_qbhand.launch
@@ -23,11 +23,11 @@
-
+
-
+
-
+
diff --git a/qbhand-ros/qb_hand_gazebo/CMakeLists.txt b/qbhand-ros/qb_hand_gazebo/CMakeLists.txt
deleted file mode 100644
index d194497..0000000
--- a/qbhand-ros/qb_hand_gazebo/CMakeLists.txt
+++ /dev/null
@@ -1,114 +0,0 @@
-cmake_minimum_required(VERSION 3.0.2 FATAL_ERROR)
-project(qb_hand_gazebo VERSION 3.0.2 LANGUAGES CXX)
-
-# Dependency Settings
-find_package(catkin REQUIRED
- COMPONENTS
- roscpp
- gazebo_ros_control
- controller_manager
- qb_device_gazebo
- qb_device_hardware_interface
-)
-find_package(gazebo REQUIRED)
-
-include_directories(include
- ${catkin_INCLUDE_DIRS}
- ${GAZEBO_INCLUDE_DIRS}
-)
-
-link_directories(${GAZEBO_LIBRARY_DIRS})
-
-catkin_package(
- INCLUDE_DIRS
- include
- LIBRARIES
- qb_hand_gazebo_plugin
- qb_hand_gazebo_hardware_interface
- CATKIN_DEPENDS
- roscpp
- controller_manager
- gazebo_ros_control
- qb_device_gazebo
- qb_device_hardware_interface
-)
-
-# Exported libraries
-add_library(qb_hand_gazebo_plugin
- src/qb_hand_gazebo_plugin.cpp
-)
-
-target_link_libraries(qb_hand_gazebo_plugin
- PUBLIC
- ${catkin_LIBRARIES}
- ${GAZEBO_LIBRARIES}
- qb_hand_gazebo_hardware_interface
-)
-
-add_dependencies(qb_hand_gazebo_plugin
- ${${PROJECT_NAME}_EXPORTED_TARGETS}
- ${catkin_EXPORTED_TARGETS}
-)
-
-add_library(qb_hand_gazebo_hardware_interface
- src/qb_hand_gazebo_hardware_interface.cpp
-)
-
-target_link_libraries(qb_hand_gazebo_plugin
- PUBLIC
- ${catkin_LIBRARIES}
- ${GAZEBO_LIBRARIES}
-)
-
-add_dependencies(qb_hand_gazebo_plugin
- ${${PROJECT_NAME}_EXPORTED_TARGETS}
- ${catkin_EXPORTED_TARGETS}
-)
-
-# C++ Settings
-if (CMAKE_VERSION VERSION_LESS "3.1")
- target_compile_options(qb_hand_gazebo_plugin
- PUBLIC
- "-std=c++17"
- )
- target_compile_options(qb_hand_gazebo_hardware_interface
- PUBLIC
- "-std=c++17"
- )
-else ()
- set_property(
- TARGET
- qb_hand_gazebo_plugin
- qb_hand_gazebo_hardware_interface
- PROPERTY CXX_STANDARD 17
- )
-
- set_property(
- TARGET
- qb_hand_gazebo_plugin
- qb_hand_gazebo_hardware_interface
- PROPERTY CXX_STANDARD_REQUIRED ON
- )
-endif ()
-
-# Installation
-install(
- TARGETS qb_hand_gazebo_plugin qb_hand_gazebo_hardware_interface
- ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
- LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
- RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
-)
-
-install(
- DIRECTORY
- include/qb_hand_gazebo/
- DESTINATION
- ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
-)
-
-install(
- FILES
- plugin.xml
- DESTINATION
- ${CATKIN_PACKAGE_SHARE_DESTINATION}
-)
diff --git a/qbhand-ros/qb_hand_gazebo/include/qb_hand_gazebo/qb_hand_gazebo_hardware_interface.h b/qbhand-ros/qb_hand_gazebo/include/qb_hand_gazebo/qb_hand_gazebo_hardware_interface.h
deleted file mode 100644
index 5e91ff2..0000000
--- a/qbhand-ros/qb_hand_gazebo/include/qb_hand_gazebo/qb_hand_gazebo_hardware_interface.h
+++ /dev/null
@@ -1,99 +0,0 @@
-/***
- * Software License Agreement: BSD 3-Clause License
- *
- * Copyright (c) 2016-2018, qbrobotics®
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
- * following conditions are met:
- *
- * * Redistributions of source code must retain the above copyright notice, this list of conditions and the
- * following disclaimer.
- *
- * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
- * following disclaimer in the documentation and/or other materials provided with the distribution.
- *
- * * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
- * products derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
- * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
- * USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef QB_HAND_GAZEBO_HARDWARE_INTERFACE_H
-#define QB_HAND_GAZEBO_HARDWARE_INTERFACE_H
-
-// ROS libraries
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-
-// internal libraries
-#include
-#include
-#include
-#include
-
-namespace qb_hand_gazebo_hardware_interface {
-
-template<typename T>
-T clamp(const T &value, const T &absolute) {
- return std::max(std::min(value, std::abs(absolute)), -std::abs(absolute));
- // it is worth noticing that:
- // std::min(NaN, const_value) = NaN
- // std::min(const_value, NaN) = const_value
-}
-template<typename T>
-T clamp(const T &value, const T &lower, const T &upper) {
- return std::max(std::min(value, upper), lower);
- // it is worth noticing that:
- // std::min(NaN, const_value) = NaN
- // std::min(const_value, NaN) = const_value
-}
-bool startsWith(const std::string &string, const std::string &prefix) {
- return string.size() >= prefix.size() && string.compare(0, prefix.size(), prefix) == 0;
-}
-std::string trailNamespace(const std::string &string) {
- auto pos = string.find_last_of('/');
- if (pos == std::string::npos) {
- return string;
- }
- return string.substr(pos+1);
-}
-
-class qbHandHWSim : public gazebo_ros_control::RobotHWSim {
- public:
- qbHandHWSim() = default;
- ~qbHandHWSim() override = default;
-
-  bool initSim(const std::string &robot_namespace, ros::NodeHandle model_nh, gazebo::physics::ModelPtr parent_model, const urdf::Model *const urdf_model, std::vector<transmission_interface::TransmissionInfo> transmissions) override;
- void readSim(ros::Time time, ros::Duration period) override;
- void writeSim(ros::Time time, ros::Duration period) override;
-
- private:
- ros::NodeHandle model_nh_;
- urdf::Model urdf_model_;
- qb_device_hardware_interface::qbDeviceHWResources joints_;
- qb_device_hardware_interface::qbDeviceHWInterfaces interfaces_;
- qb_device_joint_limits_interface::qbDeviceJointLimitsResources joint_limits_;
-  std::vector<gazebo::physics::JointPtr> sim_joints_;
-};
-typedef std::shared_ptr<qbHandHWSim> qbHandHWSimPtr;
-} // namespace qb_hand_gazebo_hardware_interface
-
-#endif // QB_HAND_GAZEBO_HARDWARE_INTERFACE_H
\ No newline at end of file
diff --git a/qbhand-ros/qb_hand_gazebo/include/qb_hand_gazebo/qb_hand_gazebo_plugin.h b/qbhand-ros/qb_hand_gazebo/include/qb_hand_gazebo/qb_hand_gazebo_plugin.h
deleted file mode 100644
index 5923972..0000000
--- a/qbhand-ros/qb_hand_gazebo/include/qb_hand_gazebo/qb_hand_gazebo_plugin.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/***
- * Software License Agreement: BSD 3-Clause License
- *
- * Copyright (c) 2016-2018, qbrobotics®
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
- * following conditions are met:
- *
- * * Redistributions of source code must retain the above copyright notice, this list of conditions and the
- * following disclaimer.
- *
- * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
- * following disclaimer in the documentation and/or other materials provided with the distribution.
- *
- * * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
- * products derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
- * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
- * USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef QB_HAND_GAZEBO_PLUGIN_H
-#define QB_HAND_GAZEBO_PLUGIN_H
-
-// ROS libraries
-#include
-#include
-#include
-
-// Gazebo libraries
-#include
-#include
-
-// internal libraries
-#include
-#include
-
-namespace qb_hand_gazebo_plugin {
-class qbHandGazeboPlugin : public gazebo::ModelPlugin {
-
- public:
- qbHandGazeboPlugin() : ModelPlugin() {}
- ~qbHandGazeboPlugin() override;
- void Load(gazebo::physics::ModelPtr parent, sdf::ElementPtr sdf) override;
- void Update(const gazebo::common::UpdateInfo &info);
-
- private:
- std::string getURDF(const std::string ¶m_name);
- bool parseTransmissionsFromURDF(const std::string &urdf_string);
-
- gazebo::event::ConnectionPtr update_connection_;
- gazebo::physics::ModelPtr parent_model_;
- sdf::ElementPtr sdf_;
- ros::NodeHandle model_nh_;
- ros::NodeHandle model_nh_control_;
- ros::Duration control_period_;
- ros::Time last_sim_time_ros_;
-  std::vector<transmission_interface::TransmissionInfo> transmissions_;
-  std::shared_ptr<qb_hand_gazebo_hardware_interface::qbHandHWSim> robot_hw_sim_;
-  std::shared_ptr<controller_manager::ControllerManager> controller_manager_;
- std::string robot_description_;
- std::string robot_hw_sim_name_;
-};
-} // namespace qb_hand_gazebo_plugin
-
-#endif // QB_HAND_GAZEBO_PLUGIN_H
\ No newline at end of file
diff --git a/qbhand-ros/qb_hand_gazebo/package.xml b/qbhand-ros/qb_hand_gazebo/package.xml
deleted file mode 100644
index fb6458a..0000000
--- a/qbhand-ros/qb_hand_gazebo/package.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-
- qb_hand_gazebo
- 3.0.2
-
- This package contains the Gazebo ROS control plugins for qbrobotics® SoftHand device.
-
- qbrobotics®
- Alessandro Tondo
- BSD 3-Clause
-
- https://bitbucket.org/qbrobotics/qbhand-ros/issues
- https://bitbucket.org/qbrobotics/qbhand-ros
- http://wiki.ros.org/qb_hand_gazebo
-
- catkin
- roscpp
- gazebo_ros_control
- controller_manager
- qb_device_gazebo
- qb_device_hardware_interface
-
-
-
-
-
-
- doxygen
-
\ No newline at end of file
diff --git a/qbhand-ros/qb_hand_gazebo/plugin.xml b/qbhand-ros/qb_hand_gazebo/plugin.xml
deleted file mode 100644
index dc51498..0000000
--- a/qbhand-ros/qb_hand_gazebo/plugin.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
- A type of RobotHWSim
-
-
-
-
\ No newline at end of file
diff --git a/qbhand-ros/qb_hand_gazebo/src/qb_hand_gazebo_hardware_interface.cpp b/qbhand-ros/qb_hand_gazebo/src/qb_hand_gazebo_hardware_interface.cpp
deleted file mode 100644
index a0c1c71..0000000
--- a/qbhand-ros/qb_hand_gazebo/src/qb_hand_gazebo_hardware_interface.cpp
+++ /dev/null
@@ -1,97 +0,0 @@
-/***
- * Software License Agreement: BSD 3-Clause License
- *
- * Copyright (c) 2016-2018, qbrobotics®
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
- * following conditions are met:
- *
- * * Redistributions of source code must retain the above copyright notice, this list of conditions and the
- * following disclaimer.
- *
- * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
- * following disclaimer in the documentation and/or other materials provided with the distribution.
- *
- * * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
- * products derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
- * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
- * USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include
-
-using namespace qb_hand_gazebo_hardware_interface;
-
-bool qbHandHWSim::initSim(const std::string &robot_namespace, ros::NodeHandle model_nh, gazebo::physics::ModelPtr parent_model, const urdf::Model *const urdf_model, std::vector transmissions) {
- model_nh_ = ros::NodeHandle(robot_namespace);
- urdf_model_ = *urdf_model;
-
- std::vector joint_names;
- for (auto const &transmission : transmissions) {
- if (!startsWith(transmission.name_, trailNamespace(model_nh.getNamespace()))) {
- continue; // select only joints of the specific qb SoftHand
- }
-
- ROS_INFO_STREAM_NAMED("qb_hand_gazebo_hardware_interface","Initializing qbHandHWSim of '" << model_nh.getNamespace() << "'...");
- for (auto const &joint : transmission.joints_) {
- gazebo::physics::JointPtr sim_joint = parent_model->GetJoint(joint.name_);
- if (!sim_joint) {
- ROS_ERROR_STREAM_NAMED("qb_hand_gazebo_hardware_interface","This robot has a joint named '" << joint.name_ << "' which is not in the gazebo model.");
- return false;
- }
- sim_joints_.push_back(sim_joint);
- joint_names.push_back(joint.name_);
- ROS_INFO_STREAM_NAMED("qb_hand_gazebo_hardware_interface"," * Added joint '" << joint.name_ << "'.");
- }
- }
- joints_.setJoints(joint_names);
-
- //TODO: add a check on the number on transmission and check if multiple qb SoftHands can be spawned
- if (joints_.names.size() != 34 && joints_.names.size() != 35) { // considering also the virtual transmissions (34 joints for SoftHand and 35 joints for SoftHand 2 Motors)
- ROS_ERROR_STREAM_NAMED("qb_hand_gazebo_hardware_interface","Wrong number of joints [" << joints_.names.size() << "]");
- return false;
- }
-
- interfaces_.initialize(this, joints_);
- joint_limits_.initialize(model_nh_, joints_, urdf_model_, interfaces_.joint_position);
- return true;
-}
-
-void qbHandHWSim::readSim(ros::Time time, ros::Duration period) {
-  for (int i=0; i<sim_joints_.size(); i++) {
-#if GAZEBO_MAJOR_VERSION >= 8
- double position = sim_joints_.at(i)->Position(0);
-#else
- double position = sim_joints_.at(i)->GetAngle(0).Radian();
-#endif
- // read joints data from Gazebo
- joints_.positions.at(i) += angles::shortest_angular_distance(joints_.positions.at(i), position);
- joints_.velocities.at(i) = sim_joints_.at(i)->GetVelocity(0);
- joints_.efforts.at(i) = sim_joints_.at(i)->GetForce(0);
- }
-}
-
-void qbHandHWSim::writeSim(ros::Time time, ros::Duration period) {
- // enforce joint limits for all registered interfaces
- joint_limits_.enforceLimits(period);
-
- // send joint commands to Gazebo
- if (joints_.names.size() == 34) { // SoftHand
- for (int i = 0; i < sim_joints_.size(); i++) {
- sim_joints_.at(i)->SetForce(0, 0); //FIXME: use real dynamics
- }
- } else { // SoftHand 2 Motors
- for (int i = 0; i < sim_joints_.size(); i++) {
- sim_joints_.at(i)->SetForce(0, 0); //FIXME: use real dynamics
- }
- }
-}
-
-PLUGINLIB_EXPORT_CLASS(qb_hand_gazebo_hardware_interface::qbHandHWSim, gazebo_ros_control::RobotHWSim);
\ No newline at end of file
diff --git a/qbhand-ros/qb_hand_gazebo/src/qb_hand_gazebo_plugin.cpp b/qbhand-ros/qb_hand_gazebo/src/qb_hand_gazebo_plugin.cpp
deleted file mode 100644
index a667131..0000000
--- a/qbhand-ros/qb_hand_gazebo/src/qb_hand_gazebo_plugin.cpp
+++ /dev/null
@@ -1,122 +0,0 @@
-/***
- * Software License Agreement: BSD 3-Clause License
- *
- * Copyright (c) 2016-2018, qbrobotics®
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
- * following conditions are met:
- *
- * * Redistributions of source code must retain the above copyright notice, this list of conditions and the
- * following disclaimer.
- *
- * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
- * following disclaimer in the documentation and/or other materials provided with the distribution.
- *
- * * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
- * products derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
- * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
- * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
- * USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include
-
-using namespace qb_hand_gazebo_plugin;
-
-qbHandGazeboPlugin::~qbHandGazeboPlugin() {
- update_connection_.reset();
-}
-
-void qbHandGazeboPlugin::Load(gazebo::physics::ModelPtr parent, sdf::ElementPtr sdf) {
- ROS_INFO_STREAM_NAMED("qb_hand_gazebo_plugin","Loading qb SoftHand Gazebo plugin...");
- parent_model_ = parent;
- sdf_ = sdf;
-
- if (!parent_model_) {
- ROS_ERROR_STREAM_NAMED("qb_hand_gazebo_plugin","Parent model is null.");
- return;
- }
- if(!ros::isInitialized()) {
- ROS_FATAL_STREAM_NAMED("qb_hand_gazebo_plugin","Unable to load qb SoftHand Gazebo plugin: a ROS node for Gazebo has not been initialized yet.");
- return;
- }
-
-#if GAZEBO_MAJOR_VERSION >= 8
- ros::Duration gazebo_period(parent_model_->GetWorld()->Physics()->GetMaxStepSize());
-#else
- ros::Duration gazebo_period(parent_model_->GetWorld()->GetPhysicsEngine()->GetMaxStepSize());
-#endif
-  model_nh_ = ros::NodeHandle(sdf_->HasElement("robotName") ? sdf_->Get<std::string>("robotName") : parent_model_->GetName());
- model_nh_control_ = ros::NodeHandle(model_nh_, "control");
- robot_description_ = sdf_->HasElement("robotDescription") ? sdf_->Get("robotDescription") : "robot_description";
- control_period_ = sdf_->HasElement("controlPeriod") ? ros::Duration(sdf_->Get("controlPeriod")) : gazebo_period;
- if (control_period_ < gazebo_period) {
- ROS_WARN_STREAM_NAMED("qb_hand_gazebo_plugin","Desired controller update period (" << control_period_ << " s) is faster than the Gazebo simulation period (" << gazebo_period << " s).");
- }
- ROS_INFO_STREAM_NAMED("qb_hand_gazebo_plugin", "Starting qb SoftHand Gazebo plugin in namespace: " << model_nh_.getNamespace());
-
- const std::string urdf_string = getURDF(robot_description_);
- if (!parseTransmissionsFromURDF(urdf_string)) {
- ROS_ERROR_STREAM_NAMED("qb_hand_gazebo_plugin", "Error while parsing transmissions in the URDF for the qb SoftHand Gazebo plugin.");
- return;
- }
- urdf::Model urdf_model;
- const urdf::Model *const urdf_model_ptr = urdf_model.initString(urdf_string) ? &urdf_model : nullptr;
- if (!urdf_model_ptr){
- ROS_FATAL_STREAM_NAMED("qb_hand_gazebo_plugin", "Error while initializing the URDF pointer.");
- return;
- }
-  robot_hw_sim_ = std::make_shared<qb_hand_gazebo_hardware_interface::qbHandHWSim>();
- if (!robot_hw_sim_->initSim(model_nh_.getNamespace(), model_nh_, parent_model_, urdf_model_ptr, transmissions_)) {
- ROS_FATAL_STREAM_NAMED("qb_hand_gazebo_plugin", "Error while initializing the robot simulation interface");
- return;
- }
-
- controller_manager_.reset(new controller_manager::ControllerManager(robot_hw_sim_.get(), model_nh_control_));
- update_connection_ = gazebo::event::Events::ConnectWorldUpdateBegin(std::bind(&qbHandGazeboPlugin::Update, this, std::placeholders::_1));
- ROS_INFO_NAMED("qb_hand_gazebo_plugin", "qb SoftHand Gazebo plugin successfully loaded.");
-}
-
-void qbHandGazeboPlugin::Update(const gazebo::common::UpdateInfo &info) {
- ros::Time sim_time_ros(info.simTime.sec, info.simTime.nsec);
- ros::Duration sim_period_ros = sim_time_ros - last_sim_time_ros_;
- if (sim_period_ros < control_period_) {
- return;
- }
-
- robot_hw_sim_->readSim(sim_time_ros, sim_period_ros);
- controller_manager_->update(sim_time_ros, sim_period_ros);
- robot_hw_sim_->writeSim(sim_time_ros, sim_period_ros);
- last_sim_time_ros_ = sim_time_ros;
-}
-
-std::string qbHandGazeboPlugin::getURDF(const std::string ¶m_name) {
- std::string urdf_string;
-
- while (urdf_string.empty()) {
- std::string search_param_name;
- if (model_nh_.searchParam(param_name, search_param_name)) {
- ROS_INFO_STREAM_ONCE_NAMED("qb_hand_gazebo_plugin", "qb SoftHand Gazebo plugin is waiting for model URDF in parameter [" << search_param_name << "] on the ROS param server.");
- model_nh_.getParam(search_param_name, urdf_string);
- } else {
- ROS_INFO_STREAM_ONCE_NAMED("qb_hand_gazebo_plugin", "qb SoftHand Gazebo plugin is waiting for model URDF in parameter [" << robot_description_ << "] on the ROS param server.");
- model_nh_.getParam(param_name, urdf_string);
- }
-
- usleep(100000);
- }
- ROS_INFO_STREAM_NAMED("qb_hand_gazebo_plugin", "Received URDF from param server, parsing...");
- return urdf_string;
-}
-
-bool qbHandGazeboPlugin::parseTransmissionsFromURDF(const std::string &urdf_string) {
- return transmission_interface::TransmissionParser::parse(urdf_string, transmissions_);
-}
-
-GZ_REGISTER_MODEL_PLUGIN(qbHandGazeboPlugin);
\ No newline at end of file
diff --git a/realsense-ros/realsense2_camera/launch/demo_pointcloud.launch b/realsense-ros/realsense2_camera/launch/demo_pointcloud.launch
index 18084c9..10c6fe0 100644
--- a/realsense-ros/realsense2_camera/launch/demo_pointcloud.launch
+++ b/realsense-ros/realsense2_camera/launch/demo_pointcloud.launch
@@ -1,6 +1,6 @@
-
+
diff --git a/realsense-ros/realsense2_camera/launch/rs_camera.launch b/realsense-ros/realsense2_camera/launch/rs_camera.launch
index cb927b6..48a3fbb 100644
--- a/realsense-ros/realsense2_camera/launch/rs_camera.launch
+++ b/realsense-ros/realsense2_camera/launch/rs_camera.launch
@@ -2,7 +2,7 @@
-
+
@@ -19,11 +19,11 @@
-
-
+
+
-
+
@@ -43,11 +43,11 @@
-
+
-
+
@@ -64,9 +64,9 @@
-
+
-
+ true
diff --git a/yolov7-ros/CMakeLists.txt b/yolov7-ros/CMakeLists.txt
new file mode 100644
index 0000000..e914c71
--- /dev/null
+++ b/yolov7-ros/CMakeLists.txt
@@ -0,0 +1,213 @@
+cmake_minimum_required(VERSION 3.0.2)
+project(yolov7_ros)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED COMPONENTS
+ geometry_msgs
+ roscpp
+ rospy
+ sensor_msgs
+ std_msgs
+ vision_msgs
+ message_generation
+)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+# catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+## * add a build_depend tag for "message_generation"
+## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
+## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+## but can be declared for certainty nonetheless:
+## * add a exec_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+## * add "message_generation" and every package in MSG_DEP_SET to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * add "message_runtime" and every package in MSG_DEP_SET to
+## catkin_package(CATKIN_DEPENDS ...)
+## * uncomment the add_*_files sections below as needed
+## and list every .msg/.srv/.action file to be processed
+## * uncomment the generate_messages entry below
+## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+#add_message_files(
+# FILES
+#)
+
+## Generate services in the 'srv' folder
+# add_service_files(
+# FILES
+# Service1.srv
+# Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+# FILES
+# Action1.action
+# Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+generate_messages(
+ DEPENDENCIES
+ geometry_msgs
+ sensor_msgs
+ std_msgs
+ vision_msgs
+#  yolov7_ros  # removed: a catkin package must not list itself as a message dependency
+)
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+## * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+## * add "dynamic_reconfigure" to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * uncomment the "generate_dynamic_reconfigure_options" section below
+## and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+# cfg/DynReconf1.cfg
+# cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(
+ CATKIN_DEPENDS message_runtime
+)
+# INCLUDE_DIRS include
+# LIBRARIES yolov7_ros
+# CATKIN_DEPENDS geometry_msgs roscpp rospy sensor_msgs std_msgs vision_msgs
+# DEPENDS system_lib
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+# include
+ ${catkin_INCLUDE_DIRS}
+)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+# src/${PROJECT_NAME}/yolov7_ros.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+# add_executable(${PROJECT_NAME}_node src/yolov7_ros_node.cpp)
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names, the following renames the
+## target back to the shorter version for ease of user use
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+# target_link_libraries(${PROJECT_NAME}_node
+# ${catkin_LIBRARIES}
+# )
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# catkin_install_python(PROGRAMS
+# scripts/my_python_script
+# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
+# install(TARGETS ${PROJECT_NAME}_node
+# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark libraries for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
+# install(TARGETS ${PROJECT_NAME}
+# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+# FILES_MATCHING PATTERN "*.h"
+# PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+# # myfile1
+# # myfile2
+# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_yolov7_ros.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
diff --git a/air_moveit_config/scripts/yolov7/LICENSE.md b/yolov7-ros/LICENSE
similarity index 100%
rename from air_moveit_config/scripts/yolov7/LICENSE.md
rename to yolov7-ros/LICENSE
diff --git a/yolov7-ros/README.md b/yolov7-ros/README.md
new file mode 100644
index 0000000..9897dcf
--- /dev/null
+++ b/yolov7-ros/README.md
@@ -0,0 +1,60 @@
+# ROS package for official YOLOv7
+
+![YOLOv7 Berkeley DeepDrive](berkeley_example.png)
+
+This repo contains a ROS noetic package for the official YOLOv7. It wraps the
+[official implementation](https://github.com/WongKinYiu/yolov7) into a ROS node (so most credit
+goes to the YOLOv7 creators).
+
+### Note
+There are currently two YOLOv7 variants out there. This repo contains the
+implementation from the paper [YOLOv7: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors](https://arxiv.org/abs/2207.02696).
+
+## Requirements & Getting Started
+
+Following ROS packages are required:
+- [vision_msgs](http://wiki.ros.org/vision_msgs)
+- [geometry_msgs](http://wiki.ros.org/geometry_msgs)
+
+First, clone the repo into your catkin workspace and build the package:
+```
+git clone https://github.com/lukazso/yolov7-ros.git ~/catkin_ws/src/
+cd ~/catkin_ws
+catkin build yolov7_ros
+```
+
+The Python requirements are listed in the `requirements.txt`. You can simply
+install them as
+```
+pip install -r requirements.txt
+```
+
+Download the YOLOv7 weights from the [official repository](https://github.com/WongKinYiu/yolov7).
+
+**Berkeley DeepDrive weights:** I trained YoloV7 with a basic hyperparameter set (no special hyperparameter optimization) on the [Berkeley DeepDrive dataset](https://bdd-data.berkeley.edu/). You can download the weights [here](https://drive.google.com/drive/folders/1OfC1dQx2db0dmmQA15_WScUptbYcfsZ8?usp=sharing).
+
+The package has been tested under Ubuntu 20.04 and Python 3.8.10.
+
+## Usage
+Before you launch the node, adjust the parameters in the
+[launch file](launch/yolov7.launch). For example, you need to set the path to your
+YOLOv7 weights and the image topic to which this node should listen. The launch
+file also contains a description for each parameter.
+
+```
+roslaunch yolov7_ros yolov7.launch
+```
+
+Each time a new image is received it is then fed into YOLOv7.
+
+## Visualization
+You can visualize the yolo results if you set the `visualize` flag in the [launch file](launch/yolov7.launch). Also, with the `classes_path` parameter you can provide a `.txt` file with the class labels. An example file is provided in [berkeley.txt](class_labels/berkeley.txt) or [coco.txt](class_labels/coco.txt).
+
+### Notes
+- The detections are published using the [vision_msgs/Detection2DArray](http://docs.ros.org/en/api/vision_msgs/html/msg/Detection2DArray.html) message type.
+- The detections will be published under `/yolov7/out_topic`.
+- If you set the `visualize` parameter to `true`, the detections will be drawn into
+ the image, which is then published under `/yolov7/out_topic/visualization`.
+
+## Coming Soon
+- ROS2 implementation
diff --git a/yolov7-ros/class_labels/berkeley.txt b/yolov7-ros/class_labels/berkeley.txt
new file mode 100644
index 0000000..1c8463b
--- /dev/null
+++ b/yolov7-ros/class_labels/berkeley.txt
@@ -0,0 +1,8 @@
+car
+person
+mouse
+bottle
+vase
+keyboard
+chair
+couch
\ No newline at end of file
diff --git a/yolov7-ros/class_labels/coco.txt b/yolov7-ros/class_labels/coco.txt
new file mode 100644
index 0000000..1f42c8e
--- /dev/null
+++ b/yolov7-ros/class_labels/coco.txt
@@ -0,0 +1,80 @@
+person
+bicycle
+car
+motorcycle
+airplane
+bus
+train
+truck
+boat
+traffic light
+fire hydrant
+stop sign
+parking meter
+bench
+bird
+cat
+dog
+horse
+sheep
+cow
+elephant
+bear
+zebra
+giraffe
+backpack
+umbrella
+handbag
+tie
+suitcase
+frisbee
+skis
+snowboard
+sports ball
+kite
+baseball bat
+baseball glove
+skateboard
+surfboard
+tennis racket
+bottle
+wine glass
+cup
+fork
+knife
+spoon
+bowl
+banana
+apple
+sandwich
+orange
+broccoli
+carrot
+hot dog
+pizza
+donut
+cake
+chair
+couch
+potted plant
+bed
+dining table
+toilet
+tv
+laptop
+mouse
+remote
+keyboard
+cell phone
+microwave
+oven
+toaster
+sink
+refrigerator
+book
+clock
+vase
+scissors
+teddy bear
+hair drier
+toothbrush
\ No newline at end of file
diff --git a/yolov7-ros/launch/yolov7.launch b/yolov7-ros/launch/yolov7.launch
new file mode 100644
index 0000000..dc0fe7a
--- /dev/null
+++ b/yolov7-ros/launch/yolov7.launch
@@ -0,0 +1,27 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/yolov7-ros/package.xml b/yolov7-ros/package.xml
new file mode 100644
index 0000000..1fa91ca
--- /dev/null
+++ b/yolov7-ros/package.xml
@@ -0,0 +1,77 @@
+
+
+ yolov7_ros
+ 0.0.0
+ The yolov7_ros package
+
+
+
+
+ ros
+
+
+
+
+
+ TODO
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ message_generation
+
+
+
+
+
+ message_runtime
+
+
+
+
+ catkin
+ geometry_msgs
+ roscpp
+ rospy
+ sensor_msgs
+ std_msgs
+ vision_msgs
+ geometry_msgs
+ roscpp
+ rospy
+ sensor_msgs
+ std_msgs
+ vision_msgs
+ geometry_msgs
+ roscpp
+ rospy
+ sensor_msgs
+ std_msgs
+ vision_msgs
+
+
+
+
+
+
+
+
diff --git a/yolov7-ros/requirements.txt b/yolov7-ros/requirements.txt
new file mode 100644
index 0000000..cf2104e
--- /dev/null
+++ b/yolov7-ros/requirements.txt
@@ -0,0 +1,8 @@
+torch>=1.10.0
+torchvision
+opencv-python
+seaborn
+thop
+requests
+tqdm
+pyyaml
diff --git a/yolov7-ros/src/__init__.py b/yolov7-ros/src/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/air_moveit_config/scripts/yolov7/cfg/baseline/r50-csp.yaml b/yolov7-ros/src/cfg/baseline/r50-csp.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/baseline/r50-csp.yaml
rename to yolov7-ros/src/cfg/baseline/r50-csp.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/baseline/x50-csp.yaml b/yolov7-ros/src/cfg/baseline/x50-csp.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/baseline/x50-csp.yaml
rename to yolov7-ros/src/cfg/baseline/x50-csp.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/baseline/yolor-csp-x.yaml b/yolov7-ros/src/cfg/baseline/yolor-csp-x.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/baseline/yolor-csp-x.yaml
rename to yolov7-ros/src/cfg/baseline/yolor-csp-x.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/baseline/yolor-csp.yaml b/yolov7-ros/src/cfg/baseline/yolor-csp.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/baseline/yolor-csp.yaml
rename to yolov7-ros/src/cfg/baseline/yolor-csp.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/baseline/yolor-d6.yaml b/yolov7-ros/src/cfg/baseline/yolor-d6.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/baseline/yolor-d6.yaml
rename to yolov7-ros/src/cfg/baseline/yolor-d6.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/baseline/yolor-e6.yaml b/yolov7-ros/src/cfg/baseline/yolor-e6.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/baseline/yolor-e6.yaml
rename to yolov7-ros/src/cfg/baseline/yolor-e6.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/baseline/yolor-p6.yaml b/yolov7-ros/src/cfg/baseline/yolor-p6.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/baseline/yolor-p6.yaml
rename to yolov7-ros/src/cfg/baseline/yolor-p6.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/baseline/yolor-w6.yaml b/yolov7-ros/src/cfg/baseline/yolor-w6.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/baseline/yolor-w6.yaml
rename to yolov7-ros/src/cfg/baseline/yolor-w6.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/baseline/yolov3-spp.yaml b/yolov7-ros/src/cfg/baseline/yolov3-spp.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/baseline/yolov3-spp.yaml
rename to yolov7-ros/src/cfg/baseline/yolov3-spp.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/baseline/yolov3.yaml b/yolov7-ros/src/cfg/baseline/yolov3.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/baseline/yolov3.yaml
rename to yolov7-ros/src/cfg/baseline/yolov3.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/baseline/yolov4-csp.yaml b/yolov7-ros/src/cfg/baseline/yolov4-csp.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/baseline/yolov4-csp.yaml
rename to yolov7-ros/src/cfg/baseline/yolov4-csp.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/deploy/yolov7-d6.yaml b/yolov7-ros/src/cfg/deploy/yolov7-d6.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/deploy/yolov7-d6.yaml
rename to yolov7-ros/src/cfg/deploy/yolov7-d6.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/deploy/yolov7-e6.yaml b/yolov7-ros/src/cfg/deploy/yolov7-e6.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/deploy/yolov7-e6.yaml
rename to yolov7-ros/src/cfg/deploy/yolov7-e6.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/deploy/yolov7-e6e.yaml b/yolov7-ros/src/cfg/deploy/yolov7-e6e.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/deploy/yolov7-e6e.yaml
rename to yolov7-ros/src/cfg/deploy/yolov7-e6e.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/deploy/yolov7-tiny-silu.yaml b/yolov7-ros/src/cfg/deploy/yolov7-tiny-silu.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/deploy/yolov7-tiny-silu.yaml
rename to yolov7-ros/src/cfg/deploy/yolov7-tiny-silu.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/deploy/yolov7-tiny.yaml b/yolov7-ros/src/cfg/deploy/yolov7-tiny.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/deploy/yolov7-tiny.yaml
rename to yolov7-ros/src/cfg/deploy/yolov7-tiny.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/deploy/yolov7-w6.yaml b/yolov7-ros/src/cfg/deploy/yolov7-w6.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/deploy/yolov7-w6.yaml
rename to yolov7-ros/src/cfg/deploy/yolov7-w6.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/deploy/yolov7.yaml b/yolov7-ros/src/cfg/deploy/yolov7.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/deploy/yolov7.yaml
rename to yolov7-ros/src/cfg/deploy/yolov7.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/deploy/yolov7x.yaml b/yolov7-ros/src/cfg/deploy/yolov7x.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/deploy/yolov7x.yaml
rename to yolov7-ros/src/cfg/deploy/yolov7x.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/training/yolov7-d6.yaml b/yolov7-ros/src/cfg/training/yolov7-d6.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/training/yolov7-d6.yaml
rename to yolov7-ros/src/cfg/training/yolov7-d6.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/training/yolov7-e6.yaml b/yolov7-ros/src/cfg/training/yolov7-e6.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/training/yolov7-e6.yaml
rename to yolov7-ros/src/cfg/training/yolov7-e6.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/training/yolov7-e6e.yaml b/yolov7-ros/src/cfg/training/yolov7-e6e.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/training/yolov7-e6e.yaml
rename to yolov7-ros/src/cfg/training/yolov7-e6e.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/training/yolov7-tiny.yaml b/yolov7-ros/src/cfg/training/yolov7-tiny.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/training/yolov7-tiny.yaml
rename to yolov7-ros/src/cfg/training/yolov7-tiny.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/training/yolov7-w6.yaml b/yolov7-ros/src/cfg/training/yolov7-w6.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/training/yolov7-w6.yaml
rename to yolov7-ros/src/cfg/training/yolov7-w6.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/training/yolov7.yaml b/yolov7-ros/src/cfg/training/yolov7.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/training/yolov7.yaml
rename to yolov7-ros/src/cfg/training/yolov7.yaml
diff --git a/air_moveit_config/scripts/yolov7/cfg/training/yolov7x.yaml b/yolov7-ros/src/cfg/training/yolov7x.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/cfg/training/yolov7x.yaml
rename to yolov7-ros/src/cfg/training/yolov7x.yaml
diff --git a/air_moveit_config/scripts/yolov7/data/coco.yaml b/yolov7-ros/src/data/coco.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/data/coco.yaml
rename to yolov7-ros/src/data/coco.yaml
diff --git a/air_moveit_config/scripts/yolov7/data/hyp.scratch.custom.yaml b/yolov7-ros/src/data/hyp.scratch.custom.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/data/hyp.scratch.custom.yaml
rename to yolov7-ros/src/data/hyp.scratch.custom.yaml
diff --git a/air_moveit_config/scripts/yolov7/data/hyp.scratch.p5.yaml b/yolov7-ros/src/data/hyp.scratch.p5.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/data/hyp.scratch.p5.yaml
rename to yolov7-ros/src/data/hyp.scratch.p5.yaml
diff --git a/air_moveit_config/scripts/yolov7/data/hyp.scratch.p6.yaml b/yolov7-ros/src/data/hyp.scratch.p6.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/data/hyp.scratch.p6.yaml
rename to yolov7-ros/src/data/hyp.scratch.p6.yaml
diff --git a/air_moveit_config/scripts/yolov7/data/hyp.scratch.tiny.yaml b/yolov7-ros/src/data/hyp.scratch.tiny.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/data/hyp.scratch.tiny.yaml
rename to yolov7-ros/src/data/hyp.scratch.tiny.yaml
diff --git a/air_moveit_config/scripts/yolov7/deploy/triton-inference-server/README.md b/yolov7-ros/src/deploy/triton-inference-server/README.md
similarity index 98%
rename from air_moveit_config/scripts/yolov7/deploy/triton-inference-server/README.md
rename to yolov7-ros/src/deploy/triton-inference-server/README.md
index 13af4da..22b01e3 100644
--- a/air_moveit_config/scripts/yolov7/deploy/triton-inference-server/README.md
+++ b/yolov7-ros/src/deploy/triton-inference-server/README.md
@@ -11,9 +11,6 @@ There are no additional dependencies needed to run this deployment, except a wor
See https://github.com/WongKinYiu/yolov7#export for more info.
```bash
-#install onnx-simplifier not listed in general yolov7 requirements.txt
-pip3 install onnx-simplifier
-
# Pytorch Yolov7 -> ONNX with grid, EfficientNMS plugin and dynamic batch size
python export.py --weights ./yolov7.pt --grid --end2end --dynamic-batch --simplify --topk-all 100 --iou-thres 0.65 --conf-thres 0.35 --img-size 640 640
# ONNX -> TensorRT with trtexec and docker
diff --git a/air_moveit_config/scripts/yolov7/deploy/triton-inference-server/boundingbox.py b/yolov7-ros/src/deploy/triton-inference-server/boundingbox.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/deploy/triton-inference-server/boundingbox.py
rename to yolov7-ros/src/deploy/triton-inference-server/boundingbox.py
diff --git a/air_moveit_config/scripts/yolov7/deploy/triton-inference-server/client.py b/yolov7-ros/src/deploy/triton-inference-server/client.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/deploy/triton-inference-server/client.py
rename to yolov7-ros/src/deploy/triton-inference-server/client.py
diff --git a/air_moveit_config/scripts/yolov7/deploy/triton-inference-server/labels.py b/yolov7-ros/src/deploy/triton-inference-server/labels.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/deploy/triton-inference-server/labels.py
rename to yolov7-ros/src/deploy/triton-inference-server/labels.py
diff --git a/air_moveit_config/scripts/yolov7/deploy/triton-inference-server/processing.py b/yolov7-ros/src/deploy/triton-inference-server/processing.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/deploy/triton-inference-server/processing.py
rename to yolov7-ros/src/deploy/triton-inference-server/processing.py
diff --git a/air_moveit_config/scripts/yolov7/deploy/triton-inference-server/render.py b/yolov7-ros/src/deploy/triton-inference-server/render.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/deploy/triton-inference-server/render.py
rename to yolov7-ros/src/deploy/triton-inference-server/render.py
diff --git a/air_moveit_config/scripts/yolov7/detect.py b/yolov7-ros/src/detect.py
old mode 100755
new mode 100644
similarity index 89%
rename from air_moveit_config/scripts/yolov7/detect.py
rename to yolov7-ros/src/detect.py
index 179c3ca..5648a91
--- a/air_moveit_config/scripts/yolov7/detect.py
+++ b/yolov7-ros/src/detect.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
import argparse
import time
from pathlib import Path
@@ -14,10 +13,6 @@
scale_coords, xyxy2xywh, strip_optimizer, set_logging, increment_path
from utils.plots import plot_one_box
from utils.torch_utils import select_device, load_classifier, time_synchronized, TracedModel
-import rospy
-from std_msgs.msg import Int32MultiArray
-
-
def detect(save_img=False):
@@ -26,7 +21,6 @@ def detect(save_img=False):
webcam = source.isnumeric() or source.endswith('.txt') or source.lower().startswith(
('rtsp://', 'rtmp://', 'http://', 'https://'))
-
# Directories
save_dir = Path(increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok)) # increment run
(save_dir / 'labels' if save_txt else save_dir).mkdir(parents=True, exist_ok=True) # make dir
@@ -90,8 +84,7 @@ def detect(save_img=False):
# Inference
t1 = time_synchronized()
- with torch.no_grad(): # Calculating gradients would cause a GPU memory leak
- pred = model(img, augment=opt.augment)[0]
+ pred = model(img, augment=opt.augment)[0]
t2 = time_synchronized()
# Apply NMS
@@ -132,20 +125,11 @@ def detect(save_img=False):
if save_img or view_img: # Add bbox to image
label = f'{names[int(cls)]} {conf:.2f}'
- plot_one_box(xyxy, im0, label=label, color=colors[int(cls)], line_thickness=2)
+ plot_one_box(xyxy, im0, label=label, color=colors[int(cls)], line_thickness=1)
# Print time (inference + NMS)
print(f'{s}Done. ({(1E3 * (t2 - t1)):.1f}ms) Inference, ({(1E3 * (t3 - t2)):.1f}ms) NMS')
- x1 = int(xyxy[0].item())
- y1 = int(xyxy[1].item())
- x2 = int(xyxy[2].item())
- y2 = int(xyxy[3].item())
- bbox_points=Int32MultiArray()
- bbox_points.data=[x1, y1, x2, y2]
- pub.publish(bbox_points)
- #rospy.spinOnce()
- #rospy.sleep()
- print(bbox_points)
+
# Stream results
if view_img:
cv2.imshow(str(p), im0)
@@ -176,17 +160,16 @@ def detect(save_img=False):
#print(f"Results saved to {save_dir}{s}")
print(f'Done. ({time.time() - t0:.3f}s)')
- return xyxy
if __name__ == '__main__':
parser = argparse.ArgumentParser()
- parser.add_argument('--weights', nargs='+', type=str, default='/home/cenk/catkin_ws/src/air_moveit_config/scripts/yolov7/yolov7.pt', help='model.pt path(s)')
- parser.add_argument('--source', type=str, default='0', help='source') # file/folder, 0 for webcam
+ parser.add_argument('--weights', nargs='+', type=str, default='yolov7.pt', help='model.pt path(s)')
+ parser.add_argument('--source', type=str, default='inference/images', help='source') # file/folder, 0 for webcam
parser.add_argument('--img-size', type=int, default=640, help='inference size (pixels)')
parser.add_argument('--conf-thres', type=float, default=0.25, help='object confidence threshold')
parser.add_argument('--iou-thres', type=float, default=0.45, help='IOU threshold for NMS')
- parser.add_argument('--device', default='cpu', help='cuda device, i.e. 0 or 0,1,2,3 or cpu')
+ parser.add_argument('--device', default='', help='cuda device, i.e. 0 or 0,1,2,3 or cpu')
parser.add_argument('--view-img', action='store_true', help='display results')
parser.add_argument('--save-txt', action='store_true', help='save results to *.txt')
parser.add_argument('--save-conf', action='store_true', help='save confidences in --save-txt labels')
@@ -202,8 +185,7 @@ def detect(save_img=False):
opt = parser.parse_args()
print(opt)
#check_requirements(exclude=('pycocotools', 'thop'))
- pub=rospy.Publisher('inference_vals',Int32MultiArray,queue_size=10)
- rospy.init_node('yolov7_Publisher')
+
with torch.no_grad():
if opt.update: # update all models (to fix SourceChangeWarning)
for opt.weights in ['yolov7.pt']:
diff --git a/yolov7-ros/src/detect_ros.py b/yolov7-ros/src/detect_ros.py
new file mode 100755
index 0000000..5fca31e
--- /dev/null
+++ b/yolov7-ros/src/detect_ros.py
@@ -0,0 +1,214 @@
+#!/usr/bin/python3
+
+from models.experimental import attempt_load
+from utils.general import non_max_suppression
+from utils.ros import create_detection_msg
+from visualizer import draw_detections
+
+import os
+from typing import Tuple, Union, List
+
+import torch
+import cv2
+from torchvision.transforms import ToTensor
+import numpy as np
+import rospy
+
+from vision_msgs.msg import Detection2DArray, Detection2D, BoundingBox2D
+from sensor_msgs.msg import Image
+from cv_bridge import CvBridge
+
+
+def parse_classes_file(path):
+ classes = []
+ with open(path, "r") as f:
+ for line in f:
+ line = line.replace("\n", "")
+ classes.append(line)
+ return classes
+
+
+def rescale(ori_shape: Tuple[int, int], boxes: Union[torch.Tensor, np.ndarray],
+ target_shape: Tuple[int, int]):
+ """Rescale the output to the original image shape
+ :param ori_shape: original width and height [width, height].
+ :param boxes: original bounding boxes as a torch.Tensor or np.array or shape
+ [num_boxes, >=4], where the first 4 entries of each element have to be
+ [x1, y1, x2, y2].
+ :param target_shape: target width and height [width, height].
+ """
+ xscale = target_shape[1] / ori_shape[1]
+ yscale = target_shape[0] / ori_shape[0]
+
+ boxes[:, [0, 2]] *= xscale
+ boxes[:, [1, 3]] *= yscale
+
+ return boxes
+
+
+class YoloV7:
+ def __init__(self, weights, conf_thresh: float = 0.5, iou_thresh: float = 0.45,
+ device: str = "cuda"):
+ self.conf_thresh = conf_thresh
+ self.iou_thresh = iou_thresh
+ self.device = device
+ self.model = attempt_load(weights, map_location=device)
+ self.model.eval()
+
+ @torch.no_grad()
+ def inference(self, img: torch.Tensor):
+ """
+ :param img: tensor [c, h, w]
+ :returns: tensor of shape [num_boxes, 6], where each item is represented as
+ [x1, y1, x2, y2, confidence, class_id]
+ """
+ img = img.unsqueeze(0)
+ pred_results = self.model(img)[0]
+ detections = non_max_suppression(
+ pred_results, conf_thres=self.conf_thresh, iou_thres=self.iou_thresh
+ )
+ if detections:
+ detections = detections[0]
+ return detections
+
+
+class Yolov7Publisher:
+ def __init__(self, img_topic: str, weights: str, conf_thresh: float = 0.5,
+ iou_thresh: float = 0.45, pub_topic: str = "yolov7_detections",
+ device: str = "cpu",
+ img_size: Union[Tuple[int, int], None] = (640, 640),
+ queue_size: int = 1, visualize: bool = False,
+ class_labels: Union[List, None] = None):
+ """
+ :param img_topic: name of the image topic to listen to
+ :param weights: path/to/yolo_weights.pt
+ :param conf_thresh: confidence threshold
+ :param iou_thresh: intersection over union threshold
+ :param pub_topic: name of the output topic (will be published under the
+ namespace '/yolov7')
+ :param device: device to do inference on (e.g., 'cuda' or 'cpu')
+ :param queue_size: queue size for publishers
+        :visualize: flag to enable publishing the detections visualized in the image (param visualize)
+ :param img_size: (height, width) to which the img is resized before being
+ fed into the yolo network. Final output coordinates will be rescaled to
+ the original img size.
+ :param class_labels: List of length num_classes, containing the class
+ labels. The i-th element in this list corresponds to the i-th
+            class id. Only for visualization. If it is None, then no class
+ labels are visualized.
+ """
+ self.img_size = img_size
+ self.device = device
+ self.class_labels = class_labels
+
+ vis_topic = pub_topic + "visualization" if pub_topic.endswith("/") else \
+ pub_topic + "/visualization"
+ self.visualization_publisher = rospy.Publisher(
+ vis_topic, Image, queue_size=queue_size
+ ) if visualize else None
+
+ self.bridge = CvBridge()
+
+ self.tensorize = ToTensor()
+ self.model = YoloV7(
+ weights=weights, conf_thresh=conf_thresh, iou_thresh=iou_thresh,
+ device=device
+ )
+ self.img_subscriber = rospy.Subscriber(
+ img_topic, Image, self.process_img_msg
+ )
+ self.detection_publisher = rospy.Publisher(
+ pub_topic, Detection2DArray, queue_size=queue_size
+ )
+ rospy.loginfo("YOLOv7 initialization complete. Ready to start inference")
+
+ def process_img_msg(self, img_msg: Image):
+ """ callback function for publisher """
+ np_img_orig = self.bridge.imgmsg_to_cv2(
+ img_msg, desired_encoding='bgr8'
+ )
+
+ # handle possible different img formats
+ if len(np_img_orig.shape) == 2:
+ np_img_orig = np.stack([np_img_orig] * 3, axis=2)
+
+ h_orig, w_orig, c = np_img_orig.shape
+
+ # automatically resize the image to the next smaller possible size
+ w_scaled, h_scaled = self.img_size
+ np_img_resized = cv2.resize(np_img_orig, (w_scaled, h_scaled))
+
+ # conversion to torch tensor (copied from original yolov7 repo)
+ img = np_img_resized.transpose((2, 0, 1))[::-1] # HWC to CHW, BGR to RGB
+ img = torch.from_numpy(np.ascontiguousarray(img))
+ img = img.float() # uint8 to fp16/32
+ img /= 255 # 0 - 255 to 0.0 - 1.
+ img = img.to(self.device)
+
+ # inference & rescaling the output to original img size
+ detections = self.model.inference(img)
+ detections[:, :4] = rescale(
+ [h_scaled, w_scaled], detections[:, :4], [h_orig, w_orig])
+ detections[:, :4] = detections[:, :4].round()
+
+ # publishing
+ detection_msg = create_detection_msg(img_msg, detections)
+ self.detection_publisher.publish(detection_msg)
+
+ # visualizing if required
+ if self.visualization_publisher:
+ bboxes = [[int(x1), int(y1), int(x2), int(y2)]
+ for x1, y1, x2, y2 in detections[:, :4].tolist()]
+ classes = [int(c) for c in detections[:, 5].tolist()]
+ vis_img = draw_detections(np_img_orig, bboxes, classes,
+ self.class_labels)
+ vis_msg = self.bridge.cv2_to_imgmsg(vis_img)
+ self.visualization_publisher.publish(vis_msg)
+
+
+if __name__ == "__main__":
+ rospy.init_node("yolov7_node")
+
+ ns = rospy.get_name() + "/"
+
+ weights_path = rospy.get_param(ns + "weights_path")
+ classes_path = rospy.get_param(ns + "classes_path")
+ img_topic = rospy.get_param(ns + "img_topic")
+ out_topic = rospy.get_param(ns + "out_topic")
+ conf_thresh = rospy.get_param(ns + "conf_thresh")
+ iou_thresh = rospy.get_param(ns + "iou_thresh")
+ queue_size = rospy.get_param(ns + "queue_size")
+ img_size = rospy.get_param(ns + "img_size")
+ visualize = rospy.get_param(ns + "visualize")
+ device = rospy.get_param(ns + "device")
+
+ # some sanity checks
+ if not os.path.isfile(weights_path):
+ raise FileExistsError(f"Weights not found ({weights_path}).")
+
+ if classes_path:
+ if not os.path.isfile(classes_path):
+ raise FileExistsError(f"Classes file not found ({classes_path}).")
+ classes = parse_classes_file(classes_path)
+ else:
+ rospy.loginfo("No class file provided. Class labels will not be visualized.")
+ classes = None
+
+ if not ("cuda" in device or "cpu" in device):
+ raise ValueError("Check your device.")
+
+
+ publisher = Yolov7Publisher(
+ img_topic=img_topic,
+ pub_topic=out_topic,
+ weights=weights_path,
+ device=device,
+ visualize=visualize,
+ conf_thresh=conf_thresh,
+ iou_thresh=iou_thresh,
+ img_size=(img_size, img_size),
+ queue_size=queue_size,
+ class_labels=classes
+ )
+
+ rospy.spin()
diff --git a/air_moveit_config/scripts/yolov7/export.py b/yolov7-ros/src/export.py
similarity index 99%
rename from air_moveit_config/scripts/yolov7/export.py
rename to yolov7-ros/src/export.py
index cf918aa..0fba541 100644
--- a/air_moveit_config/scripts/yolov7/export.py
+++ b/yolov7-ros/src/export.py
@@ -146,7 +146,7 @@
if opt.grid:
if opt.end2end:
print('\nStarting export end2end onnx model for %s...' % 'TensorRT' if opt.max_wh is None else 'onnxruntime')
- model = End2End(model,opt.topk_all,opt.iou_thres,opt.conf_thres,opt.max_wh,device,len(labels))
+ model = End2End(model,opt.topk_all,opt.iou_thres,opt.conf_thres,opt.max_wh,device)
if opt.end2end and opt.max_wh is None:
output_names = ['num_dets', 'det_boxes', 'det_scores', 'det_classes']
shapes = [opt.batch_size, 1, opt.batch_size, opt.topk_all, 4,
diff --git a/air_moveit_config/scripts/yolov7/hubconf.py b/yolov7-ros/src/hubconf.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/hubconf.py
rename to yolov7-ros/src/hubconf.py
diff --git a/air_moveit_config/scripts/yolov7/models/__init__.py b/yolov7-ros/src/models/__init__.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/models/__init__.py
rename to yolov7-ros/src/models/__init__.py
diff --git a/air_moveit_config/scripts/yolov7/models/common.py b/yolov7-ros/src/models/common.py
similarity index 99%
rename from air_moveit_config/scripts/yolov7/models/common.py
rename to yolov7-ros/src/models/common.py
index edb5edc..111af70 100644
--- a/air_moveit_config/scripts/yolov7/models/common.py
+++ b/yolov7-ros/src/models/common.py
@@ -444,7 +444,7 @@ def forward(self, x):
class ImplicitM(nn.Module):
- def __init__(self, channel, mean=1., std=.02):
+ def __init__(self, channel, mean=0., std=.02):
super(ImplicitM, self).__init__()
self.channel = channel
self.mean = mean
diff --git a/air_moveit_config/scripts/yolov7/models/experimental.py b/yolov7-ros/src/models/experimental.py
similarity index 92%
rename from air_moveit_config/scripts/yolov7/models/experimental.py
rename to yolov7-ros/src/models/experimental.py
index 735d7aa..3fa5c12 100644
--- a/air_moveit_config/scripts/yolov7/models/experimental.py
+++ b/yolov7-ros/src/models/experimental.py
@@ -158,7 +158,7 @@ def symbolic(g,
class ONNX_ORT(nn.Module):
'''onnx module with ONNX-Runtime NMS operation.'''
- def __init__(self, max_obj=100, iou_thres=0.45, score_thres=0.25, max_wh=640, device=None, n_classes=80):
+ def __init__(self, max_obj=100, iou_thres=0.45, score_thres=0.25, max_wh=640, device=None):
super().__init__()
self.device = device if device else torch.device("cpu")
self.max_obj = torch.tensor([max_obj]).to(device)
@@ -168,17 +168,12 @@ def __init__(self, max_obj=100, iou_thres=0.45, score_thres=0.25, max_wh=640, de
self.convert_matrix = torch.tensor([[1, 0, 1, 0], [0, 1, 0, 1], [-0.5, 0, 0.5, 0], [0, -0.5, 0, 0.5]],
dtype=torch.float32,
device=self.device)
- self.n_classes=n_classes
def forward(self, x):
boxes = x[:, :, :4]
conf = x[:, :, 4:5]
scores = x[:, :, 5:]
- if self.n_classes == 1:
- scores = conf # for models with one class, cls_loss is 0 and cls_conf is always 0.5,
- # so there is no need to multiplicate.
- else:
- scores *= conf # conf = obj_conf * cls_conf
+ scores *= conf
boxes @= self.convert_matrix
max_score, category_id = scores.max(2, keepdim=True)
dis = category_id.float() * self.max_wh
@@ -194,7 +189,7 @@ def forward(self, x):
class ONNX_TRT(nn.Module):
'''onnx module with TensorRT NMS operation.'''
- def __init__(self, max_obj=100, iou_thres=0.45, score_thres=0.25, max_wh=None ,device=None, n_classes=80):
+ def __init__(self, max_obj=100, iou_thres=0.45, score_thres=0.25, max_wh=None ,device=None):
super().__init__()
assert max_wh is None
self.device = device if device else torch.device('cpu')
@@ -205,17 +200,12 @@ def __init__(self, max_obj=100, iou_thres=0.45, score_thres=0.25, max_wh=None ,d
self.plugin_version = '1'
self.score_activation = 0
self.score_threshold = score_thres
- self.n_classes=n_classes
def forward(self, x):
boxes = x[:, :, :4]
conf = x[:, :, 4:5]
scores = x[:, :, 5:]
- if self.n_classes == 1:
- scores = conf # for models with one class, cls_loss is 0 and cls_conf is always 0.5,
- # so there is no need to multiplicate.
- else:
- scores *= conf # conf = obj_conf * cls_conf
+ scores *= conf
num_det, det_boxes, det_scores, det_classes = TRT_NMS.apply(boxes, scores, self.background_class, self.box_coding,
self.iou_threshold, self.max_obj,
self.plugin_version, self.score_activation,
@@ -225,14 +215,14 @@ def forward(self, x):
class End2End(nn.Module):
'''export onnx or tensorrt model with NMS operation.'''
- def __init__(self, model, max_obj=100, iou_thres=0.45, score_thres=0.25, max_wh=None, device=None, n_classes=80):
+ def __init__(self, model, max_obj=100, iou_thres=0.45, score_thres=0.25, max_wh=None, device=None):
super().__init__()
device = device if device else torch.device('cpu')
assert isinstance(max_wh,(int)) or max_wh is None
self.model = model.to(device)
self.model.model[-1].end2end = True
self.patch_model = ONNX_TRT if max_wh is None else ONNX_ORT
- self.end2end = self.patch_model(max_obj, iou_thres, score_thres, max_wh, device, n_classes)
+ self.end2end = self.patch_model(max_obj, iou_thres, score_thres, max_wh, device)
self.end2end.eval()
def forward(self, x):
diff --git a/air_moveit_config/scripts/yolov7/models/yolo.py b/yolov7-ros/src/models/yolo.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/models/yolo.py
rename to yolov7-ros/src/models/yolo.py
diff --git a/air_moveit_config/scripts/yolov7/paper/yolov7.pdf b/yolov7-ros/src/paper/yolov7.pdf
similarity index 100%
rename from air_moveit_config/scripts/yolov7/paper/yolov7.pdf
rename to yolov7-ros/src/paper/yolov7.pdf
diff --git a/air_moveit_config/scripts/yolov7/scripts/get_coco.sh b/yolov7-ros/src/scripts/get_coco.sh
similarity index 100%
rename from air_moveit_config/scripts/yolov7/scripts/get_coco.sh
rename to yolov7-ros/src/scripts/get_coco.sh
diff --git a/air_moveit_config/scripts/yolov7/test.py b/yolov7-ros/src/test.py
similarity index 97%
rename from air_moveit_config/scripts/yolov7/test.py
rename to yolov7-ros/src/test.py
index 17b4806..60154a6 100644
--- a/air_moveit_config/scripts/yolov7/test.py
+++ b/yolov7-ros/src/test.py
@@ -39,8 +39,7 @@ def test(data,
compute_loss=None,
half_precision=True,
trace=False,
- is_coco=False,
- v5_metric=False):
+ is_coco=False):
# Initialize/load model and set device
training = model is not None
if training: # called by train.py
@@ -60,7 +59,7 @@ def test(data,
imgsz = check_img_size(imgsz, s=gs) # check img_size
if trace:
- model = TracedModel(model, device, imgsz)
+ model = TracedModel(model, device, opt.img_size)
# Half
half = device.type != 'cpu' and half_precision # half precision only supported on CUDA
@@ -90,9 +89,6 @@ def test(data,
dataloader = create_dataloader(data[task], imgsz, batch_size, gs, opt, pad=0.5, rect=True,
prefix=colorstr(f'{task}: '))[0]
- if v5_metric:
- print("Testing with YOLOv5 AP metric...")
-
seen = 0
confusion_matrix = ConfusionMatrix(nc=nc)
names = {k: v for k, v in enumerate(model.names if hasattr(model, 'names') else model.module.names)}
@@ -221,7 +217,7 @@ def test(data,
# Compute statistics
stats = [np.concatenate(x, 0) for x in zip(*stats)] # to numpy
if len(stats) and stats[0].any():
- p, r, ap, f1, ap_class = ap_per_class(*stats, plot=plots, v5_metric=v5_metric, save_dir=save_dir, names=names)
+ p, r, ap, f1, ap_class = ap_per_class(*stats, plot=plots, save_dir=save_dir, names=names)
ap50, ap = ap[:, 0], ap.mean(1) # AP@0.5, AP@0.5:0.95
mp, mr, map50, map = p.mean(), r.mean(), ap50.mean(), ap.mean()
nt = np.bincount(stats[3].astype(np.int64), minlength=nc) # number of targets per class
@@ -308,7 +304,6 @@ def test(data,
parser.add_argument('--name', default='exp', help='save to project/name')
parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment')
parser.add_argument('--no-trace', action='store_true', help='don`t trace model')
- parser.add_argument('--v5-metric', action='store_true', help='assume maximum recall as 1.0 in AP calculation')
opt = parser.parse_args()
opt.save_json |= opt.data.endswith('coco.yaml')
opt.data = check_file(opt.data) # check file
@@ -330,12 +325,11 @@ def test(data,
save_hybrid=opt.save_hybrid,
save_conf=opt.save_conf,
trace=not opt.no_trace,
- v5_metric=opt.v5_metric
)
elif opt.task == 'speed': # speed benchmarks
for w in opt.weights:
- test(opt.data, w, opt.batch_size, opt.img_size, 0.25, 0.45, save_json=False, plots=False, v5_metric=opt.v5_metric)
+ test(opt.data, w, opt.batch_size, opt.img_size, 0.25, 0.45, save_json=False, plots=False)
elif opt.task == 'study': # run over a range of settings and save/plot
# python test.py --task study --data coco.yaml --iou 0.65 --weights yolov7.pt
@@ -346,7 +340,7 @@ def test(data,
for i in x: # img-size
print(f'\nRunning {f} point {i}...')
r, _, t = test(opt.data, w, opt.batch_size, i, opt.conf_thres, opt.iou_thres, opt.save_json,
- plots=False, v5_metric=opt.v5_metric)
+ plots=False)
y.append(r + t) # results and times
np.savetxt(f, y, fmt='%10.4g') # save
os.system('zip -r study.zip study_*.txt')
diff --git a/air_moveit_config/scripts/yolov7/train.py b/yolov7-ros/src/train.py
similarity index 99%
rename from air_moveit_config/scripts/yolov7/train.py
rename to yolov7-ros/src/train.py
index 86c7e48..ea76399 100644
--- a/air_moveit_config/scripts/yolov7/train.py
+++ b/yolov7-ros/src/train.py
@@ -423,8 +423,7 @@ def train(hyp, opt, device, tb_writer=None):
plots=plots and final_epoch,
wandb_logger=wandb_logger,
compute_loss=compute_loss,
- is_coco=is_coco,
- v5_metric=opt.v5_metric)
+ is_coco=is_coco)
# Write
with open(results_file, 'a') as f:
@@ -503,8 +502,7 @@ def train(hyp, opt, device, tb_writer=None):
save_dir=save_dir,
save_json=True,
plots=False,
- is_coco=is_coco,
- v5_metric=opt.v5_metric)
+ is_coco=is_coco)
# Strip optimizers
final = best if best.exists() else last # final model
@@ -561,7 +559,6 @@ def train(hyp, opt, device, tb_writer=None):
parser.add_argument('--save_period', type=int, default=-1, help='Log model after every "save_period" epoch')
parser.add_argument('--artifact_alias', type=str, default="latest", help='version of dataset artifact to be used')
parser.add_argument('--freeze', nargs='+', type=int, default=[0], help='Freeze layers: backbone of yolov7=50, first3=0 1 2')
- parser.add_argument('--v5-metric', action='store_true', help='assume maximum recall as 1.0 in AP calculation')
opt = parser.parse_args()
# Set DDP variables
diff --git a/air_moveit_config/scripts/yolov7/train_aux.py b/yolov7-ros/src/train_aux.py
similarity index 99%
rename from air_moveit_config/scripts/yolov7/train_aux.py
rename to yolov7-ros/src/train_aux.py
index 0e8053f..6094beb 100644
--- a/air_moveit_config/scripts/yolov7/train_aux.py
+++ b/yolov7-ros/src/train_aux.py
@@ -420,8 +420,7 @@ def train(hyp, opt, device, tb_writer=None):
plots=plots and final_epoch,
wandb_logger=wandb_logger,
compute_loss=compute_loss,
- is_coco=is_coco,
- v5_metric=opt.v5_metric)
+ is_coco=is_coco)
# Write
with open(results_file, 'a') as f:
@@ -500,8 +499,7 @@ def train(hyp, opt, device, tb_writer=None):
save_dir=save_dir,
save_json=True,
plots=False,
- is_coco=is_coco,
- v5_metric=opt.v5_metric)
+ is_coco=is_coco)
# Strip optimizers
final = best if best.exists() else last # final model
@@ -557,7 +555,6 @@ def train(hyp, opt, device, tb_writer=None):
parser.add_argument('--bbox_interval', type=int, default=-1, help='Set bounding-box image logging interval for W&B')
parser.add_argument('--save_period', type=int, default=-1, help='Log model after every "save_period" epoch')
parser.add_argument('--artifact_alias', type=str, default="latest", help='version of dataset artifact to be used')
- parser.add_argument('--v5-metric', action='store_true', help='assume maximum recall as 1.0 in AP calculation')
opt = parser.parse_args()
# Set DDP variables
diff --git a/air_moveit_config/scripts/yolov7/utils/__init__.py b/yolov7-ros/src/utils/__init__.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/__init__.py
rename to yolov7-ros/src/utils/__init__.py
diff --git a/air_moveit_config/scripts/yolov7/utils/activations.py b/yolov7-ros/src/utils/activations.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/activations.py
rename to yolov7-ros/src/utils/activations.py
diff --git a/air_moveit_config/scripts/yolov7/utils/add_nms.py b/yolov7-ros/src/utils/add_nms.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/add_nms.py
rename to yolov7-ros/src/utils/add_nms.py
diff --git a/air_moveit_config/scripts/yolov7/utils/autoanchor.py b/yolov7-ros/src/utils/autoanchor.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/autoanchor.py
rename to yolov7-ros/src/utils/autoanchor.py
diff --git a/air_moveit_config/scripts/yolov7/utils/aws/__init__.py b/yolov7-ros/src/utils/aws/__init__.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/aws/__init__.py
rename to yolov7-ros/src/utils/aws/__init__.py
diff --git a/air_moveit_config/scripts/yolov7/utils/aws/mime.sh b/yolov7-ros/src/utils/aws/mime.sh
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/aws/mime.sh
rename to yolov7-ros/src/utils/aws/mime.sh
diff --git a/air_moveit_config/scripts/yolov7/utils/aws/resume.py b/yolov7-ros/src/utils/aws/resume.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/aws/resume.py
rename to yolov7-ros/src/utils/aws/resume.py
diff --git a/air_moveit_config/scripts/yolov7/utils/aws/userdata.sh b/yolov7-ros/src/utils/aws/userdata.sh
similarity index 92%
rename from air_moveit_config/scripts/yolov7/utils/aws/userdata.sh
rename to yolov7-ros/src/utils/aws/userdata.sh
index 5a99d4b..5762ae5 100644
--- a/air_moveit_config/scripts/yolov7/utils/aws/userdata.sh
+++ b/yolov7-ros/src/utils/aws/userdata.sh
@@ -7,8 +7,8 @@
cd home/ubuntu
if [ ! -d yolor ]; then
echo "Running first-time script." # install dependencies, download COCO, pull Docker
- git clone -b main https://github.com/WongKinYiu/yolov7 && sudo chmod -R 777 yolov7
- cd yolov7
+ git clone -b paper https://github.com/WongKinYiu/yolor && sudo chmod -R 777 yolor
+ cd yolor
bash data/scripts/get_coco.sh && echo "Data done." &
sudo docker pull nvcr.io/nvidia/pytorch:21.08-py3 && echo "Docker done." &
python -m pip install --upgrade pip && pip install -r requirements.txt && python detect.py && echo "Requirements done." &
diff --git a/air_moveit_config/scripts/yolov7/utils/datasets.py b/yolov7-ros/src/utils/datasets.py
similarity index 99%
rename from air_moveit_config/scripts/yolov7/utils/datasets.py
rename to yolov7-ros/src/utils/datasets.py
index 5fe4f7b..b6bb8b0 100644
--- a/air_moveit_config/scripts/yolov7/utils/datasets.py
+++ b/yolov7-ros/src/utils/datasets.py
@@ -415,7 +415,7 @@ def __init__(self, path, img_size=640, batch_size=16, augment=False, hyp=None, r
x[:, 0] = 0
n = len(shapes) # number of images
- bi = np.floor(np.arange(n) / batch_size).astype(int) # batch index
+ bi = np.floor(np.arange(n) / batch_size).astype(np.int) # batch index
nb = bi[-1] + 1 # number of batches
self.batch = bi # batch index of image
self.n = n
@@ -443,7 +443,7 @@ def __init__(self, path, img_size=640, batch_size=16, augment=False, hyp=None, r
elif mini > 1:
shapes[i] = [1, 1 / mini]
- self.batch_shapes = np.ceil(np.array(shapes) * img_size / stride + pad).astype(int) * stride
+ self.batch_shapes = np.ceil(np.array(shapes) * img_size / stride + pad).astype(np.int) * stride
# Cache images into memory for faster training (WARNING: large datasets may exceed system RAM)
self.imgs = [None] * n
@@ -1200,7 +1200,7 @@ def pastein(image, labels, sample_labels, sample_images, sample_masks):
r_image = cv2.resize(sample_images[sel_ind], (r_w, r_h))
temp_crop = image[ymin:ymin+r_h, xmin:xmin+r_w]
m_ind = r_mask > 0
- if m_ind.astype(np.int32).sum() > 60:
+ if m_ind.astype(np.int).sum() > 60:
temp_crop[m_ind] = r_image[m_ind]
#print(sample_labels[sel_ind])
#print(sample_images[sel_ind].shape)
diff --git a/air_moveit_config/scripts/yolov7/utils/general.py b/yolov7-ros/src/utils/general.py
similarity index 99%
rename from air_moveit_config/scripts/yolov7/utils/general.py
rename to yolov7-ros/src/utils/general.py
index decdcc6..faf908f 100644
--- a/air_moveit_config/scripts/yolov7/utils/general.py
+++ b/yolov7-ros/src/utils/general.py
@@ -219,7 +219,7 @@ def labels_to_class_weights(labels, nc=80):
return torch.Tensor()
labels = np.concatenate(labels, 0) # labels.shape = (866643, 5) for COCO
- classes = labels[:, 0].astype(np.int32) # labels = [class xywh]
+ classes = labels[:, 0].astype(np.int) # labels = [class xywh]
weights = np.bincount(classes, minlength=nc) # occurrences per class
# Prepend gridpoint count (for uCE training)
@@ -234,7 +234,7 @@ def labels_to_class_weights(labels, nc=80):
def labels_to_image_weights(labels, nc=80, class_weights=np.ones(80)):
# Produces image weights based on class_weights and image contents
- class_counts = np.array([np.bincount(x[:, 0].astype(np.int32), minlength=nc) for x in labels])
+ class_counts = np.array([np.bincount(x[:, 0].astype(np.int), minlength=nc) for x in labels])
image_weights = (class_weights.reshape(1, nc) * class_counts).sum(1)
# index = random.choices(range(n), weights=image_weights, k=1) # weight image sample
return image_weights
@@ -310,7 +310,6 @@ def segments2boxes(segments):
def resample_segments(segments, n=1000):
# Up-sample an (n,2) segment
for i, s in enumerate(segments):
- s = np.concatenate((s, s[0:1, :]), axis=0)
x = np.linspace(0, len(s) - 1, n)
xp = np.arange(len(s))
segments[i] = np.concatenate([np.interp(x, xp, s[:, i]) for i in range(2)]).reshape(2, -1).T # segment xy
diff --git a/air_moveit_config/scripts/yolov7/utils/google_app_engine/Dockerfile b/yolov7-ros/src/utils/google_app_engine/Dockerfile
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/google_app_engine/Dockerfile
rename to yolov7-ros/src/utils/google_app_engine/Dockerfile
diff --git a/air_moveit_config/scripts/yolov7/utils/google_app_engine/additional_requirements.txt b/yolov7-ros/src/utils/google_app_engine/additional_requirements.txt
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/google_app_engine/additional_requirements.txt
rename to yolov7-ros/src/utils/google_app_engine/additional_requirements.txt
diff --git a/air_moveit_config/scripts/yolov7/utils/google_app_engine/app.yaml b/yolov7-ros/src/utils/google_app_engine/app.yaml
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/google_app_engine/app.yaml
rename to yolov7-ros/src/utils/google_app_engine/app.yaml
diff --git a/air_moveit_config/scripts/yolov7/utils/google_utils.py b/yolov7-ros/src/utils/google_utils.py
similarity index 64%
rename from air_moveit_config/scripts/yolov7/utils/google_utils.py
rename to yolov7-ros/src/utils/google_utils.py
index 76320f9..f363408 100644
--- a/air_moveit_config/scripts/yolov7/utils/google_utils.py
+++ b/yolov7-ros/src/utils/google_utils.py
@@ -20,37 +20,37 @@ def attempt_download(file, repo='WongKinYiu/yolov7'):
# Attempt file download if does not exist
file = Path(str(file).strip().replace("'", '').lower())
- #if not file.exists():
- # try:
- # response = requests.get(f'https://api.github.com/repos/{repo}/releases/latest').json() # github api
- # assets = [x['name'] for x in response['assets']] # release assets
- # tag = response['tag_name'] # i.e. 'v1.0'
- # except: # fallback plan
- # assets = ['yolov7.pt', 'yolov7-tiny.pt', 'yolov7x.pt', 'yolov7-d6.pt', 'yolov7-e6.pt',
- # 'yolov7-e6e.pt', 'yolov7-w6.pt']
- # tag = subprocess.check_output('git tag', shell=True).decode().split()[-1]
-
- # name = file.name
- # if name in assets:
- # msg = f'{file} missing, try downloading from https://github.com/{repo}/releases/'
- # redundant = False # second download option
- # try: # GitHub
- # url = f'https://github.com/{repo}/releases/download/{tag}/{name}'
- # print(f'Downloading {url} to {file}...')
- # torch.hub.download_url_to_file(url, file)
- # assert file.exists() and file.stat().st_size > 1E6 # check
- # except Exception as e: # GCP
- # print(f'Download error: {e}')
- # assert redundant, 'No secondary mirror'
- # url = f'https://storage.googleapis.com/{repo}/ckpt/{name}'
- # print(f'Downloading {url} to {file}...')
- # os.system(f'curl -L {url} -o {file}') # torch.hub.download_url_to_file(url, weights)
- # finally:
- # if not file.exists() or file.stat().st_size < 1E6: # check
- # file.unlink(missing_ok=True) # remove partial downloads
- # print(f'ERROR: Download failure: {msg}')
- # print('')
- # return
+ if not file.exists():
+ try:
+ response = requests.get(f'https://api.github.com/repos/{repo}/releases/latest').json() # github api
+ assets = [x['name'] for x in response['assets']] # release assets
+ tag = response['tag_name'] # i.e. 'v1.0'
+ except: # fallback plan
+ assets = ['yolov7.pt', 'yolov7-tiny.pt', 'yolov7x.pt', 'yolov7-d6.pt', 'yolov7-e6.pt',
+ 'yolov7-e6e.pt', 'yolov7-w6.pt']
+ tag = subprocess.check_output('git tag', shell=True).decode().split()[-1]
+
+ name = file.name
+ if name in assets:
+ msg = f'{file} missing, try downloading from https://github.com/{repo}/releases/'
+ redundant = False # second download option
+ try: # GitHub
+ url = f'https://github.com/{repo}/releases/download/{tag}/{name}'
+ print(f'Downloading {url} to {file}...')
+ torch.hub.download_url_to_file(url, file)
+ assert file.exists() and file.stat().st_size > 1E6 # check
+ except Exception as e: # GCP
+ print(f'Download error: {e}')
+ assert redundant, 'No secondary mirror'
+ url = f'https://storage.googleapis.com/{repo}/ckpt/{name}'
+ print(f'Downloading {url} to {file}...')
+ os.system(f'curl -L {url} -o {file}') # torch.hub.download_url_to_file(url, weights)
+ finally:
+ if not file.exists() or file.stat().st_size < 1E6: # check
+ file.unlink(missing_ok=True) # remove partial downloads
+ print(f'ERROR: Download failure: {msg}')
+ print('')
+ return
def gdrive_download(id='', file='tmp.zip'):
diff --git a/air_moveit_config/scripts/yolov7/utils/loss.py b/yolov7-ros/src/utils/loss.py
similarity index 99%
rename from air_moveit_config/scripts/yolov7/utils/loss.py
rename to yolov7-ros/src/utils/loss.py
index 2b1d968..bf7ab65 100644
--- a/air_moveit_config/scripts/yolov7/utils/loss.py
+++ b/yolov7-ros/src/utils/loss.py
@@ -642,7 +642,7 @@ def build_targets(self, p, targets, imgs):
#indices, anch = self.find_4_positive(p, targets)
#indices, anch = self.find_5_positive(p, targets)
#indices, anch = self.find_9_positive(p, targets)
- device = torch.device(targets.device)
+
matching_bs = [[] for pp in p]
matching_as = [[] for pp in p]
matching_gjs = [[] for pp in p]
@@ -682,7 +682,7 @@ def build_targets(self, p, targets, imgs):
all_gj.append(gj)
all_gi.append(gi)
all_anch.append(anch[i][idx])
- from_which_layer.append((torch.ones(size=(len(b),)) * i).to(device))
+ from_which_layer.append(torch.ones(size=(len(b),)) * i)
fg_pred = pi[b, a, gj, gi]
p_obj.append(fg_pred[:, 4:5])
@@ -739,7 +739,7 @@ def build_targets(self, p, targets, imgs):
+ 3.0 * pair_wise_iou_loss
)
- matching_matrix = torch.zeros_like(cost, device=device)
+ matching_matrix = torch.zeros_like(cost)
for gt_idx in range(num_gt):
_, pos_idx = torch.topk(
@@ -753,7 +753,7 @@ def build_targets(self, p, targets, imgs):
_, cost_argmin = torch.min(cost[:, anchor_matching_gt > 1], dim=0)
matching_matrix[:, anchor_matching_gt > 1] *= 0.0
matching_matrix[cost_argmin, anchor_matching_gt > 1] = 1.0
- fg_mask_inboxes = (matching_matrix.sum(0) > 0.0).to(device)
+ fg_mask_inboxes = matching_matrix.sum(0) > 0.0
matched_gt_inds = matching_matrix[:, fg_mask_inboxes].argmax(0)
from_which_layer = from_which_layer[fg_mask_inboxes]
diff --git a/air_moveit_config/scripts/yolov7/utils/metrics.py b/yolov7-ros/src/utils/metrics.py
similarity index 94%
rename from air_moveit_config/scripts/yolov7/utils/metrics.py
rename to yolov7-ros/src/utils/metrics.py
index 6d2f536..666b8c7 100644
--- a/air_moveit_config/scripts/yolov7/utils/metrics.py
+++ b/yolov7-ros/src/utils/metrics.py
@@ -15,7 +15,7 @@ def fitness(x):
return (x[:, :4] * w).sum(1)
-def ap_per_class(tp, conf, pred_cls, target_cls, v5_metric=False, plot=False, save_dir='.', names=()):
+def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names=()):
""" Compute the average precision, given the recall and precision curves.
Source: https://github.com/rafaelpadilla/Object-Detection-Metrics.
# Arguments
@@ -62,7 +62,7 @@ def ap_per_class(tp, conf, pred_cls, target_cls, v5_metric=False, plot=False, sa
# AP from recall-precision curve
for j in range(tp.shape[1]):
- ap[ci, j], mpre, mrec = compute_ap(recall[:, j], precision[:, j], v5_metric=v5_metric)
+ ap[ci, j], mpre, mrec = compute_ap(recall[:, j], precision[:, j])
if plot and j == 0:
py.append(np.interp(px, mrec, mpre)) # precision at mAP@0.5
@@ -78,21 +78,17 @@ def ap_per_class(tp, conf, pred_cls, target_cls, v5_metric=False, plot=False, sa
return p[:, i], r[:, i], ap, f1[:, i], unique_classes.astype('int32')
-def compute_ap(recall, precision, v5_metric=False):
+def compute_ap(recall, precision):
""" Compute the average precision, given the recall and precision curves
# Arguments
recall: The recall curve (list)
precision: The precision curve (list)
- v5_metric: Assume maximum recall to be 1.0, as in YOLOv5, MMDetetion etc.
# Returns
Average precision, precision curve, recall curve
"""
# Append sentinel values to beginning and end
- if v5_metric: # New YOLOv5 metric, same as MMDetection and Detectron2 repositories
- mrec = np.concatenate(([0.], recall, [1.0]))
- else: # Old YOLOv5 metric, i.e. default YOLOv7 metric
- mrec = np.concatenate(([0.], recall, [recall[-1] + 0.01]))
+ mrec = np.concatenate(([0.], recall, [recall[-1] + 0.01]))
mpre = np.concatenate(([1.], precision, [0.]))
# Compute the precision envelope
diff --git a/air_moveit_config/scripts/yolov7/utils/plots.py b/yolov7-ros/src/utils/plots.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/plots.py
rename to yolov7-ros/src/utils/plots.py
diff --git a/yolov7-ros/src/utils/ros.py b/yolov7-ros/src/utils/ros.py
new file mode 100644
index 0000000..905682b
--- /dev/null
+++ b/yolov7-ros/src/utils/ros.py
@@ -0,0 +1,59 @@
+import rospy
+import torch
+from std_msgs.msg import Header
+from sensor_msgs.msg import Image
+from vision_msgs.msg import Detection2DArray, Detection2D, BoundingBox2D, \
+ ObjectHypothesisWithPose
+from geometry_msgs.msg import Pose2D
+
+
+def create_header():
+ h = Header()
+ h.stamp = rospy.Time.now()
+ return h
+
+
+def create_detection_msg(img_msg: Image, detections: torch.Tensor) -> Detection2DArray:
+ """
+ :param img_msg: original ros image message
+ :param detections: torch tensor of shape [num_boxes, 6] where each element is
+ [x1, y1, x2, y2, confidence, class_id]
+ :returns: detections as a ros message of type Detection2DArray
+ """
+ detection_array_msg = Detection2DArray()
+
+ # header
+ header = create_header()
+ detection_array_msg.header = header
+ for detection in detections:
+ x1, y1, x2, y2, conf, cls = detection.tolist()
+ single_detection_msg = Detection2D()
+ single_detection_msg.header = header
+
+ # src img
+ single_detection_msg.source_img = img_msg
+
+ # bbox
+ bbox = BoundingBox2D()
+ w = int(round(x2 - x1))
+ h = int(round(y2 - y1))
+ cx = int(round(x1 + w / 2))
+ cy = int(round(y1 + h / 2))
+ bbox.size_x = w
+ bbox.size_y = h
+
+ bbox.center = Pose2D()
+ bbox.center.x = cx
+ bbox.center.y = cy
+
+ single_detection_msg.bbox = bbox
+
+ # class id & confidence
+ obj_hyp = ObjectHypothesisWithPose()
+ obj_hyp.id = int(cls)
+ obj_hyp.score = conf
+ single_detection_msg.results = [obj_hyp]
+
+ detection_array_msg.detections.append(single_detection_msg)
+
+ return detection_array_msg
\ No newline at end of file
diff --git a/air_moveit_config/scripts/yolov7/utils/torch_utils.py b/yolov7-ros/src/utils/torch_utils.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/torch_utils.py
rename to yolov7-ros/src/utils/torch_utils.py
diff --git a/air_moveit_config/scripts/yolov7/utils/wandb_logging/__init__.py b/yolov7-ros/src/utils/wandb_logging/__init__.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/wandb_logging/__init__.py
rename to yolov7-ros/src/utils/wandb_logging/__init__.py
diff --git a/air_moveit_config/scripts/yolov7/utils/wandb_logging/log_dataset.py b/yolov7-ros/src/utils/wandb_logging/log_dataset.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/wandb_logging/log_dataset.py
rename to yolov7-ros/src/utils/wandb_logging/log_dataset.py
diff --git a/air_moveit_config/scripts/yolov7/utils/wandb_logging/wandb_utils.py b/yolov7-ros/src/utils/wandb_logging/wandb_utils.py
similarity index 100%
rename from air_moveit_config/scripts/yolov7/utils/wandb_logging/wandb_utils.py
rename to yolov7-ros/src/utils/wandb_logging/wandb_utils.py
diff --git a/yolov7-ros/src/visualizer.py b/yolov7-ros/src/visualizer.py
new file mode 100644
index 0000000..cba6820
--- /dev/null
+++ b/yolov7-ros/src/visualizer.py
@@ -0,0 +1,33 @@
+import numpy as np
+import cv2
+from typing import List, Union
+
+
+def get_random_color(seed):
+ gen = np.random.default_rng(seed)
+ color = tuple(gen.choice(range(256), size=3))
+ color = tuple([int(c) for c in color])
+ return color
+
+
+def draw_detections(img: np.array, bboxes: List[List[int]], classes: List[int],
+ class_labels: Union[List[str], None]):
+ for bbox, cls in zip(bboxes, classes):
+ x1, y1, x2, y2 = bbox
+
+ color = get_random_color(int(cls))
+ img = cv2.rectangle(
+ img, (int(x1), int(y1)), (int(x2), int(y2)), color, 3
+ )
+
+ if class_labels:
+ label = class_labels[int(cls)]
+
+ x_text = int(x1)
+ y_text = max(15, int(y1 - 10))
+ img = cv2.putText(
+ img, label, (x_text, y_text), cv2.FONT_HERSHEY_SIMPLEX,
+ 0.5, color, 1, cv2.LINE_AA
+ )
+
+ return img