Commit: radius estimator
mrceki committed May 4, 2023
1 parent 053cfc1 commit 8b9a704
Showing 6 changed files with 85 additions and 18 deletions.
10 changes: 8 additions & 2 deletions air_moveit_config/include/detect_and_add.hpp
@@ -3,8 +3,9 @@

#include <ros/ros.h>
#include <std_msgs/Int32MultiArray.h>
#include <std_msgs/Float32.h>
#include <vision_msgs/Detection2DArray.h>

#include <sensor_msgs/PointCloud2.h>
class air_object
{
public:
@@ -13,13 +14,17 @@ class air_object

void yolo_callback(const vision_msgs::Detection2DArrayConstPtr& d2d_arr_msg);
void pcl_callback(const sensor_msgs::PointCloud2ConstPtr& msg);
void radius_estimator_callback(const std_msgs::Float32ConstPtr& radius);
void sphereCallback(const std_msgs::Float32MultiArray::ConstPtr& xyz);
std_msgs::Float32MultiArray m_xyz;

ros::NodeHandle m_NodeHandle;
ros::Publisher m_coordinate_pub;
ros::Publisher m_coordinate_pub;
ros::Subscriber m_coordinate_sub;
ros::Subscriber m_yolo_sub;
ros::Subscriber m_pcl_sub;
ros::Subscriber m_sub;
ros::Subscriber m_estimator_sub;

float m_x;
float m_y;
@@ -29,6 +34,7 @@
int m_center_y;
int m_center_x_buffer;
int m_center_y_buffer;
float m_radius;
private:

};
4 changes: 2 additions & 2 deletions air_moveit_config/launch/air_bringup.launch
@@ -19,8 +19,8 @@
<include file="$(find yolov7_ros)/launch/yolov7.launch" pass_all_args="true" unless="$(arg slave)"/>
<include file="$(find realsense2_camera)/launch/rs_camera.launch" unless="$(arg bagfile)"/>

<node pkg="air_moveit_config" name="air_detection_publisher" type="detect_and_add_orange" output="screen"/>
<node pkg="air_moveit_config" name="air_detection_publisher" type="detect_and_add_ball" output="screen"/>
<node pkg="air_moveit_config" name="air_collision_publisher" type="add_sphere" output="screen"/>

<node pkg="radius_estimation" name="regression_node" type="estimate.py" output="screen"/>

</launch>
33 changes: 23 additions & 10 deletions air_moveit_config/src/add_sphere.cpp
@@ -1,19 +1,32 @@
#include <ros/ros.h>
#include <moveit/planning_scene_interface/planning_scene_interface.h>
#include <moveit_msgs/CollisionObject.h>
#include <geometry_msgs/Vector3.h>
#include <std_msgs/Float32MultiArray.h>
#include <cmath>
#include "../include/detect_and_add.hpp"



air_object::air_object(ros::NodeHandle& nh)
: m_NodeHandle(nh)
{
ros::Subscriber sub;
m_estimator_sub = m_NodeHandle.subscribe("/radius_estimator", 1000, &air_object::radius_estimator_callback, this);
m_coordinate_sub = m_NodeHandle.subscribe("/coordinates_of_object", 1000, &air_object::sphereCallback, this);
}

void air_object::radius_estimator_callback(const std_msgs::Float32ConstPtr& radius)
{
m_radius = radius->data * 0.01; // convert estimated radius from cm to meters
}

ros::Subscriber sub;


// Callback function for the subscriber
void sphereCallback(const std_msgs::Float32MultiArray::ConstPtr xyz)
void air_object::sphereCallback(const std_msgs::Float32MultiArray::ConstPtr& xyz)
{
moveit::planning_interface::PlanningSceneInterface planning_scene_interface;
// Get the sphere dimensions from the message
float radius = 0.0685; // Radius of sphere(ball)
float x = xyz->data[0];
float y = xyz->data[1];
float z = xyz->data[2];
@@ -22,15 +35,15 @@ void sphereCallback(const std_msgs::Float32MultiArray::ConstPtr xyz)
shape_msgs::SolidPrimitive primitive;
primitive.type = primitive.SPHERE;
primitive.dimensions.resize(1);
primitive.dimensions[0] = radius;
primitive.dimensions[0] = m_radius;

// Specify the pose of the sphere
geometry_msgs::Pose pose;
// To fix data transformations -> x = z, y = -x, z = -y
float cam_degree = 30; // Angle between robot and camera
pose.position.x = z + (((radius/2) / sin(((90-cam_degree) * M_PI)/180))*2);
pose.position.x = z + (((m_radius/2) / sin(((90-cam_degree) * M_PI)/180))*2);
pose.position.y = -x;
pose.position.z = -y + radius; //hotfix for collision
pose.position.z = -y + (m_radius/4); //hotfix for collision

// Orientation fix (to do: cam_degree)
pose.orientation.w = 0.9659258;
@@ -56,9 +69,9 @@ void sphereCallback(const std_msgs::Float32MultiArray::ConstPtr xyz)
int main(int argc, char** argv)
{
ros::init(argc, argv, "detect_and_add_sphere_collision_object_demo");
ros::NodeHandle node_handle;
// Subscribe to the sphere dimensions topic
sub = node_handle.subscribe("/coordinates_of_object", 10, sphereCallback);
ros::NodeHandle nh;
air_object object(nh);

ros::spin();
return 0;
}
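
Note on the pose math in sphereCallback above: the callback remaps the camera frame into the robot planning frame (x = z, y = -x, z = -y), pushes the sphere back along the depth axis by an offset derived from the estimated radius and the 30-degree camera tilt, and nudges it up by a quarter of the radius (the "hotfix for collision"). Below is a minimal Python sketch of that arithmetic, not part of this commit; the function name is illustrative only.

# Minimal sketch (not part of this commit) of the frame remap and offsets
# performed in air_object::sphereCallback.
import math

def sphere_pose_from_camera(x, y, z, radius, cam_degree=30.0):
    # Push the center back along the viewing axis so the collision sphere
    # sits at the ball's center rather than on its front surface.
    depth_offset = ((radius / 2) / math.sin(math.radians(90 - cam_degree))) * 2
    px = z + depth_offset        # camera z  -> robot x
    py = -x                      # camera x  -> robot -y
    pz = -y + radius / 4         # camera y  -> robot -z, plus the collision hotfix
    return px, py, pz

# Example: ball detected 1 m in front of the camera with a 5 cm radius
print(sphere_pose_from_camera(0.0, 0.0, 1.0, 0.05))
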
48 changes: 48 additions & 0 deletions radius_estimation/scripts/estimate.py
@@ -0,0 +1,48 @@
#!/usr/bin/env python3

import rospy
from sensor_msgs.msg import Image
from cv_bridge import CvBridge
from pyrealsense2 import pyrealsense2 as rs
from vision_msgs.msg import Detection2DArray
from std_msgs.msg import Float32
import cv2
import numpy as np
import os
# Load the pretrained model
import joblib


class Estimator:
    def __init__(self):
        self.model = joblib.load("/home/cenk/regression_model2.joblib")  # import pretrained model
        self.bridge = CvBridge()
        self.depth_scale = 0.001
        self.depth_image = None
        rospy.init_node('regression_node')
        rospy.Subscriber('/yolov7/yolov7', Detection2DArray, self.image_callback)
        rospy.Subscriber('/camera/aligned_depth_to_color/image_raw', Image, self.depth_callback)
        self.pub = rospy.Publisher("/radius_estimator", Float32, queue_size=10)

    def image_callback(self, detection_array):
        if self.depth_image is None:
            return  # no depth frame received yet
        # Bbox to numpy
        for detection in detection_array.detections:
            if detection.results[0].id == 32:  # class id 32 is 'sports ball' in COCO
                bbox = detection.bbox
                depth = self.depth_image[int(bbox.center.y), int(bbox.center.x)] * self.depth_scale
                features = np.array([depth, bbox.size_x, bbox.size_y])
                features = features.reshape(1, -1)  # add an extra dimension
                predict = self.model.predict(features)
                self.pub.publish(float(predict[0]))
                print("Radius of ball predicted as %.2f cm" % predict[0])

    def depth_callback(self, depth_image):
        self.depth_image = self.bridge.imgmsg_to_cv2(depth_image, desired_encoding="passthrough")


if __name__ == '__main__':
    estimator = Estimator()
    rospy.spin()
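
The estimator node only calls joblib.load and model.predict on a 3-feature vector [depth, bbox.size_x, bbox.size_y] and publishes the result in centimeters, so any scikit-learn-style regressor saved with joblib satisfies its interface. The commit does not include the training script; the sketch below is a hypothetical example of how such a model could be produced, with placeholder data and an assumed LinearRegression model.

# Hypothetical training sketch: not part of this commit. The samples and the
# LinearRegression choice are placeholders; only the feature layout
# [depth_m, bbox_size_x_px, bbox_size_y_px] -> radius_cm and the joblib dump
# match what estimate.py expects.
import joblib
import numpy as np
from sklearn.linear_model import LinearRegression

X = np.array([
    [0.8, 120.0, 118.0],   # placeholder: large ball, close to the camera
    [1.2,  55.0,  54.0],   # placeholder: small ball, mid-range
    [1.6,  62.0,  61.0],
    [2.0,  35.0,  34.0],
])
y = np.array([9.5, 5.0, 7.5, 5.5])  # placeholder radii in cm

model = LinearRegression().fit(X, y)
joblib.dump(model, "regression_model2.joblib")  # path expected (hardcoded) by estimate.py
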
6 changes: 3 additions & 3 deletions realsense-ros/realsense2_camera/launch/rs_camera.launch
@@ -3,7 +3,7 @@
<arg name="serial_no" default=""/>
<arg name="usb_port_id" default=""/>
<arg name="device_type" default=""/>
<arg name="json_file_path" default="$(find realsense2_camera)/launch/preset.json"/>
<arg name="json_file_path" default=""/>
<arg name="camera" default="camera"/>
<arg name="tf_prefix" default="$(arg camera)"/>
<arg name="external_manager" default="false"/>
@@ -19,8 +19,8 @@
<arg name="depth_height" default="480"/>
<arg name="enable_depth" default="true"/>

<arg name="confidence_width" default="640"/>
<arg name="confidence_height" default="640"/>
<arg name="confidence_width" default="640"/>
<arg name="confidence_height" default="480"/>
<arg name="enable_confidence" default="false"/>
<arg name="confidence_fps" default="-1"/>

2 changes: 1 addition & 1 deletion yolov7-ros/launch/yolov7.launch
@@ -3,7 +3,7 @@
ns="yolov7">
<!-- Download the official weights from the original repo -->
<param name="weights_path" type="str"
value="$(find yolov7_ros)/src/yolov7-tiny.pt"/>
value="$(find yolov7_ros)/src/ball-tiny.pt"/>
<!-- Path to a class_labels.txt file containing your desired class labels. The i-th entry corresponds to the i-th class id. For example, in coco class label 0 corresponds to 'person'. Files for the coco and berkeley deep drive datasets are provided in the 'class_labels/' directory. If you leave it empty then no class labels are visualized.-->
<param name="classes_path" type="str" value="$(find yolov7_ros)/class_labels/coco.txt" />
<!-- topic name to subscribe to -->
