Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Pr/estimate/vo/world offset #593

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,14 @@

#include <aerial_robot_estimation/sensor/base_plugin.h>
#include <geometry_msgs/Vector3Stamped.h>
#include <geometry_msgs/TransformStamped.h>
#include <kalman_filter/kf_pos_vel_acc_plugin.h>
#include <nav_msgs/Odometry.h>
#include <sensor_msgs/JointState.h>
#include <spinal/ServoControlCmd.h>
#include <std_msgs/Empty.h>
#include <tf2_ros/static_transform_broadcaster.h>


namespace sensor_plugin
{
Expand Down Expand Up @@ -109,6 +112,8 @@ namespace sensor_plugin
double reference_timestamp_;
aerial_robot_msgs::States vo_state_;

tf2_ros::StaticTransformBroadcaster static_broadcaster_; // publish the transform between the world and vo frame

void rosParamInit();
void servoControl(const ros::TimerEvent & e);
void estimateProcess();
Expand Down
44 changes: 27 additions & 17 deletions aerial_robot_estimation/src/sensor/vo.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -238,33 +238,43 @@ namespace sensor_plugin
}
}

/* step1: set the init offset from world to the baselink of UAV from egomotion estimation (e.g. yaw) */
/** ^{w}H_{b} **/
world_offset_tf_.setRotation(tf::createQuaternionFromYaw(estimator_->getState(State::YAW_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE)[0]));
/** step1: ^{w}H_{b'}, b': level frame of b **/
tf::Transform w_bdash_f(tf::createQuaternionFromYaw(estimator_->getState(State::YAW_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE)[0]));

tf::Vector3 world_offset_pos = estimator_->getPos(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE);
tf::Vector3 baselink_pos = estimator_->getPos(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE);
if(estimator_->getStateStatus(State::X_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE))
world_offset_tf_.getOrigin().setX(world_offset_pos.x());
w_bdash_f.getOrigin().setX(baselink_pos.x());
if(estimator_->getStateStatus(State::Y_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE))
world_offset_tf_.getOrigin().setY(world_offset_pos.y());
w_bdash_f.getOrigin().setY(baselink_pos.y());
if(estimator_->getStateStatus(State::Z_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE))
world_offset_tf_.getOrigin().setZ(world_offset_pos.z());
w_bdash_f.getOrigin().setZ(baselink_pos.z());

/* set the init offset from world to the baselink of UAV if we know the ground truth */
/* set the offset if we know the ground truth */
if(estimator_->getStateStatus(State::YAW_BASE, aerial_robot_estimation::GROUND_TRUTH))
{
world_offset_tf_.setOrigin(estimator_->getPos(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH));
world_offset_tf_.setRotation(tf::createQuaternionFromYaw(estimator_->getState(State::YAW_BASE, aerial_robot_estimation::GROUND_TRUTH)[0]));

double y, p, r; world_offset_tf_.getBasis().getRPY(r, p, y);
w_bdash_f.setOrigin(estimator_->getPos(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH));
w_bdash_f.setRotation(tf::createQuaternionFromYaw(estimator_->getState(State::YAW_BASE, aerial_robot_estimation::GROUND_TRUTH)[0]));
}

/* step2: also consider the offset tf from baselink to sensor */
/** ^{w}H_{b} * ^{b}H_{vo} * ^{vo}H_{w_vo} = ^{w}H_{w_vo} **/
world_offset_tf_ *= (sensor_tf_ * raw_sensor_tf.inverse());
/** step2: ^{vo}H_{b'} **/
tf::Transform vo_bdash_f = raw_sensor_tf * sensor_tf_.inverse(); // ^{vo}H_{b}
double r,p,y;
vo_bdash_f.getBasis().getRPY(r,p,y);
vo_bdash_f.setRotation(tf::createQuaternionFromYaw(y)); // ^{vo}H_{b'}

/** step3: ^{w}H_{vo} = ^{w}H_{b'} * ^{b'}H_{vo} **/
world_offset_tf_ = w_bdash_f * vo_bdash_f.inverse();

/* publish the offset tf if necessary */
geometry_msgs::TransformStamped static_transformStamped;
static_transformStamped.header.stamp = vo_msg->header.stamp;
static_transformStamped.header.frame_id = "world";
static_transformStamped.child_frame_id = vo_msg->header.frame_id;
tf::transformTFToMsg(world_offset_tf_, static_transformStamped.transform);
static_broadcaster_.sendTransform(static_transformStamped);

tf::Vector3 init_pos = w_bdash_f.getOrigin();

//double y, p, r; raw_sensor_tf.getBasis().getRPY(r, p, y);
tf::Vector3 init_pos = (world_offset_tf_ * raw_sensor_tf * sensor_tf_.inverse()).getOrigin();

for(auto& fuser : estimator_->getFuser(aerial_robot_estimation::EGOMOTION_ESTIMATE))
{
Expand Down
Loading