ROS Experiment Notes — Multi-Sensor Fusion (IMU, PnP, VO) Based on an Augmented State EKF

Posted by gwpscut


Previous blog posts implemented the individual components (IMU processing, marker PnP, stereo visual odometry) as separate experiments.

This post uses an Augmented State EKF to fuse these sensors (the results still seem to have some issues and are being debugged...).

 

The experiment video:

Multi-sensor fusion (IMU, PnP, VO) based on an Augmented State EKF

 

Contents

Introduction to the Augmented State EKF

Code

Test


 

Introduction to the Augmented State EKF

In some cases only relative state measurements are available, for example the keyframe-based visual odometry introduced in the earlier post 《ROS实验笔记之——基于stereo camera的视觉里程计的实现》. In that case the measurement also involves the state at the keyframe.

The corresponding state stacks the current (main) state with the keyframe states, and the covariance matrix is partitioned into the matching blocks.
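A sketch of this structure, with $m$ keyframe (augmented) states stacked under the main state:

$$
x = \begin{bmatrix} x_{\text{main}} \\ x_{a_1} \\ \vdots \\ x_{a_m} \end{bmatrix},
\qquad
\Sigma = \begin{bmatrix}
\Sigma_{xx} & \Sigma_{x a_1} & \cdots & \Sigma_{x a_m} \\
\Sigma_{a_1 x} & \Sigma_{a_1 a_1} & \cdots & \Sigma_{a_1 a_m} \\
\vdots & \vdots & \ddots & \vdots \\
\Sigma_{a_m x} & \Sigma_{a_m a_1} & \cdots & \Sigma_{a_m a_m}
\end{bmatrix}
$$

In the implementation below, the main state is the usual 15-D IMU state (position, orientation, velocity, gyroscope bias, accelerometer bias) and a single 6-DoF keyframe pose is kept (m = 1), which is why the code works with 21-dimensional means and 21×21 covariances.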

The state entries corresponding to keyframes are called the augmented states (there are m of them). Augmented states are added when a new keyframe is created and removed when the old one is discarded; the concrete update used in this project is given below together with the keyframe-change step.

Introducing keyframes in this way provides global localization and thus preserves the overall performance, because measuring against a fixed keyframe keeps the estimate from drifting between keyframe switches.

Next, an example. Suppose a quadrotor has a good acceleration sensor (IMU) and keyframe-based visual odometry.

To fuse the relative pose measurements from this single-keyframe visual odometry, the state is augmented with the keyframe pose:

The corresponding process model is as follows (the prediction step only affects the main state, not the augmented state):
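A sketch of this, matching how predictIMU() further down propagates the mean (the 15-D main state follows the IMU kinematics $f(\cdot)$, while the keyframe pose is simply copied over):

$$
\begin{bmatrix} \dot{x}_{\text{main}} \\ \dot{x}_{\text{aug}} \end{bmatrix}
= \begin{bmatrix} f(x_{\text{main}}, u_{\text{imu}}, n) \\ 0 \end{bmatrix},
\qquad
x_{\text{main},k+1} \approx x_{\text{main},k} + \Delta t \, f(x_{\text{main},k}, u_{\text{imu}}, 0)
$$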

The corresponding measurement models are:
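A sketch of the two models used here: the marker PnP gives an absolute body pose in the world frame and involves only the main state, while the stereo VO gives the body pose relative to the keyframe stored in the augmented state:

$$
z_{\text{pnp}} = g_1(x_{\text{main}}) + v_1, \qquad
z_{\text{vo}} = g_2(x_{\text{main}}, x_{\text{aug}}) + v_2
$$

These correspond to modelG1()/jacobiG1x() and modelG2()/jacobiG2x() in the code, with measurement noise covariances Rt1_ and Rt2_.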

When the keyframe changes, the augmented state must be updated as follows:
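Written with a removal operator $M_r$ (drop the old keyframe block) and an addition operator $M_a$ (copy the current pose back in as the new keyframe), which is what changeAugmentedState() applies in the code below:

$$
x \leftarrow M_a M_r \, x, \qquad \Sigma \leftarrow M_a M_r \, \Sigma \, M_r^\top M_a^\top
$$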

 

Code

Regarding TF (frame transformations): all relative pose measurements are transformed into the body frame (aligned with the IMU), and the global pose measurement (from marker PnP) is transformed into the world frame.
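As a minimal sketch of the idea for the PnP measurement (the helper name bodyPoseInWorld is made up for illustration; the actual tag pose and camera-to-IMU extrinsics are hard-coded later in rectMeasPnP()):

#include <Eigen/Dense>

// Sketch: lift a tag pose detected in the camera frame into a body pose in the world frame.
//   T_w_tag   : pose of the tag in the world frame (known marker placement)
//   T_cam_tag : pose of the tag in the camera frame (the PnP output)
//   T_i_cam   : pose of the camera in the IMU/body frame (extrinsic calibration)
Eigen::Matrix4d bodyPoseInWorld(const Eigen::Matrix4d& T_w_tag,
                                const Eigen::Matrix4d& T_cam_tag,
                                const Eigen::Matrix4d& T_i_cam) {
  // T_w_b = T_w_tag * T_tag_cam * T_cam_b = T_w_tag * T_cam_tag^-1 * T_i_cam^-1
  return T_w_tag * T_cam_tag.inverse() * T_i_cam.inverse();
}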

The remaining reasoning and explanations are written as comments directly in the code.

ekf_math.h

#pragma once

#include <Eigen/Eigen>
#include <Eigen/Geometry>
#include <math.h>

// A collection of helper functions, similar to the earlier EKF project, but defined in this header to keep the project tidy.

inline Eigen::Vector3d rotationMatrix2EulerVector_zxy(Eigen::Matrix3d R)
{
    Eigen::Vector3d xyz_vector;
    double phi = asinf64(R(2, 1));
    double theta = atan2f64(-R(2, 0), R(2, 2));
    double psi = atan2f64(-R(0, 1), R(1, 1));
    xyz_vector << phi, theta, psi;
    return xyz_vector;
}
    
inline Eigen::Matrix3d Rotation_matrix(double roll, double pitch, double yaw)
{
    Eigen::Matrix3d R;
    R = Eigen::AngleAxisd(yaw, Eigen::Vector3d::UnitZ()) * 
        Eigen::AngleAxisd(roll, Eigen::Vector3d::UnitX()) * 
        Eigen::AngleAxisd(pitch, Eigen::Vector3d::UnitY());
    return R;
}

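// Ginv() returns the inverse of the matrix G (commented out inside the function) that maps
// Z-X-Y Euler-angle rates [roll_dot, pitch_dot, yaw_dot] to body angular velocity
// (assumed convention; the filter uses G^-1 * omega to propagate orientation from gyro readings).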
inline Eigen::Matrix3d Ginv(double roll, double pitch, double yaw)
{
    Eigen::Matrix3d G_inv;
    // G << cos(pitch), 0, -cos(roll) * sin(pitch),
    //      0,          1, sin(roll),
    //      sin(pitch), 0, cos(roll) * cos(pitch);

    G_inv << cos(pitch),                        0,                      sin(pitch),
            (sin(roll)*sin(pitch))/cos(roll),   1, -(cos(pitch)*sin(roll))/cos(roll),
            -sin(pitch)/cos(roll),              0,             cos(pitch)/cos(roll);
    return G_inv;
}
inline Eigen::Matrix3d delGinv_delroll(double roll, double pitch)
{
    // \frac{\partial G^{-1}}{\partial \phi}
    Eigen::Matrix3d deltaG;
    double theta = pitch;
    double phi = roll;
    double cos2_1 = 1.0 / (cos(phi) * cos(phi) + 1e-8);
    deltaG << 0, 0, 0, 
                sin(theta) * cos2_1, 0, -cos(theta) * cos2_1,
                -sin(phi) * sin(theta) * cos2_1, 0, cos(theta) * sin(phi) * cos2_1;
    // last entry: d(cos(theta)/cos(phi))/d(phi) = cos(theta)*sin(phi)/cos^2(phi)
    return deltaG; 
}


inline Eigen::Matrix3d delGinv_delpitch(double roll, double pitch)
{
    // \frac{\partial G^{-1}}{\partial \theta}
    Eigen::Matrix3d deltaG;
    double theta = pitch;
    double phi = roll;
    double cos_1 = 1.0 / (cos(phi) + 1e-8);
    deltaG << -sin(theta), 0, cos(theta), 
        cos(theta) * sin(phi) * cos_1, 0, sin(theta) * sin(phi) * cos_1,
        -cos(theta)* cos_1, 0 , - sin(theta) * cos_1;
    return deltaG;
}

inline Eigen::Matrix3d deltaR_deltaroll(double roll, double pitch, double yaw)
{
    // \frac{\partial R}{\partial \phi}
    Eigen::Matrix3d deltaR;
    double theta = pitch;
    double phi = roll;
    double psi = yaw;

    deltaR << 
        -cos(phi)*sin(psi)*sin(theta),  sin(phi)*sin(psi), cos(phi)*cos(theta)*sin(psi),
         cos(phi)*cos(psi)*sin(theta), -cos(psi)*sin(phi), -cos(phi)*cos(psi)*cos(theta),
         sin(phi)*sin(theta), cos(phi), -cos(theta)*sin(phi);
        
    return deltaR;
}

inline Eigen::Matrix3d deltaR_deltapitch(double roll, double pitch, double yaw)
{
    // \frac{\partial R}{\partial \theta}
    Eigen::Matrix3d deltaR;
    double theta = pitch;
    double phi = roll;
    double psi = yaw;

    deltaR << 
    - cos(psi)*sin(theta) - cos(theta)*sin(phi)*sin(psi), 0, cos(psi)*cos(theta) - sin(phi)*sin(psi)*sin(theta),
         cos(psi)*cos(theta)*sin(phi) - sin(psi)*sin(theta), 0,  cos(theta)*sin(psi) + cos(psi)*sin(theta)*sin(phi),
         -cos(phi)*cos(theta)                               , 0, -cos(phi)*sin(theta);
    return deltaR;
}

inline Eigen::Matrix3d deltaR_deltayaw(double roll, double pitch, double yaw)
{
    // \frac{\partial R}{\partial \psi}
    Eigen::Matrix3d deltaR;
    double theta = pitch;
    double phi = roll;
    double psi = yaw;

    deltaR << 
        - cos(theta)*sin(psi) - cos(psi)*sin(phi)*sin(theta), -cos(phi)*cos(psi), cos(psi)*cos(theta)*sin(phi) - sin(psi)*sin(theta),
          cos(psi)*cos(theta) - sin(phi)*sin(psi)*sin(theta), -cos(phi)*sin(psi), cos(psi)*sin(theta) + sin(phi)*sin(psi)*cos(theta),
          0, 0, 0;
    return deltaR;
}

inline Eigen::Matrix3d deltatheta_deltaR(Eigen::Matrix3d R)
{
  // derivative of theta = atan2(-R(2,0), R(2,2)) w.r.t. the entries of R
  Eigen::Matrix3d dtheta_dR = Eigen::MatrixXd::Zero(3, 3);
  dtheta_dR(2, 0) = -R(2, 2) / (R(2, 0) * R(2, 0) + R(2, 2) * R(2, 2));
  dtheta_dR(2, 2) = R(2, 0) / (R(2, 0) * R(2, 0) + R(2, 2) * R(2, 2));
  return dtheta_dR;
}
inline Eigen::Matrix3d deltaphi_deltaR(Eigen::Matrix3d R)
{
  // derivative of phi = asin(R(2,1)) w.r.t. the entries of R
  Eigen::Matrix3d dphi_dR = Eigen::MatrixXd::Zero(3, 3);
  dphi_dR(2, 1) = 1.0 / sqrt(1 - R(2, 1) * R(2, 1));
  return dphi_dR;
}
inline Eigen::Matrix3d deltayaw_deltaR(Eigen::Matrix3d R)
{
  // derivative of psi = atan2(-R(0,1), R(1,1)) w.r.t. the entries of R
  Eigen::Matrix3d dyaw_dR = Eigen::MatrixXd::Zero(3, 3);
  dyaw_dR(0, 1) = -R(1, 1) / (R(0, 1) * R(0, 1) + R(1, 1) * R(1, 1));
  dyaw_dR(1, 1) = R(0, 1) / (R(0, 1) * R(0, 1) + R(1, 1) * R(1, 1));
  return dyaw_dR;
}
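As a quick, hypothetical sanity check (not part of the original project), the analytic Jacobians above can be compared against finite differences of Rotation_matrix(); a minimal sketch, assuming ekf_math.h is on the include path:

// jacobian_check.cpp -- hypothetical sanity check, not part of the original project
#include <iostream>
#include "ekf_math.h"

int main() {
  const double roll = 0.3, pitch = -0.2, yaw = 1.1, eps = 1e-6;
  // central finite difference of R with respect to roll
  Eigen::Matrix3d numeric =
      (Rotation_matrix(roll + eps, pitch, yaw) - Rotation_matrix(roll - eps, pitch, yaw)) / (2.0 * eps);
  Eigen::Matrix3d analytic = deltaR_deltaroll(roll, pitch, yaw);
  std::cout << "max abs error: " << (numeric - analytic).cwiseAbs().maxCoeff() << std::endl;
  return 0;
}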

ekf_node.cpp

#include <iostream>
#include <ros/ros.h>
#include <ros/console.h>
#include <sensor_msgs/Imu.h>
#include <sensor_msgs/Range.h>
#include <nav_msgs/Odometry.h>
#include <Eigen/Eigen>

#include <ekf_filter.h>

// for debug
// #include <backward.hpp>
// namespace backward {
// backward::SignalHandling sh;
// }

using namespace std;
using namespace Eigen;

int main(int argc, char** argv) {
  ros::init(argc, argv, "aug_ekf");
  ros::NodeHandle n("~");

  ekf_imu_vision::EKFImuVision aug_ekf;
  aug_ekf.init(n);  // initialize the filter class (parameters, subscribers, publishers)

  ros::spin();
}

ekf_filter.cpp

#include <ekf_filter.h>


namespace ekf_imu_vision {
EKFImuVision::EKFImuVision(/* args */) {}

EKFImuVision::~EKFImuVision() {}

void EKFImuVision::init(ros::NodeHandle& nh) {
  node_ = nh;

  /* ---------- parameter ---------- */
  Qt_.setZero();
  Rt1_.setZero();
  Rt2_.setZero();

  // addition and removal of the augmented state
  
  // TODO
  // set M_a_ and M_r_
  M_r_.setZero();
  M_r_.block<15,15>(0,0) = Eigen::MatrixXd::Identity(15,15);

  M_a_.setZero();
  M_a_.block<15,15>(0,0) = Eigen::MatrixXd::Identity(15, 15);
  M_a_.block<6,6>(15,0) = Eigen::MatrixXd::Identity(6, 6);
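  // M_r_ keeps only the 15-D main state; M_a_ then re-augments it by copying the current
  // position/orientation (the first 6 entries) into the keyframe slot (dimensions presumably
  // 15x21 / 21x15 as declared in ekf_filter.h), so M_a_ * M_r_ implements the keyframe switch.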


  for (int i = 0; i < 3; i++) {
    /* process noise */
    node_.param("aug_ekf/ng", Qt_(i, i), -1.0);
    node_.param("aug_ekf/na", Qt_(i + 3, i + 3), -1.0);
    node_.param("aug_ekf/nbg", Qt_(i + 6, i + 6), -1.0);
    node_.param("aug_ekf/nba", Qt_(i + 9, i + 9), -1.0);
    node_.param("aug_ekf/pnp_p", Rt1_(i, i), -1.0);
    node_.param("aug_ekf/pnp_q", Rt1_(i + 3, i + 3), -1.0);
    node_.param("aug_ekf/vo_pos", Rt2_(i, i), -1.0);
    node_.param("aug_ekf/vo_rot", Rt2_(i + 3, i + 3), -1.0);
  }

  init_        = false;

  for(int i = 0; i < 4; i++)
    latest_idx[i] = 0;

  /* ---------- subscribe and publish ---------- */
  imu_sub_ =
      node_.subscribe<sensor_msgs::Imu>("/dji_sdk_1/dji_sdk/imu", 100, &EKFImuVision::imuCallback, this);
  pnp_sub_     = node_.subscribe<nav_msgs::Odometry>("tag_odom", 10, &EKFImuVision::PnPCallback, this);
  // opti_tf_sub_ = node_.subscribe<geometry_msgs::PointStamped>("opti_tf_odom", 10,
                                                              // &EKFImuVision::opticalCallback, this);
  stereo_sub_  = node_.subscribe<stereo_vo::relative_pose>("/vo/Relative_pose", 10,
                                                          &EKFImuVision::stereoVOCallback, this);
  fuse_odom_pub_ = node_.advertise<nav_msgs::Odometry>("ekf_fused_odom", 10);
  path_pub_         = node_.advertise<nav_msgs::Path>("/aug_ekf/Path", 100);

  ros::Duration(0.5).sleep();

  ROS_INFO("Start ekf.");
}

void EKFImuVision::rectMeasPnP(const nav_msgs::OdometryConstPtr& msg, Vec6 &ut){
  double rel_x, rel_y, rel_z, rel_qw, rel_qx, rel_qy, rel_qz;
  rel_x = msg->pose.pose.position.x;
  rel_y = msg->pose.pose.position.y;
  rel_z = msg->pose.pose.position.z;

  rel_qw = msg->pose.pose.orientation.w;
  rel_qx = msg->pose.pose.orientation.x;
  rel_qy = msg->pose.pose.orientation.y;
  rel_qz = msg->pose.pose.orientation.z;
  Eigen::Quaterniond q_kc(rel_qw, rel_qx, rel_qy, rel_qz);
  Mat3x3 R_cam_tag(q_kc);
  Eigen::Matrix4d T_w_tag, T_i_down, T_cam_tag, T_w_b;
  T_cam_tag.setZero();
  T_cam_tag.block<3,3>(0,0) = R_cam_tag;
  T_cam_tag(0,3) = rel_x;
  T_cam_tag(1,3) = rel_y;
  T_cam_tag(2,3) = rel_z;
  T_cam_tag(3,3) = 1;

  T_w_tag<< 0, 1, 0, 0,
          1, 0, 0 , 0,
          0 ,0, -1, 0,
          0, 0, 0, 1;
  T_i_down << 1, 0, 0, 0.07,
          0, -1, 0, -0.02,
          0, 0, -1, 0.01,
          0, 0, 0, 1;
  T_w_b = T_w_tag * T_cam_tag.inverse() * T_i_down.inverse();

  Vec3 rpy_wb = rotation2Euler(T_w_b.block<3,3>(0,0));

  Vec6 temp;

  temp<<T_w_b.block<3,1>(0,3), rpy_wb;
  ut = temp;

}

void EKFImuVision::PnPCallback(const nav_msgs::OdometryConstPtr& msg) {

  // TODO 
  // construct a new AugState using the absolute measurement from marker PnP and insert the new state into the queue
  ROS_INFO("Receiving pnp: %f", msg->header.stamp.toSec());

  AugState state0;
  state0.type = pnp;
  state0.time_stamp = msg->header.stamp;
  rectMeasPnP(msg, state0.ut);

  if(!init_){
    int insert_pos = -1, que_end = aug_state_hist_.size()-1;
    if(que_end<0 || state0.time_stamp < aug_state_hist_.front().time_stamp)
      insert_pos = 0;
    else {
      for(int i=que_end;i>=0;i--)
        if(aug_state_hist_.at(i).time_stamp<state0.time_stamp) {
          insert_pos = i+1;
          break;
        }
    }
    if(insert_pos<que_end+1)
      latest_idx[imu]++;
    aug_state_hist_.insert(aug_state_hist_.begin()+insert_pos, state0);
    aug_state_hist_.erase(aug_state_hist_.begin(),aug_state_hist_.begin()+insert_pos);
    latest_idx[imu]-=insert_pos;
    if(latest_idx[imu]<0)
      latest_idx[imu]++;

    initUsingPnP(aug_state_hist_.begin());
    latest_idx[pnp] = 0;
    init_ = true;
    return;
  }

  insertNewState(state0, false);

}

void EKFImuVision::stereoVOCallback(const stereo_vo::relative_poseConstPtr& msg) {

  // TODO
  // if the keyframe is changed, label the state associated with the new keyframe in the queue
  // construct a new AugState using the relative measurement from VO and insert the new state into the queue

  ROS_INFO("Receiving VO : %f, KF: %f", msg->header.stamp.toSec(), msg->key_stamp.toSec());
//  ROS_INFO("Receiving VO : %d, KF: %d", msg->header.stamp.nsec, msg->previous_stamp.nsec);

  if(!init_)
    return;
  if(msg->key_stamp<aug_state_hist_.front().time_stamp)
    return;

  ros::Time keyframe_time = msg->key_stamp, vo_time = msg->header.stamp;
  double rel_x, rel_y, rel_z, rel_qw, rel_qx, rel_qy, rel_qz;
  rel_x = msg->relative_pose.position.x;
  rel_y = msg->relative_pose.position.y;
  rel_z = msg->relative_pose.position.z;

  rel_qw = msg->relative_pose.orientation.w;
  rel_qx = msg->relative_pose.orientation.x;
  rel_qy = msg->relative_pose.orientation.y;
  rel_qz = msg->relative_pose.orientation.z;

  Eigen::Quaterniond q_kc(rel_qw, rel_qx, rel_qy, rel_qz);
  Mat3x3 R_kc(q_kc);
  Vec3 rpy = rotation2Euler(R_kc);

  AugState to_insert;
  to_insert.time_stamp = vo_time;
  to_insert.ut(0) = rel_x;
  to_insert.ut(1) = rel_y;
  to_insert.ut(2) = rel_z;
  to_insert.ut.segment(3,3) = rpy;
  to_insert.type = vo;
  to_insert.key_frame_time_stamp = keyframe_time;

  if(keyframe_time == aug_state_hist_.at(latest_idx[keyframe]).time_stamp)
    insertNewState(to_insert, false);
  else
    insertNewState(to_insert, true);


}

void EKFImuVision::imuCallback(const sensor_msgs::ImuConstPtr& imu_msg) {

  // TODO
  // construct a new AugState using the IMU input and insert the new state into the queue

//  ROS_INFO("Receiving IMU: %f", imu_msg->header.stamp.toSec());

  double wx, wy, wz, ax, ay, az;

  wx = imu_msg->angular_velocity.x;
  wy = imu_msg->angular_velocity.y;
  wz = imu_msg->angular_velocity.z;
  ax = imu_msg->linear_acceleration.x;
  ay = imu_msg->linear_acceleration.y;
  az = imu_msg->linear_acceleration.z;

  Vec6 ut;
  ut<<wx, wy, wz, ax, ay, az;

  AugState curr;
  curr.type = imu;
  curr.time_stamp = imu_msg->header.stamp;
  curr.ut = ut;
  aug_state_hist_.push_back(curr);
  latest_idx[imu]=aug_state_hist_.size()-1;

}

bool EKFImuVision::insertNewState(AugState& new_state, bool change_keyframe) {

  // TODO
  // insert the new AugState to the queue and then do repropagation to update the mean & covariance using the input/measurement. Call repropagate() here.

  int insert_pos = -1, que_end = aug_state_hist_.size()-1;
  if(que_end<0 || new_state.time_stamp < aug_state_hist_.front().time_stamp)
    insert_pos = 0;
  else {
    for(int i=que_end;i>=0;i--)
      if(aug_state_hist_.at(i).time_stamp<new_state.time_stamp) {
        insert_pos = i+1;
        break;
      }
  }

  for(unsigned int & i : latest_idx)
    if(i>=insert_pos)
      i++;
  aug_state_hist_.insert(aug_state_hist_.begin()+insert_pos, new_state);

  if(new_state.type == pnp){
    int prop_start = min(max(latest_idx[pnp], latest_idx[vo])+1, (unsigned int)insert_pos),
        prop_end = max(max(latest_idx[pnp], latest_idx[vo]), (unsigned int)insert_pos);
//    for (auto it = aug_state_hist_.begin() + prop_start; it != aug_state_hist_.begin() + prop_end + 1; it++) {
    for (int i = prop_start; i <= prop_end; i++) {
      if(aug_state_hist_.at(i).type == pnp){
        updatePnP(aug_state_hist_.at(i), aug_state_hist_.at(i-1));
      } else if(aug_state_hist_.at(i).type == imu){
        predictIMU(aug_state_hist_.at(i), aug_state_hist_.at(i-1));
      } else if(aug_state_hist_.at(i).type == vo){
        updateVO(aug_state_hist_.at(i),aug_state_hist_.at(i-1));
      } else{
        updateVO(aug_state_hist_.at(i), aug_state_hist_.at(i-1));
        changeAugmentedState(aug_state_hist_.at(i));
      }
    }

    latest_idx[pnp] = insert_pos;

  } else{
    if(change_keyframe){
      int newkf_pos = -1;
      for(int i=insert_pos;i>0;i--){
        if(aug_state_hist_.at(i-1).time_stamp <= new_state.time_stamp &&
          new_state.time_stamp <= aug_state_hist_.at(i).time_stamp){
          if(new_state.time_stamp - aug_state_hist_.at(i-1).time_stamp >
            aug_state_hist_.at(i).time_stamp - new_state.time_stamp){
            newkf_pos = i;
          } else newkf_pos = i-1;
          break;
        }
      }

      aug_state_hist_.at(insert_pos).key_frame_time_stamp = aug_state_hist_.at(newkf_pos).time_stamp;
      int prop_start = min(max(latest_idx[pnp], latest_idx[vo])+1, (unsigned int)newkf_pos),
              prop_end = max(max(latest_idx[pnp], latest_idx[vo]), (unsigned int)insert_pos);

      for (int i = prop_start; i <= prop_end; i++) {
        if(aug_state_hist_.at(i).type == pnp){
          updatePnP(aug_state_hist_.at(i), aug_state_hist_.at(i-1));
        } else if(aug_state_hist_.at(i).type == imu){
          predictIMU(aug_state_hist_.at(i), aug_state_hist_.at(i-1));
        } else if(aug_state_hist_.at(i).type == vo){
          updateVO(aug_state_hist_.at(i),aug_state_hist_.at(i-1));
        } else{
          updateVO(aug_state_hist_.at(i), aug_state_hist_.at(i-1));
          changeAugmentedState(aug_state_hist_.at(i));
        }
        if(i == newkf_pos){
          if(aug_state_hist_.at(i).type == imu || aug_state_hist_.at(i).type == pnp){
            aug_state_hist_.at(i).type = keyframe;
            Vec6 self_tf;
            self_tf.setZero();
            aug_state_hist_.at(i).ut = self_tf;
          }
          changeAugmentedState(aug_state_hist_.at(i));
        }
      }

      latest_idx[keyframe] = newkf_pos;


    }else{
      int prop_start = min(max(latest_idx[pnp], latest_idx[vo])+1, (unsigned int)insert_pos),
              prop_end = max(max(latest_idx[pnp], latest_idx[vo]), (unsigned int)insert_pos);
      for (int i = prop_start; i <= prop_end; i++) {
        if(aug_state_hist_.at(i).type == pnp){
          updatePnP(aug_state_hist_.at(i), aug_state_hist_.at(i-1));
        } else if(aug_state_hist_.at(i).type == imu){
          predictIMU(aug_state_hist_.at(i), aug_state_hist_.at(i-1));
        } else if(aug_state_hist_.at(i).type == vo){
          updateVO(aug_state_hist_.at(i),aug_state_hist_.at(i-1));
        } else{
          updateVO(aug_state_hist_.at(i), aug_state_hist_.at(i-1));
          changeAugmentedState(aug_state_hist_.at(i));
        }
      }

    }

    latest_idx[vo] = insert_pos;
  }


  removeOldState();


  // then remove unnecessary states in the queue by calling removeOldState()
//  removeOldState();   // TODO

  publishFusedOdom();
  return true;

}

void EKFImuVision::repropagate(deque<AugState>::iterator& new_input_it, bool& init) {


  // TODO
  // repropagate along the queue from the new input (or keyframe when keyframe is changed), according to the type of the inputs / measurements
//  if


  // publish the latest fused odom

  publishFusedOdom();

}

void EKFImuVision::removeOldState() {

  // TODO
  // remove the unnecessary old states to prevent the queue from becoming too long
  if(aug_state_hist_.size()>10){
    int useful = min(min(latest_idx[0],latest_idx[1]), min(latest_idx[2], latest_idx[3]))-1;
    if(useful<10)
      return;

    ROS_INFO("Q before erase: %ld, useful: %d, latest: %u, %u, %u, %u", aug_state_hist_.size(), useful,
            latest_idx[0], latest_idx[1], latest_idx[2], latest_idx[3]);

    aug_state_hist_.erase(aug_state_hist_.begin(), aug_state_hist_.begin()+useful);

    latest_idx[0]-=useful;
    latest_idx[1]-=useful;
    latest_idx[2]-=useful;
    latest_idx[3]-=useful;

    ROS_INFO("Q after erase: %ld, useful: %d, latest: %u, %u, %u, %u", aug_state_hist_.size(), useful,
             latest_idx[0], latest_idx[1], latest_idx[2], latest_idx[3]);
  }
}

void EKFImuVision::predictIMU(AugState& cur_state, AugState& prev_state) {
//  ROS_INFO_STREAM("predIMU:" << prev_state.mean.head(3).transpose()<<", "<<cur_state.time_stamp.sec%100<<"+"<<cur_state.time_stamp.nsec/100000);

  // TODO
  // one of the main components of repropagation: predict the new state from the IMU inputs
  double dt = (cur_state.time_stamp-prev_state.time_stamp).toSec();
//  cur_state.key_frame_time_stamp = prev_state.key_frame_time_stamp;

  // copy aug states
  cur_state.mean.segment(15,6) = prev_state.mean.segment(15,6);
  Vec12 v_zeros;
  Vec15 xdot;
  v_zeros.setZero();
  xdot = modelF(prev_state.mean.segment(0,15), cur_state.ut, v_zeros);

  cur_state.mean.segment(0,15) = prev_state.mean.segment(0,15) + dt * xdot;

  Mat15x15 A_t = jacobiFx(prev_state.mean.segment(0,15), cur_state.ut, v_zeros), F_t;
  F_t = Mat15x15::Identity(15,15)+dt*A_t;

  Mat15x12 U_t = jacobiFn(prev_state.mean.segment(0,15), cur_state.ut, v_zeros), V_t;
  V_t = dt * U_t;

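  // covariance propagation for the augmented state (what the four blocks below implement):
  //   Sigma_xx <- F_t * Sigma_xx * F_t^T + V_t * Qt_ * V_t^T   (main 15x15 block)
  //   Sigma_xa <- F_t * Sigma_xa,  Sigma_ax <- Sigma_ax * F_t^T (cross terms)
  //   Sigma_aa unchanged                                        (keyframe 6x6 block)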
  cur_state.covariance.block<15,15>(0,0) = F_t * prev_state.covariance.block<15,15>(0,0) * F_t.transpose() + V_t * Qt_ * V_t.transpose();
  cur_state.covariance.block<6,15>(15,0) = prev_state.covariance.block<6,15>(15,0) * F_t.transpose();
  cur_state.covariance.block<15,6>(0,15) = F_t * prev_state.covariance.block<15,6>(0,15);
  cur_state.covariance.block<6,6>(15,15) = prev_state.covariance.block<6,6>(15,15);

}

void EKFImuVision::updatePnP(AugState& cur_state, AugState& prev_state) {
  ROS_INFO_STREAM("updaPNP:" << prev_state.mean.head(3).transpose()<<", "<<cur_state.time_stamp.sec%100+(cur_state.time_stamp.nsec*1e-9));
//  ROS_INFO_STREAM("updaPNP:" << prev_state.mean.segment(6, 3).transpose()<<", "<<cur_state.time_stamp.sec%100+(cur_state.time_stamp.nsec*1e-9));

  // TODO
  // one of the main components of repropagation: update the state with the marker PnP measurement
//  cur_state.key_frame_time_stamp = prev_state.key_frame_time_stamp;

  Vec6 v_zeros;
  v_zeros.setZero();
  Vec6 z_t0 = modelG1(prev_state.mean.segment(0,15),v_zeros);
  Vec6 inno = cur_state.ut - z_t0;

  ekf_imu_vision::clampEuler(inno(3));
  ekf_imu_vision::clampEuler(inno(4));
  ekf_imu_vision::clampEuler(inno(5));

  Mat21x6 K_t;
  Mat6x21 C_t;
  Mat6x6 W_t;
  C_t.setZero();
  C_t.block<6,15>(0,0) = jacobiG1x(prev_state.mean.segment(0,15), v_zeros);
  W_t = jacobiG1v(prev_state.mean.segment(0,15), v_zeros);
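  // standard EKF update applied to the full 21-D augmented state:
  //   K_t  = Sigma * C_t^T * (C_t * Sigma * C_t^T + W_t * Rt1_ * W_t^T)^{-1}
  //   mean <- mean + K_t * (z - g1(mean)),  Sigma <- (I - K_t * C_t) * Sigma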
  K_t = prev_state.covariance * C_t.transpose() * (C_t * prev_state.covariance * C_t.transpose() + W_t * Rt1_ * W_t.transpose()).inverse();

  cur_state.mean = prev_state.mean + K_t * inno;
  cur_state.covariance = prev_state.covariance - K_t * C_t * prev_state.covariance;

  ekf_imu_vision::clampEuler(cur_state.mean(3));
  ekf_imu_vision::clampEuler(cur_state.mean(4));
  ekf_imu_vision::clampEuler(cur_state.mean(5));

  ROS_INFO_STREAM("- after:" << cur_state.mean.head(3).transpose());
//  ROS_INFO_STREAM("- after:" << cur_state.mean.segment(6,3).transpose());

}



void EKFImuVision::updateVO(AugState& cur_state, AugState& prev_state) {
  ROS_INFO_STREAM("updatVO:" << prev_state.mean.head(3).transpose()<<", "<<cur_state.time_stamp.sec%100+(cur_state.time_stamp.nsec*1e-9));
//  ROS_INFO_STREAM("updatVO:" << prev_state.mean.segment(6,3).transpose()<<", "<<cur_state.time_stamp.sec%100+(cur_state.time_stamp.nsec*1e-9));
  // TODO
  // one of the main components of repropagation: update the state with the relative pose measurement
//  cur_state.key_frame_time_stamp = prev_state.key_frame_time_stamp;

  Vec6 v_zeros;
  v_zeros.setZero();
  // prev state: the prediction state
//  cur_state.mean.segment(0,15) = prev_state.mean.segment(0,15);
  Vec6 z_t0 = modelG2(prev_state.mean, v_zeros);
  Vec6 inno = cur_state.ut - z_t0;

  ekf_imu_vision::clampEuler(inno(3));
  ekf_imu_vision::clampEuler(inno(4));
  ekf_imu_vision::clampEuler(inno(5));

  Mat21x6 K_t;
  Mat6x21 C_t;
  Mat6x6 W_t;

  C_t = jacobiG2x(prev_state.mean, v_zeros);
  W_t = jacobiG2v(prev_state.mean, v_zeros);
  K_t = prev_state.covariance * C_t.transpose() * (C_t * prev_state.covariance * C_t.transpose() + W_t * Rt2_ * W_t.transpose()).inverse();

  cur_state.mean = prev_state.mean + K_t * inno;
  cur_state.covariance = prev_state.covariance - K_t * C_t * prev_state.covariance;

  ekf_imu_vision::clampEuler(cur_state.mean(3));
  ekf_imu_vision::clampEuler(cur_state.mean(4));
  ekf_imu_vision::clampEuler(cur_state.mean(5));

  ROS_INFO_STREAM("- after:" << cur_state.mean.head(3).transpose());
//  ROS_INFO_STREAM("- after:" << cur_state.mean.segment(6,3).transpose());

}

void EKFImuVision::changeAugmentedState(AugState& state) {
  ROS_WARN("----------------change keyframe------------------------");

  // TODO
  // one of the main components of repropagation: remove the old augmented part of the state and copy in the new one
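  //   x     <- M_a_ * M_r_ * x                       (drop the old keyframe pose, copy in the current pose)
  //   Sigma <- (M_a_ * M_r_) * Sigma * (M_a_ * M_r_)^T
  // so the new keyframe block starts fully correlated with the current pose estimate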
  state.covariance = M_a_ * M_r_ * state.covariance * M_r_.transpose() * M_a_.transpose();
  state.mean = M_a_ * M_r_ * state.mean;
  state.key_frame_time_stamp = state.time_stamp;
}

bool EKFImuVision::initFilter() {

  // TODO
  // init the filter when a keyframe after marker PnP measurements is available

  return false;
}


bool EKFImuVision::initUsingPnP(deque<AugState>::iterator start_it) {

  // TODO
  // init the state in the queue using marker PnP measurement
  

  AugState zero_state;
  Vec(21) mean;
  Mat(21, 21) covariance;
  mean.setZero();
  covariance.setIdentity();
  zero_state.mean = mean;
  zero_state.covariance = covariance;

  updatePnP(*start_it, zero_state);

  std::cout << "init PnP state: " << start_it->mean.transpose() << std::endl;


  return true;
}

// helper function converting rotation matrix to Euler angle (phi, theta, psi)
// R = Rz * Rx * Ry is assumed. If you use another representation, change this code accordingly.
Vec3 EKFImuVision::rotation2Euler(const Mat3x3& R) {
  double phi   = asin(R(2, 1));
  double theta = atan2(-R(2, 0), R(2, 2));
  double psi   = atan2(-R(0, 1), R(1, 1));
  return Vec3(phi, theta, psi);
}

void EKFImuVision::publishFusedOdom() {
//  AugState last_state = aug_state_hist_.back();
  AugState last_state = aug_state_hist_.at(max(latest_idx[pnp],latest_idx[vo]));  // always pnp actually

  double phi, theta, psi;
  phi   = last_state.mean(3);
  theta = last_state.mean(4);
  psi   = last_state.mean(5);

  if (last_state.mean.head(3).norm() > 20) {
    ROS_ERROR_STREAM("error state: " << last_state.mean.head(3).transpose());
    return;
  }

  // using the zxy euler angle
  Eigen::Quaterniond q = Eigen::AngleAxisd(psi, Eigen::Vector3d::UnitZ()) *
      Eigen::AngleAxisd(phi, Eigen::Vector3d::UnitX()) *
      Eigen::AngleAxisd(theta, Eigen::Vector3d::UnitY());
  nav_msgs::Odometry odom;
  odom.header.frame_id = "world";
  odom.header.stamp    = last_state.time_stamp;

  odom.pose.pose.position.x = last_state.mean(0);
  odom.pose.pose.position.y = last_state.mean(1);
  odom.pose.pose.position.z = last_state.mean(2);

  odom.pose.pose.orientation.w = q.w();
  odom.pose.pose.orientation.x = q.x();
  odom.pose.pose.orientation.y = q.y();
  odom.pose.pose.orientation.z = q.z();

  odom.twist.twist.linear.x = last_state.mean(6);
  odom.twist.twist.linear.y = last_state.mean(7);
  odom.twist.twist.linear.z = last_state.mean(8);


  fuse_odom_pub_.publish(odom);

  geometry_msgs::PoseStamped path_pose;
  path_pose.header.frame_id = path_.header.frame_id = "world";
  path_pose.pose.position.x                         = last_state.mean(0);
  path_pose.pose.position.y                         = last_state.mean(1);
  path_pose.pose.position.z                         = last_state.mean(2);
  path_.poses.push_back(path_pose);
  path_pub_.publish(path_);
}




}  // namespace ekf_imu_vision

I have been debugging this for several days... it still does not seem to work correctly.

 

Test

roslaunch aug_ekf augekf.launch 

 

 

 
