cartographer: when the robot is stationary and landmark data arrives at the same time, how do we solve the localization problem?
Posted by COCO_PEAK_NOODLE
Last time, we modified the code so that while the robot is stationary, the front end no longer sends nodes to the back end for loop closure (i.e. no new nodes are added to the pose graph). The side effect: if landmark data arrives during that time, no back-end optimization is triggered for it either. How do we solve this?
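To see why the landmark gets stuck, recall how the back end attaches a landmark: in the optimization problem (AddLandmarkCostFunctions in cartographer's optimization_problem_2d.cc), a landmark observation only becomes a cost term once there are trajectory nodes bracketing it in time. Paraphrased and heavily simplified, not the verbatim source:

// Sketch, paraphrased from cartographer: for each landmark observation,
// look for the trajectory nodes just before and just after it in time.
for (const auto& observation : landmark_node.landmark_observations) {
  // First node at or after the observation time.
  const auto next = node_data.lower_bound(observation.trajectory_id,
                                          observation.time);
  if (next == node_data.EndOfTrajectory(observation.trajectory_id)) {
    // The observation was made, but the next trajectory node has not been
    // added yet -- with node creation suppressed, we stay stuck here.
    continue;
  }
  const auto prev = std::prev(next);
  // Interpolate between prev and next and add the landmark cost term.
}

So as long as the front end keeps suppressing nodes, every optimization pass skips the observation.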
Here is one solution:
1. Add an AddLandmarkData function to the front-end local_trajectory builder; whenever landmark data arrives, it cancels the current stationary state.
The code is as follows:
void KalmanLocalTrajectoryBuilder2D::AddLandmarkData() {
  // When landmark data is received, reset last_time_in_place, because the
  // landmark needs a node for back-end optimization. This variable works
  // together with the stationary-detection code below.
  last_time_in_place = common::Time::min();
}
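For completeness, AddLandmarkData and last_time_in_place also need declarations. A minimal sketch of the assumed header additions (the member name is taken from the snippets in this post; the surrounding class is abbreviated, not the full header):

class KalmanLocalTrajectoryBuilder2D {
 public:
  // Cancels the stationary state so that the next matched scan again
  // produces a node the landmark constraint can attach to.
  void AddLandmarkData();

 private:
  // Time since when the robot has been considered stationary;
  // common::Time::min() means "not currently stationary".
  common::Time last_time_in_place = common::Time::min();
};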
Here is the corresponding modification to the stationary-detection code:
if (pose_prediction_distance < 0.005 &&
    pose_prediction_angle < (0.25 / 180 * M_PI)) {
  // The robot has not moved. Do not fail; just do not tell the back end
  // to optimize.
  if (last_time_in_place == common::Time::min()) {
    last_time_in_place = time;
  }
  common::Duration duration = time - last_time_in_place;
  if (common::ToSeconds(duration) >
      2 * options_.motion_filter_options().max_time_seconds()) {
    current_match_state = 1;  // The robot is in place;
    // hand back the last pose estimate.
    pose_estimate = last_pose_estimate;
  }
} else {
  // The robot is moving again: keep the timestamp current so the
  // stationary timer restarts from here.
  if (last_time_in_place != common::Time::min()) {
    last_time_in_place = time;
  }
}
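Note the numbers: the robot counts as stationary only if the matched pose moved less than 5 mm and rotated less than 0.25° (0.25/180·π ≈ 0.0044 rad) since the last estimate, and it is declared "in place" only after twice the motion filter's max_time_seconds. Assuming the stock default of 5 s from trajectory_builder_2d.lua, that means holding still for more than 10 s.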
Full code:
std::unique_ptr<KalmanLocalTrajectoryBuilder2D::MatchingResult>
KalmanLocalTrajectoryBuilder2D::AddAccumulatedRangeData(
    const common::Time time,
    const sensor::RangeData& gravity_aligned_range_data,
    const transform::Rigid3d& gravity_alignment,
    const absl::optional<common::Duration>& sensor_duration) {
  if (gravity_aligned_range_data.returns.empty()) {
    LOG(WARNING) << "Dropped empty horizontal range data.";
    return nullptr;
  }
  if (gravity_aligned_range_data.returns.size() < 50) {
    LOG(WARNING) << "Missing lots of horizontal range data.";
    return nullptr;
  }

  int current_match_state = 0;  // Default: no error, robot is moving.
  transform::Rigid3d gravity_alignment_variety = gravity_alignment;

  // Computes a gravity-aligned pose prediction.
  const transform::Rigid3d non_gravity_aligned_pose_prediction =
      extrapolator_->ExtrapolatePose(time);
  if (last_pose_estimate.translation() != Eigen::Vector3d::Zero()) {
    pose_prediction_distance =
        (non_gravity_aligned_pose_prediction.translation() -
         last_pose_estimate.translation())
            .norm();
    pose_prediction_angle = transform::GetAngle(
        non_gravity_aligned_pose_prediction.inverse() * last_pose_estimate);
  }
  const transform::Rigid2d pose_prediction =
      transform::Project2D(non_gravity_aligned_pose_prediction *
                           gravity_alignment_variety.inverse());

  const sensor::PointCloud& filtered_gravity_aligned_point_cloud =
      sensor::AdaptiveVoxelFilter(options_.adaptive_voxel_filter_options())
          .Filter(gravity_aligned_range_data.returns);
  if (filtered_gravity_aligned_point_cloud.empty()) {
    return nullptr;
  }

  // local map frame <- gravity-aligned frame
  pose_estimate_2d =
      ScanMatch(time, pose_prediction, filtered_gravity_aligned_point_cloud);
  if (pose_estimate_2d == nullptr) {
    LOG(WARNING) << "Scan matching failed.";
    return nullptr;
  }
  last_pose_estimate_2d = *pose_estimate_2d;

  // Earlier experiment, kept for reference: if the prediction barely moved
  // but the scan match jumped, fall back to the last estimate.
  /*
  double variety_distance =
      (pose_estimate_2d->translation() - pose_prediction.translation()).norm();
  if (pose_prediction_distance < 0.01 &&
      pose_prediction_angle < (0.5 / 180 * M_PI)) {
    if (variety_distance > std::max(0.02, 3 * pose_prediction_distance)) {
      pose_estimate_2d =
          absl::make_unique<transform::Rigid2d>(last_pose_estimate_2d);
      gravity_alignment_variety = last_gravity_alignment;
      LOG(INFO) << "Not using the scan match, jump of " << variety_distance;
      LOG(WARNING) << "pose_prediction_distance " << pose_prediction_distance
                   << " pose_prediction_angle "
                   << pose_prediction_angle / M_PI * 180;
    }
  }
  */

  // Stock cartographer composes with gravity_alignment directly; here we use
  // the copy that the disabled experiment above could overwrite.
  transform::Rigid3d pose_estimate =
      transform::Embed3D(*pose_estimate_2d) * gravity_alignment_variety;

  // Recompute the motion against the last estimate, now using the matched
  // pose instead of the prediction.
  if (last_pose_estimate.translation() != Eigen::Vector3d::Zero()) {
    pose_prediction_distance =
        (pose_estimate.translation() - last_pose_estimate.translation())
            .norm();
    pose_prediction_angle =
        transform::GetAngle(pose_estimate.inverse() * last_pose_estimate);
  }
  if (pose_prediction_distance < 0.005 &&
      pose_prediction_angle < (0.25 / 180 * M_PI)) {
    // The robot has not moved. Do not fail; just do not tell the back end
    // to optimize.
    if (last_time_in_place == common::Time::min()) {
      last_time_in_place = time;
    }
    common::Duration duration = time - last_time_in_place;
    if (common::ToSeconds(duration) >
        2 * options_.motion_filter_options().max_time_seconds()) {
      current_match_state = 1;  // The robot is in place;
      // hand back the last pose estimate.
      pose_estimate = last_pose_estimate;
    }
  } else {
    if (last_time_in_place != common::Time::min()) {
      last_time_in_place = time;
    }
  }

  // Kalman filter: add the pose observation ...
  pose_tracker_->AddPoseObservation(
      time, pose_estimate,
      options_.scan_matcher_variance() *
          kalman_filter::PoseCovariance::Identity());
  // ... and read back the filtered estimate.
  kalman_filter::PoseCovariance covariance_estimate;
  pose_tracker_->GetPoseEstimateMeanAndCovariance(
      time, &scan_matcher_pose_estimate_, &covariance_estimate);
  // Stock cartographer passes pose_estimate below; we use the filtered one.
  extrapolator_->AddPose(time, scan_matcher_pose_estimate_);
  last_pose_estimate = scan_matcher_pose_estimate_;

  sensor::RangeData range_data_in_local = TransformRangeData(
      gravity_aligned_range_data,
      transform::Embed3D(pose_estimate_2d->cast<float>()));
  std::unique_ptr<InsertionResult> insertion_result = nullptr;
  if (current_match_state == 0) {
    insertion_result = InsertIntoSubmap(
        time, range_data_in_local, filtered_gravity_aligned_point_cloud,
        scan_matcher_pose_estimate_, gravity_alignment_variety.rotation());
  } else if (current_match_state == 1) {
    // In place: update the submap but do not create a node for the back end.
    insertion_result = InsertIntoSubmapWithoutNode(
        time, range_data_in_local, filtered_gravity_aligned_point_cloud,
        scan_matcher_pose_estimate_, gravity_alignment_variety.rotation());
  } else {
    return nullptr;
  }
  last_gravity_alignment = gravity_alignment_variety;

  const auto wall_time = std::chrono::steady_clock::now();
  if (last_wall_time_.has_value()) {
    const auto wall_time_duration = wall_time - last_wall_time_.value();
    kLocalSlamLatencyMetric->Set(common::ToSeconds(wall_time_duration));
    if (sensor_duration.has_value()) {
      kLocalSlamRealTimeRatio->Set(common::ToSeconds(sensor_duration.value()) /
                                   common::ToSeconds(wall_time_duration));
    }
  }
  const double thread_cpu_time_seconds = common::GetThreadCpuTimeSeconds();
  if (last_thread_cpu_time_seconds_.has_value()) {
    const double thread_cpu_duration_seconds =
        thread_cpu_time_seconds - last_thread_cpu_time_seconds_.value();
    if (sensor_duration.has_value()) {
      kLocalSlamCpuRealTimeRatio->Set(
          common::ToSeconds(sensor_duration.value()) /
          thread_cpu_duration_seconds);
    }
  }
  last_wall_time_ = wall_time;
  last_thread_cpu_time_seconds_ = thread_cpu_time_seconds;

  return absl::make_unique<MatchingResult>(
      MatchingResult{time, scan_matcher_pose_estimate_,
                     std::move(range_data_in_local),
                     std::move(insertion_result), current_match_state});
}
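The return statement above assumes MatchingResult has grown an extra field. A minimal sketch of the assumed struct (the first four members mirror stock cartographer's LocalTrajectoryBuilder2D::MatchingResult; the last one is this modification's addition):

struct MatchingResult {
  common::Time time;
  transform::Rigid3d local_pose;
  sensor::RangeData range_data_in_local;
  // 'nullptr' if dropped by the motion filter.
  std::unique_ptr<const InsertionResult> insertion_result;
  // Assumed addition: 0 = moving (normal node), 1 = in place (no node sent).
  int current_match_state = 0;
};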
When landmark data arrives, notify the front end and the back end at the same time. (In stock cartographer this override, in GlobalTrajectoryBuilder, only forwards to the pose graph; the front-end call is the addition here.)
void AddSensorData(const std::string& sensor_id,
                   const sensor::LandmarkData& landmark_data) override {
  // Notify the back end.
  pose_graph_->AddLandmarkData(trajectory_id_, landmark_data);
  // Notify the front end.
  local_trajectory_builder_->AddLandmarkData();
}
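Putting it together, the sequence when a landmark arrives while the robot is parked: the back end queues the observation but cannot attach it yet (no bracketing node); the front end resets last_time_in_place, so the next AddAccumulatedRangeData call takes the moving branch and InsertIntoSubmap creates a node; on the next optimization pass the observation finds its bracketing nodes and the landmark constraint takes effect.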