LocalTrajectoryBuilder is responsible for building the local trajectory. In the class diagram below, the method parameters are not drawn in.
Pay attention to three classes in it: PoseExtrapolator, RealTimeCorrelativeScanMatcher, and CeresScanMatcher.
(1) The PoseExtrapolator class (see the figure below). Both the Node class and the LocalTrajectoryBuilder class use a PoseExtrapolator object, but the two instances appear to be unrelated.
The PoseExtrapolator object inside LocalTrajectoryBuilder acts like a motion model.
(The one in the Node class is probably used for publishing pose information and performs its own, separate pose extrapolation; we will not pursue it here.)
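To make the "motion model" role concrete, here is a minimal illustrative sketch. This is NOT Cartographer code; the real PoseExtrapolator additionally maintains an ImuTracker, timed pose/IMU/odometry queues, and gravity estimation. The toy version below only integrates constant linear and angular velocities from the last known pose to the query time, which is the core idea behind ExtrapolatePose.

// Toy constant-velocity extrapolator, for illustration only.
#include <Eigen/Geometry>

struct ToyPoseExtrapolator {
  double last_time = 0.0;                        // time of the last added pose
  Eigen::Vector3d last_position{0., 0., 0.};     // position of the last added pose
  Eigen::Quaterniond last_orientation{1., 0., 0., 0.};
  Eigen::Vector3d linear_velocity{0., 0., 0.};   // estimated from the pose history
  Eigen::Vector3d angular_velocity{0., 0., 0.};  // from IMU or the pose history

  // Predict the pose at 'time' assuming both velocities stay constant.
  void ExtrapolatePose(double time, Eigen::Vector3d* position,
                       Eigen::Quaterniond* orientation) const {
    const double dt = time - last_time;
    *position = last_position + dt * linear_velocity;
    const Eigen::Vector3d rotation_vector = dt * angular_velocity;
    const double angle = rotation_vector.norm();
    const Eigen::Vector3d axis =
        angle > 0. ? Eigen::Vector3d(rotation_vector / angle)
                   : Eigen::Vector3d::UnitZ();
    *orientation =
        last_orientation * Eigen::Quaterniond(Eigen::AngleAxisd(angle, axis));
  }
};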
The PoseExtrapolator constructor vs. initialization from an IMU via the InitializeWithImu method.
The constructor is called in LocalTrajectoryBuilder::InitializeExtrapolator:
void LocalTrajectoryBuilder::InitializeExtrapolator(const common::Time time)
{
  if (extrapolator_ != nullptr) {
    return;
  }
  // We derive velocities from poses which are at least 1 ms apart for numerical
  // stability. Usually poses known to the extrapolator will be further apart
  // in time and thus the last two are used.
  constexpr double kExtrapolationEstimationTimeSec = 0.001;
  // TODO(gaschler): Consider using InitializeWithImu as 3D does.
  extrapolator_ = common::make_unique<mapping::PoseExtrapolator>(
      ::cartographer::common::FromSeconds(kExtrapolationEstimationTimeSec),
      options_.imu_gravity_time_constant());
  extrapolator_->AddPose(time, transform::Rigid3d::Identity());
}
The PoseExtrapolator::InitializeWithImu method:
std::unique_ptr<PoseExtrapolator> PoseExtrapolator::InitializeWithImu(
    const common::Duration pose_queue_duration,
    const double imu_gravity_time_constant, const sensor::ImuData& imu_data)
{
  auto extrapolator = common::make_unique<PoseExtrapolator>(
      pose_queue_duration, imu_gravity_time_constant);
  extrapolator->AddImuData(imu_data);
  extrapolator->imu_tracker_ = common::make_unique<ImuTracker>(
      imu_gravity_time_constant, imu_data.time);
  extrapolator->imu_tracker_->AddImuLinearAccelerationObservation(
      imu_data.linear_acceleration);
  extrapolator->imu_tracker_->AddImuAngularVelocityObservation(
      imu_data.angular_velocity);
  extrapolator->imu_tracker_->Advance(imu_data.time);
  extrapolator->AddPose(
      imu_data.time,
      transform::Rigid3d::Rotation(extrapolator->imu_tracker_->orientation()));
  return extrapolator;
}
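The TODO in InitializeExtrapolator above refers to the 3D pipeline. For comparison, the 3D local trajectory builder constructs its extrapolator through InitializeWithImu on the first IMU message, roughly as in the sketch below. This is paraphrased from memory (the class is named LocalTrajectoryBuilder3D in newer releases, mapping_3d::LocalTrajectoryBuilder in older ones), so details may differ between Cartographer versions.

// Sketch only: how the 3D builder initializes the extrapolator from an IMU.
void LocalTrajectoryBuilder3D::AddImuData(const sensor::ImuData& imu_data) {
  if (extrapolator_ != nullptr) {
    extrapolator_->AddImuData(imu_data);
    return;
  }
  // Same 1 ms pose-queue duration as in the 2D case above.
  constexpr double kExtrapolationEstimationTimeSec = 0.001;
  extrapolator_ = mapping::PoseExtrapolator::InitializeWithImu(
      ::cartographer::common::FromSeconds(kExtrapolationEstimationTimeSec),
      options_.imu_gravity_time_constant(), imu_data);
}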
LocalTrajectoryBuilder's AddImuData and AddOdometryData methods simply forward the sensor data to the extrapolator and need no further elaboration:
void LocalTrajectoryBuilder::AddImuData(const sensor::ImuData& imu_data) {
  CHECK(options_.use_imu_data()) << "An unexpected IMU packet was added.";
  InitializeExtrapolator(imu_data.time);
  extrapolator_->AddImuData(imu_data);
}

void LocalTrajectoryBuilder::AddOdometryData(
    const sensor::OdometryData& odometry_data) {
  if (extrapolator_ == nullptr) {
    // Until we've initialized the extrapolator we cannot add odometry data.
    LOG(INFO) << "Extrapolator not yet initialized.";
    return;
  }
  extrapolator_->AddOdometryData(odometry_data);
}
Next, look at LocalTrajectoryBuilder::AddRangeData below. If IMU data is used, the extrapolator has already been created in AddImuData, so execution skips straight to the nullptr check; if no IMU is used, InitializeExtrapolator(time) is called at the top of this method.
std::unique_ptr<LocalTrajectoryBuilder::MatchingResult>
LocalTrajectoryBuilder::AddRangeData(const common::Time time,
                                     const sensor::TimedRangeData& range_data)
{
  // Initialize extrapolator now if we do not ever use an IMU.
  if (!options_.use_imu_data()) {
    InitializeExtrapolator(time);
  }

  if (extrapolator_ == nullptr) {
    // Until we've initialized the extrapolator with our first IMU message, we
    // cannot compute the orientation of the rangefinder.
    LOG(INFO) << "Extrapolator not yet initialized.";
    return nullptr;
  }

  CHECK(!range_data.returns.empty());
  // The fourth component of each return is its time offset in seconds
  // relative to 'time'; the last return of the scan has offset 0.
  CHECK_EQ(range_data.returns.back()[3], 0);
  const common::Time time_first_point =
      time + common::FromSeconds(range_data.returns.front()[3]);
  if (time_first_point < extrapolator_->GetLastPoseTime()) {
    LOG(INFO) << "Extrapolator is still initializing.";
    return nullptr;
  }

  // Extrapolate a pose for every return so the scan can be unwarped.
  std::vector<transform::Rigid3f> range_data_poses;
  range_data_poses.reserve(range_data.returns.size());
  for (const Eigen::Vector4f& hit : range_data.returns) {
    const common::Time time_point = time + common::FromSeconds(hit[3]);
    range_data_poses.push_back(
        extrapolator_->ExtrapolatePose(time_point).cast<float>());
  }

  if (num_accumulated_ == 0) {
    // 'accumulated_range_data_.origin' is uninitialized until the last
    // accumulation.
    accumulated_range_data_ = sensor::RangeData{{}, {}, {}};
  }

  // Drop any returns below the minimum range and convert returns beyond the
  // maximum range into misses.
  for (size_t i = 0; i < range_data.returns.size(); ++i) {
    const Eigen::Vector4f& hit = range_data.returns[i];
    const Eigen::Vector3f origin_in_local =
        range_data_poses[i] * range_data.origin;
    const Eigen::Vector3f hit_in_local = range_data_poses[i] * hit.head<3>();
    const Eigen::Vector3f delta = hit_in_local - origin_in_local;
    const float range = delta.norm();
    if (range >= options_.min_range()) {
      if (range <= options_.max_range()) {
        accumulated_range_data_.returns.push_back(hit_in_local);
      } else {
        accumulated_range_data_.misses.push_back(
            origin_in_local +
            options_.missing_data_ray_length() / range * delta);
      }
    }
  }
  ++num_accumulated_;

  if (num_accumulated_ >= options_.num_accumulated_range_data()) {
    num_accumulated_ = 0;
    const transform::Rigid3d gravity_alignment = transform::Rigid3d::Rotation(
        extrapolator_->EstimateGravityOrientation(time));
    accumulated_range_data_.origin =
        range_data_poses.back() * range_data.origin;
    return AddAccumulatedRangeData(
        time,
        TransformToGravityAlignedFrameAndFilter(
            gravity_alignment.cast<float>() * range_data_poses.back().inverse(),
            accumulated_range_data_),
        gravity_alignment);
  }
  return nullptr;
}
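Before handing the accumulated data to AddAccumulatedRangeData, AddRangeData runs it through TransformToGravityAlignedFrameAndFilter. Roughly, it transforms the data into the gravity-aligned frame, crops it by z, and voxel-filters it, as in the paraphrased sketch below (the option names min_z/max_z/voxel_filter_size follow the 2D trajectory builder options; details may differ by version):

sensor::RangeData
LocalTrajectoryBuilder::TransformToGravityAlignedFrameAndFilter(
    const transform::Rigid3f& transform_to_gravity_aligned_frame,
    const sensor::RangeData& range_data) const {
  // Move the data into the gravity-aligned frame, then crop by z so that only
  // the horizontal slice configured via min_z/max_z remains.
  const sensor::RangeData cropped =
      sensor::CropRangeData(sensor::TransformRangeData(
                                range_data, transform_to_gravity_aligned_frame),
                            options_.min_z(), options_.max_z());
  // Downsample returns and misses with a fixed-size voxel filter.
  return sensor::RangeData{
      cropped.origin,
      sensor::VoxelFilter(options_.voxel_filter_size()).Filter(cropped.returns),
      sensor::VoxelFilter(options_.voxel_filter_size()).Filter(cropped.misses)};
}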
Next comes LocalTrajectoryBuilder::AddAccumulatedRangeData, shown below. It takes three parameters:
const common::Time time, const sensor::RangeData& gravity_aligned_range_data, const transform::Rigid3d& gravity_alignment
i.e. the timestamp, the gravity-aligned range data, and the gravity-alignment transform.
Note that the body calls the ScanMatch method to perform scan matching and, right afterwards, calls extrapolator_->AddPose():
AddPose is executed after every scan match.
std::unique_ptr<LocalTrajectoryBuilder::MatchingResult>
LocalTrajectoryBuilder::AddAccumulatedRangeData(
    const common::Time time,
    const sensor::RangeData& gravity_aligned_range_data,
    const transform::Rigid3d& gravity_alignment)
{
  if (gravity_aligned_range_data.returns.empty()) {
    LOG(WARNING) << "Dropped empty horizontal range data.";
    return nullptr;
  }

  // Computes a gravity aligned pose prediction.
  const transform::Rigid3d non_gravity_aligned_pose_prediction =
      extrapolator_->ExtrapolatePose(time);
  const transform::Rigid2d pose_prediction = transform::Project2D(
      non_gravity_aligned_pose_prediction * gravity_alignment.inverse());

  // local map frame <- gravity-aligned frame
  std::unique_ptr<transform::Rigid2d> pose_estimate_2d =
      ScanMatch(time, pose_prediction, gravity_aligned_range_data);
  if (pose_estimate_2d == nullptr) {
    LOG(WARNING) << "Scan matching failed.";
    return nullptr;
  }
  const transform::Rigid3d pose_estimate =
      transform::Embed3D(*pose_estimate_2d) * gravity_alignment;
  extrapolator_->AddPose(time, pose_estimate);

  sensor::RangeData range_data_in_local =
      TransformRangeData(gravity_aligned_range_data,
                         transform::Embed3D(pose_estimate_2d->cast<float>()));
  std::unique_ptr<InsertionResult> insertion_result =
      InsertIntoSubmap(time, range_data_in_local, gravity_aligned_range_data,
                       pose_estimate, gravity_alignment.rotation());
  return common::make_unique<MatchingResult>(
      MatchingResult{time, pose_estimate, std::move(range_data_in_local),
                     std::move(insertion_result)});
}

std::unique_ptr<LocalTrajectoryBuilder::InsertionResult>
LocalTrajectoryBuilder::InsertIntoSubmap(
    const common::Time time, const sensor::RangeData& range_data_in_local,
    const sensor::RangeData& gravity_aligned_range_data,
    const transform::Rigid3d& pose_estimate,
    const Eigen::Quaterniond& gravity_alignment)
{
  if (motion_filter_.IsSimilar(time, pose_estimate)) {
    return nullptr;
  }

  // Querying the active submaps must be done here before calling
  // InsertRangeData() since the queried values are valid for next insertion.
  std::vector<std::shared_ptr<const Submap>> insertion_submaps;
  for (const std::shared_ptr<Submap>& submap : active_submaps_.submaps()) {
    insertion_submaps.push_back(submap);
  }
  active_submaps_.InsertRangeData(range_data_in_local);

  sensor::AdaptiveVoxelFilter adaptive_voxel_filter(
      options_.loop_closure_adaptive_voxel_filter_options());
  const sensor::PointCloud filtered_gravity_aligned_point_cloud =
      adaptive_voxel_filter.Filter(gravity_aligned_range_data.returns);

  return common::make_unique<InsertionResult>(InsertionResult{
      std::make_shared<const mapping::TrajectoryNode::Data>(
          mapping::TrajectoryNode::Data{
              time,
              gravity_alignment,
              filtered_gravity_aligned_point_cloud,
              {},  // 'high_resolution_point_cloud' is only used in 3D.
              {},  // 'low_resolution_point_cloud' is only used in 3D.
              {},  // 'rotational_scan_matcher_histogram' is only used in 3D.
              pose_estimate}),
      std::move(insertion_submaps)});
}
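The ScanMatch method called in AddAccumulatedRangeData is where the remaining two classes come into play: the real-time correlative scan matcher optionally refines the extrapolator's prediction, and the Ceres scan matcher then produces the final pose estimate against the current submap. Below is a paraphrased sketch of that flow; the member names real_time_correlative_scan_matcher_ and ceres_scan_matcher_ and the option use_online_correlative_scan_matching follow the same era of the source as the code above, but details may differ between versions.

// Sketch only: how ScanMatch chains the two scan matchers.
std::unique_ptr<transform::Rigid2d> LocalTrajectoryBuilder::ScanMatch(
    const common::Time time, const transform::Rigid2d& pose_prediction,
    const sensor::RangeData& gravity_aligned_range_data) {
  std::shared_ptr<const Submap> matching_submap =
      active_submaps_.submaps().front();
  // The online correlative scan matcher only refines the initial estimate
  // that is then handed to the Ceres scan matcher.
  transform::Rigid2d initial_ceres_pose = pose_prediction;
  sensor::AdaptiveVoxelFilter adaptive_voxel_filter(
      options_.adaptive_voxel_filter_options());
  const sensor::PointCloud filtered_gravity_aligned_point_cloud =
      adaptive_voxel_filter.Filter(gravity_aligned_range_data.returns);
  if (filtered_gravity_aligned_point_cloud.empty()) {
    return nullptr;
  }
  if (options_.use_online_correlative_scan_matching()) {
    real_time_correlative_scan_matcher_.Match(
        pose_prediction, filtered_gravity_aligned_point_cloud,
        matching_submap->probability_grid(), &initial_ceres_pose);
  }
  auto pose_observation = common::make_unique<transform::Rigid2d>();
  ceres::Solver::Summary summary;
  ceres_scan_matcher_.Match(pose_prediction.translation(), initial_ceres_pose,
                            filtered_gravity_aligned_point_cloud,
                            matching_submap->probability_grid(),
                            pose_observation.get(), &summary);
  return pose_observation;
}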
(2) The RealTimeCorrelativeScanMatcher class performs real-time scan matching using a correlative (correlation-based) search.