Skip to content

Commit

Permalink
Remove intensities - they are unused in SLAM. (cartographer-project#247)
Browse files Browse the repository at this point in the history
  • Loading branch information
SirVer authored May 3, 2017
1 parent 1cdcd12 commit cdd366b
Show file tree
Hide file tree
Showing 13 changed files with 96 additions and 180 deletions.
2 changes: 1 addition & 1 deletion cartographer/mapping_2d/global_trajectory_builder.cc
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ void GlobalTrajectoryBuilder::AddRangefinderData(
const sensor::PointCloud& ranges) {
std::unique_ptr<LocalTrajectoryBuilder::InsertionResult> insertion_result =
local_trajectory_builder_.AddHorizontalRangeData(
time, sensor::RangeData{origin, ranges, {}, {}});
time, sensor::RangeData{origin, ranges, {}});
if (insertion_result != nullptr) {
sparse_pose_graph_->AddScan(
insertion_result->time, insertion_result->tracking_to_tracking_2d,
Expand Down
2 changes: 1 addition & 1 deletion cartographer/mapping_2d/local_trajectory_builder.cc
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ sensor::RangeData LocalTrajectoryBuilder::TransformAndFilterRangeData(
const sensor::RangeData& range_data) const {
// Drop any returns below the minimum range and convert returns beyond the
// maximum range into misses.
sensor::RangeData returns_and_misses{range_data.origin, {}, {}, {}};
sensor::RangeData returns_and_misses{range_data.origin, {}, {}};
for (const Eigen::Vector3f& hit : range_data.returns) {
const float range = (hit - range_data.origin).norm();
if (range >= options_.laser_min_range()) {
Expand Down
2 changes: 1 addition & 1 deletion cartographer/mapping_2d/map_limits_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ TEST(MapLimitsTest, ComputeMapLimits) {
Eigen::Vector3f::Zero(),
{Eigen::Vector3f(-30.f, 1.f, 0.f), Eigen::Vector3f(50.f, -10.f, 0.f)},
{}},
Compress(sensor::RangeData{Eigen::Vector3f::Zero(), {}, {}, {}}), nullptr,
Compress(sensor::RangeData{Eigen::Vector3f::Zero(), {}, {}}), nullptr,
transform::Rigid3d::Identity()};
const mapping::TrajectoryNode trajectory_node{&constant_data,
transform::Rigid3d::Identity()};
Expand Down
47 changes: 26 additions & 21 deletions cartographer/mapping_2d/sparse_pose_graph.cc
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ void SparsePoseGraph::AddScan(

constant_node_data_->push_back(mapping::TrajectoryNode::ConstantData{
time, range_data_in_pose,
Compress(sensor::RangeData{Eigen::Vector3f::Zero(), {}, {}, {}}), submaps,
Compress(sensor::RangeData{Eigen::Vector3f::Zero(), {}, {}}), submaps,
tracking_to_pose});
trajectory_nodes_.push_back(mapping::TrajectoryNode{
&constant_node_data_->back(), optimized_pose,
Expand Down Expand Up @@ -294,25 +294,30 @@ void SparsePoseGraph::WaitForAllComputations() {
common::MutexLocker locker(&mutex_);
const int num_finished_scans_at_start =
constraint_builder_.GetNumFinishedScans();
while (!locker.AwaitWithTimeout([this]() REQUIRES(mutex_) {
return static_cast<size_t>(constraint_builder_.GetNumFinishedScans()) ==
trajectory_nodes_.size();
}, common::FromSeconds(1.))) {
while (!locker.AwaitWithTimeout(
[this]() REQUIRES(mutex_) {
return static_cast<size_t>(constraint_builder_.GetNumFinishedScans()) ==
trajectory_nodes_.size();
},
common::FromSeconds(1.))) {
std::ostringstream progress_info;
progress_info << "Optimizing: " << std::fixed << std::setprecision(1)
<< 100. * (constraint_builder_.GetNumFinishedScans() -
num_finished_scans_at_start) /
<< 100. *
(constraint_builder_.GetNumFinishedScans() -
num_finished_scans_at_start) /
(trajectory_nodes_.size() -
num_finished_scans_at_start) << "%...";
num_finished_scans_at_start)
<< "%...";
std::cout << "\r\x1b[K" << progress_info.str() << std::flush;
}
std::cout << "\r\x1b[KOptimizing: Done. " << std::endl;
constraint_builder_.WhenDone([this, &notification](
const sparse_pose_graph::ConstraintBuilder::Result& result) {
constraints_.insert(constraints_.end(), result.begin(), result.end());
common::MutexLocker locker(&mutex_);
notification = true;
});
constraint_builder_.WhenDone(
[this, &notification](
const sparse_pose_graph::ConstraintBuilder::Result& result) {
constraints_.insert(constraints_.end(), result.begin(), result.end());
common::MutexLocker locker(&mutex_);
notification = true;
});
locker.Await([&notification]() { return notification; });
}

Expand Down Expand Up @@ -346,13 +351,13 @@ void SparsePoseGraph::RunOptimization() {
const mapping::Submaps* trajectory =
trajectory_nodes_[i].constant_data->trajectory;
if (extrapolation_transforms.count(trajectory) == 0) {
extrapolation_transforms[trajectory] =
transform::Rigid3d(ExtrapolateSubmapTransforms(submap_transforms_,
*trajectory).back() *
ExtrapolateSubmapTransforms(
optimized_submap_transforms_, *trajectory)
.back()
.inverse());
extrapolation_transforms[trajectory] = transform::Rigid3d(
ExtrapolateSubmapTransforms(submap_transforms_, *trajectory)
.back() *
ExtrapolateSubmapTransforms(optimized_submap_transforms_,
*trajectory)
.back()
.inverse());
}
trajectory_nodes_[i].pose =
extrapolation_transforms[trajectory] * trajectory_nodes_[i].pose;
Expand Down
2 changes: 1 addition & 1 deletion cartographer/mapping_3d/kalman_local_trajectory_builder.cc
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ KalmanLocalTrajectoryBuilder::AddRangefinderData(
const transform::Rigid3f tracking_delta =
first_pose_prediction_.inverse() * pose_prediction.cast<float>();
const sensor::RangeData range_data_in_first_tracking =
sensor::TransformRangeData(sensor::RangeData{origin, ranges, {}, {}},
sensor::TransformRangeData(sensor::RangeData{origin, ranges, {}},
tracking_delta);
for (const Eigen::Vector3f& hit : range_data_in_first_tracking.returns) {
const Eigen::Vector3f delta = hit - range_data_in_first_tracking.origin;
Expand Down
2 changes: 1 addition & 1 deletion cartographer/mapping_3d/submaps.cc
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ void InsertSegmentsIntoProbabilityGrid(const std::vector<RaySegment>& segments,
// information.
sensor::RangeData FilterRangeDataByMaxRange(const sensor::RangeData& range_data,
const float max_range) {
sensor::RangeData result{range_data.origin, {}, {}, {}};
sensor::RangeData result{range_data.origin, {}, {}};
for (const Eigen::Vector3f& hit : range_data.returns) {
if ((hit - range_data.origin).norm() <= max_range) {
result.returns.push_back(hit);
Expand Down
115 changes: 47 additions & 68 deletions cartographer/sensor/compressed_point_cloud.cc
Original file line number Diff line number Diff line change
Expand Up @@ -35,67 +35,6 @@ constexpr int kBitsPerCoordinate = 10;
constexpr int kCoordinateMask = (1 << kBitsPerCoordinate) - 1;
constexpr int kMaxBitsPerDirection = 23;

// Compresses 'point_cloud' into a vector of packed int32 values and
// optionally returns a mapping 'new_to_old' from compressed indices to
// original indices, i.e. conceptually compressed[i] = point_cloud[new_to_old[i]].
// Points are rasterized at 'kPrecision' resolution and grouped into blocks of
// 2^kBitsPerCoordinate cells per axis; each block is emitted as a point count,
// three block coordinates, and one packed 3x10-bit entry per point.
std::vector<int32> Compress(const PointCloud& point_cloud,
std::vector<int>* new_to_old = nullptr) {
// Distribute points into blocks: split each rasterized coordinate into a
// block coordinate (high bits) and an in-block offset (low kBitsPerCoordinate
// bits).
struct RasterPoint {
Eigen::Array3i point;
int index;
};
using Blocks = mapping_3d::HybridGridBase<std::vector<RasterPoint>>;
Blocks blocks(kPrecision, Eigen::Vector3f::Zero());
int num_blocks = 0;
CHECK_LE(point_cloud.size(), std::numeric_limits<int>::max());
for (int point_index = 0; point_index < static_cast<int>(point_cloud.size());
++point_index) {
const Eigen::Vector3f& point = point_cloud[point_index];
// Coordinates beyond kMaxBitsPerDirection cells are not representable.
CHECK_LT(point.cwiseAbs().maxCoeff() / kPrecision,
1 << kMaxBitsPerDirection)
<< "Point out of bounds: " << point;
Eigen::Array3i raster_point;
Eigen::Array3i block_coordinate;
for (int i = 0; i < 3; ++i) {
raster_point[i] = common::RoundToInt(point[i] / kPrecision);
block_coordinate[i] = raster_point[i] >> kBitsPerCoordinate;
raster_point[i] &= kCoordinateMask;
}
auto* const block = blocks.mutable_value(block_coordinate);
// 'empty()' is true only the first time a block is touched, so this
// counts distinct blocks.
num_blocks += block->empty();
block->push_back({raster_point, point_index});
}

if (new_to_old != nullptr) {
new_to_old->clear();
new_to_old->reserve(point_cloud.size());
}

// Encode blocks: 4 header words per block plus one word per point.
std::vector<int32> result;
result.reserve(4 * num_blocks + point_cloud.size());
for (Blocks::Iterator it(blocks); !it.Done(); it.Next(), --num_blocks) {
const auto& raster_points = it.GetValue();
CHECK_LE(raster_points.size(), std::numeric_limits<int32>::max());
result.push_back(raster_points.size());
const Eigen::Array3i block_coordinate = it.GetCellIndex();
result.push_back(block_coordinate.x());
result.push_back(block_coordinate.y());
result.push_back(block_coordinate.z());
for (const RasterPoint& raster_point : raster_points) {
// Pack z, y, x offsets (kBitsPerCoordinate bits each) into one int32.
result.push_back((((raster_point.point.z() << kBitsPerCoordinate) +
raster_point.point.y())
<< kBitsPerCoordinate) +
raster_point.point.x());
if (new_to_old != nullptr) {
new_to_old->push_back(raster_point.index);
}
}
}
// Every counted block must have been visited exactly once.
CHECK_EQ(num_blocks, 0);
return result;
}

} // namespace

CompressedPointCloud::ConstIterator::ConstIterator(
Expand Down Expand Up @@ -158,18 +97,58 @@ void CompressedPointCloud::ConstIterator::ReadNextPoint() {
}

// Compresses 'point_cloud' into 'point_data_'. Points are rasterized at
// 'kPrecision' resolution and grouped into blocks of 2^kBitsPerCoordinate
// cells per axis; each block is stored as a point count, three block
// coordinates, and one packed 3x10-bit entry per point.
CompressedPointCloud::CompressedPointCloud(const PointCloud& point_cloud)
: num_points_(point_cloud.size()) {
// Distribute points into blocks: split each rasterized coordinate into a
// block coordinate (high bits) and an in-block offset (low kBitsPerCoordinate
// bits).
struct RasterPoint {
Eigen::Array3i point;
int index;
};
using Blocks = mapping_3d::HybridGridBase<std::vector<RasterPoint>>;
Blocks blocks(kPrecision, Eigen::Vector3f::Zero());
int num_blocks = 0;
CHECK_LE(point_cloud.size(), std::numeric_limits<int>::max());
for (int point_index = 0; point_index < static_cast<int>(point_cloud.size());
++point_index) {
const Eigen::Vector3f& point = point_cloud[point_index];
// Coordinates beyond kMaxBitsPerDirection cells are not representable.
CHECK_LT(point.cwiseAbs().maxCoeff() / kPrecision,
1 << kMaxBitsPerDirection)
<< "Point out of bounds: " << point;
Eigen::Array3i raster_point;
Eigen::Array3i block_coordinate;
for (int i = 0; i < 3; ++i) {
raster_point[i] = common::RoundToInt(point[i] / kPrecision);
block_coordinate[i] = raster_point[i] >> kBitsPerCoordinate;
raster_point[i] &= kCoordinateMask;
}
auto* const block = blocks.mutable_value(block_coordinate);
// 'empty()' is true only the first time a block is touched, so this
// counts distinct blocks.
num_blocks += block->empty();
block->push_back({raster_point, point_index});
}

// Encode blocks: 4 header words per block plus one word per point.
point_data_.reserve(4 * num_blocks + point_cloud.size());
for (Blocks::Iterator it(blocks); !it.Done(); it.Next(), --num_blocks) {
const auto& raster_points = it.GetValue();
CHECK_LE(raster_points.size(), std::numeric_limits<int32>::max());
point_data_.push_back(raster_points.size());
const Eigen::Array3i block_coordinate = it.GetCellIndex();
point_data_.push_back(block_coordinate.x());
point_data_.push_back(block_coordinate.y());
point_data_.push_back(block_coordinate.z());
for (const RasterPoint& raster_point : raster_points) {
// Pack z, y, x offsets (kBitsPerCoordinate bits each) into one int32.
point_data_.push_back((((raster_point.point.z() << kBitsPerCoordinate) +
raster_point.point.y())
<< kBitsPerCoordinate) +
raster_point.point.x());
}
}
// Every counted block must have been visited exactly once.
CHECK_EQ(num_blocks, 0);
}

// Internal constructor (declared private in the header) that adopts
// already-encoded 'point_data' covering 'num_points' points.
CompressedPointCloud::CompressedPointCloud(const std::vector<int32>& point_data,
size_t num_points)
: point_data_(point_data), num_points_(num_points) {}

// Compresses 'point_cloud' and fills 'new_to_old' with the mapping from
// compressed indices to original indices, i.e. conceptually
// compressed[i] = point_cloud[new_to_old[i]].
CompressedPointCloud CompressedPointCloud::CompressAndReturnOrder(
const PointCloud& point_cloud, std::vector<int>* new_to_old) {
return CompressedPointCloud(Compress(point_cloud, new_to_old),
point_cloud.size());
}

// Returns true iff the compressed cloud contains no points.
bool CompressedPointCloud::empty() const {
  return 0 == num_points_;
}

// Returns the number of points stored in the compressed cloud.
size_t CompressedPointCloud::size() const {
  return num_points_;
}
Expand Down
10 changes: 2 additions & 8 deletions cartographer/sensor/compressed_point_cloud.h
Original file line number Diff line number Diff line change
Expand Up @@ -40,12 +40,6 @@ class CompressedPointCloud {
CompressedPointCloud() : num_points_(0) {}
explicit CompressedPointCloud(const PointCloud& point_cloud);

// Returns a compressed point cloud and further returns a mapping 'new_to_old'
// from the compressed indices to the original indices, i.e., conceptually
// compressed[i] = point_cloud[new_to_old[i]].
static CompressedPointCloud CompressAndReturnOrder(
const PointCloud& point_cloud, std::vector<int>* new_to_old);

// Returns decompressed point cloud.
PointCloud Decompress() const;

Expand All @@ -59,8 +53,8 @@ class CompressedPointCloud {
private:
CompressedPointCloud(const std::vector<int32>& point_data, size_t num_points);

const std::vector<int32> point_data_;
const size_t num_points_;
std::vector<int32> point_data_;
size_t num_points_;
};

// Forward iterator for compressed point clouds.
Expand Down
15 changes: 0 additions & 15 deletions cartographer/sensor/compressed_point_cloud_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -116,21 +116,6 @@ TEST(CompressPointCloudTest, CompressesNoGaps) {
}
}

// Verifies that the 'new_to_old' mapping returned by CompressAndReturnOrder
// relates each compressed point back to its original, including when the
// input contains duplicate points.
TEST(CompressPointCloudTest, CompressesAndReturnsOrder) {
PointCloud point_cloud = {
Eigen::Vector3f(1.f, 0.f, 0.f), Eigen::Vector3f(-10.f, 0.f, 0.f),
Eigen::Vector3f(1.f, 0.f, 0.f), Eigen::Vector3f(-10.f, 0.f, 0.f)};
std::vector<int> new_to_old;
CompressedPointCloud compressed =
CompressedPointCloud::CompressAndReturnOrder(point_cloud, &new_to_old);
EXPECT_EQ(point_cloud.size(), new_to_old.size());
// Iterate the compressed cloud in its (reordered) storage order and check
// each point against the original it maps back to.
int i = 0;
for (const Eigen::Vector3f& point : compressed) {
EXPECT_THAT(point, ApproximatelyEquals(point_cloud[new_to_old[i]]));
++i;
}
}

} // namespace
} // namespace sensor
} // namespace cartographer
3 changes: 0 additions & 3 deletions cartographer/sensor/proto/sensor.proto
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,4 @@ message RangeData {
optional transform.proto.Vector3f origin = 1;
optional PointCloud point_cloud = 2;
optional PointCloud missing_echo_point_cloud = 3;

// Reflectivity values of point_cloud or empty.
repeated int32 reflectivity = 4 [packed = true];
}
31 changes: 3 additions & 28 deletions cartographer/sensor/range_data.cc
Original file line number Diff line number Diff line change
Expand Up @@ -22,21 +22,6 @@
namespace cartographer {
namespace sensor {

namespace {

// Reorders 'reflectivities' according to the index mapping 'new_to_old', so
// that reordered[i] == reflectivities[new_to_old[i]]. Assumes 'new_to_old'
// has at least as many entries as 'reflectivities'.
std::vector<uint8> ReorderReflectivities(
const std::vector<uint8>& reflectivities,
const std::vector<int>& new_to_old) {
std::vector<uint8> reordered(reflectivities.size());
for (size_t i = 0; i < reordered.size(); ++i) {
reordered[i] = reflectivities[new_to_old[i]];
}
return reordered;
}

}  // namespace

// Converts 'proto' to a PointCloud, discarding the per-point intensities
// that ToPointCloudWithIntensities also extracts.
PointCloud ToPointCloud(const proto::LaserScan& proto) {
return ToPointCloudWithIntensities(proto).points;
}
Expand Down Expand Up @@ -77,8 +62,6 @@ proto::RangeData ToProto(const RangeData& range_data) {
*proto.mutable_origin() = transform::ToProto(range_data.origin);
*proto.mutable_point_cloud() = ToProto(range_data.returns);
*proto.mutable_missing_echo_point_cloud() = ToProto(range_data.misses);
std::copy(range_data.reflectivities.begin(), range_data.reflectivities.end(),
RepeatedFieldBackInserter(proto.mutable_reflectivity()));
return proto;
}

Expand All @@ -87,8 +70,6 @@ RangeData FromProto(const proto::RangeData& proto) {
transform::ToEigen(proto.origin()), ToPointCloud(proto.point_cloud()),
ToPointCloud(proto.missing_echo_point_cloud()),
};
std::copy(proto.reflectivity().begin(), proto.reflectivity().end(),
std::back_inserter(range_data.reflectivities));
return range_data;
}

Expand All @@ -98,7 +79,6 @@ RangeData TransformRangeData(const RangeData& range_data,
transform * range_data.origin,
TransformPointCloud(range_data.returns, transform),
TransformPointCloud(range_data.misses, transform),
range_data.reflectivities,
};
}

Expand All @@ -109,21 +89,16 @@ RangeData CropRangeData(const RangeData& range_data, const float min_z,
}

CompressedRangeData Compress(const RangeData& range_data) {
std::vector<int> new_to_old;
CompressedPointCloud compressed_returns =
CompressedPointCloud::CompressAndReturnOrder(range_data.returns,
&new_to_old);
return CompressedRangeData{
range_data.origin, std::move(compressed_returns),
range_data.origin, CompressedPointCloud(range_data.returns),
CompressedPointCloud(range_data.misses),
ReorderReflectivities(range_data.reflectivities, new_to_old)};
};
}

RangeData Decompress(const CompressedRangeData& compressed_range_data) {
return RangeData{compressed_range_data.origin,
compressed_range_data.returns.Decompress(),
compressed_range_data.misses.Decompress(),
compressed_range_data.reflectivities};
compressed_range_data.misses.Decompress()};
}

} // namespace sensor
Expand Down
6 changes: 0 additions & 6 deletions cartographer/sensor/range_data.h
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,6 @@ struct RangeData {
Eigen::Vector3f origin;
PointCloud returns;
PointCloud misses;

// Reflectivity value of returns.
std::vector<uint8> reflectivities;
};

// Builds a PointCloud of returns from 'proto', dropping any beams with ranges
Expand Down Expand Up @@ -67,9 +64,6 @@ struct CompressedRangeData {
Eigen::Vector3f origin;
CompressedPointCloud returns;
CompressedPointCloud misses;

// Reflectivity value of returns.
std::vector<uint8> reflectivities;
};

CompressedRangeData Compress(const RangeData& range_data);
Expand Down
Loading

0 comments on commit cdd366b

Please sign in to comment.