diff --git a/CMakeLists.txt b/CMakeLists.txt
index d972d03..d02ace0 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -58,6 +58,7 @@ include_directories(
add_library(omni_slam_eval_lib
src/module/tracking_module.cc
src/module/matching_module.cc
+ src/module/stereo_module.cc
src/module/reconstruction_module.cc
src/module/odometry_module.cc
src/data/frame.cc
@@ -70,6 +71,8 @@ add_library(omni_slam_eval_lib
src/odometry/pnp.cc
src/odometry/five_point.cc
src/optimization/bundle_adjuster.cc
+ src/stereo/stereo_matcher.cc
+ src/stereo/lk_stereo_matcher.cc
src/util/hdf_file.cc
)
@@ -99,6 +102,13 @@ add_executable(omni_slam_odometry_eval_node
src/omni_slam_odometry_eval_node.cc
)
+add_executable(omni_slam_stereo_eval_node
+ src/ros/stereo_eval.cc
+ src/ros/tracking_eval.cc
+ src/ros/eval_base.cc
+ src/omni_slam_stereo_eval_node.cc
+)
+
target_link_libraries(omni_slam_tracking_eval_node PUBLIC omni_slam_eval_lib
${catkin_LIBRARIES}
${OpenCV_LIBS}
@@ -135,11 +145,21 @@ target_link_libraries(omni_slam_odometry_eval_node PUBLIC omni_slam_eval_lib
${CERES_LIBRARIES}
)
+target_link_libraries(omni_slam_stereo_eval_node PUBLIC omni_slam_eval_lib
+ ${catkin_LIBRARIES}
+ ${OpenCV_LIBS}
+ ${HDF5_CXX_LIBRARIES}
+ ${OpenMP_CXX_FLAGS}
+ ${PCL_LIBRARIES}
+ ${CERES_LIBRARIES}
+)
+
target_compile_options(omni_slam_tracking_eval_node PUBLIC ${OpenMP_CXX_FLAGS})
target_compile_options(omni_slam_matching_eval_node PUBLIC ${OpenMP_CXX_FLAGS})
target_compile_options(omni_slam_reconstruction_eval_node PUBLIC ${OpenMP_CXX_FLAGS})
target_compile_options(omni_slam_odometry_eval_node PUBLIC ${OpenMP_CXX_FLAGS})
-set(CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS})
+target_compile_options(omni_slam_stereo_eval_node PUBLIC ${OpenMP_CXX_FLAGS})
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS} -ggdb")
install(TARGETS omni_slam_tracking_eval_node
ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
@@ -164,3 +184,9 @@ install(TARGETS omni_slam_odometry_eval_node
LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
)
+
+install(TARGETS omni_slam_stereo_eval_node
+ ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+ LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+ RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+)
diff --git a/launch/stereo_eval.launch b/launch/stereo_eval.launch
new file mode 100644
index 0000000..cb5f968
--- /dev/null
+++ b/launch/stereo_eval.launch
@@ -0,0 +1,33 @@
+ camera_model: '$(arg camera_model)'
+ camera_parameters: $(arg camera_params)
+ detector_type: 'GFTT'
+ detector_parameters: {maxCorners: 1000, qualityLevel: 0.001, minDistance: 5, blockSize: 5}
+ stereo_matcher_window_size: 256
+ stereo_matcher_num_scales: 5
+ stereo_matcher_error_threshold: 20
+ stereo_matcher_epipolar_threshold: 0.008
+ stereo_tf_t: [2.0, 0.0, 0.0]
+ stereo_tf_r: [0.0, 0.0, 0.0, 1.0]
+
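The stereo_tf_t and stereo_tf_r parameters above give the second camera's pose relative to the first as a translation plus an (x, y, z, w) quaternion; eval_base.cc converts them into the 3x4 pose matrix stored as stereoPose_. A minimal sketch of that conversion, assuming Eigen and an [R | t] pose layout (the helper name is illustrative, not the project's TFUtil):

    #include <Eigen/Dense>
    #include <vector>

    // Build a 3x4 [R | t] pose from stereo_tf_t = (x, y, z) and stereo_tf_r = (x, y, z, w).
    Eigen::Matrix<double, 3, 4> StereoExtrinsicsToPose(const std::vector<double> &t,
                                                       const std::vector<double> &r)
    {
        Eigen::Quaterniond q(r[3], r[0], r[1], r[2]);  // Eigen's constructor takes (w, x, y, z)
        Eigen::Matrix<double, 3, 4> pose;
        pose.block<3, 3>(0, 0) = q.normalized().toRotationMatrix();
        pose.block<3, 1>(0, 3) = Eigen::Vector3d(t[0], t[1], t[2]);
        return pose;
    }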
diff --git a/src/data/feature.h b/src/data/feature.h
index 2a921a4..5953d74 100644
--- a/src/data/feature.h
+++ b/src/data/feature.h
@@ -28,14 +28,15 @@ class Feature
bool HasWorldPoint() const;
bool HasEstimatedWorldPoint() const;
- bool worldPointCached_{false};
- bool worldPointEstimateCached_{false};
private:
Frame &frame_;
cv::KeyPoint kpt_;
cv::Mat descriptor_;
Vector3d worldPoint_;
Vector3d worldPointEstimate_;
+
+ bool worldPointCached_{false};
+ bool worldPointEstimateCached_{false};
};
}
diff --git a/src/data/frame.cc b/src/data/frame.cc
index 663ef4b..fa4eb6f 100644
--- a/src/data/frame.cc
+++ b/src/data/frame.cc
@@ -8,17 +8,33 @@ namespace data
int Frame::lastFrameId_ = 0;
-Frame::Frame(cv::Mat &image, cv::Mat &depth_image, Matrix &pose, double time, camera::CameraModel<> &camera_model)
+Frame::Frame(cv::Mat &image, cv::Mat &stereo_image, cv::Mat &depth_image, Matrix &pose, Matrix &stereo_pose, double time, camera::CameraModel<> &camera_model)
: id_(lastFrameId_++),
image_(image.clone()),
+ stereoImage_(stereo_image.clone()),
depthImage_(depth_image.clone()),
pose_(pose),
invPose_(util::TFUtil::InversePoseMatrix(pose)),
+ stereoPose_(stereo_pose),
timeSec_(time),
cameraModel_(camera_model)
{
hasPose_ = true;
hasDepth_ = true;
+ hasStereo_ = true;
+}
+
+Frame::Frame(cv::Mat &image, cv::Mat &stereo_image, Matrix &stereo_pose, double time, camera::CameraModel<> &camera_model)
+ : id_(lastFrameId_++),
+ image_(image.clone()),
+ stereoImage_(stereo_image.clone()),
+ stereoPose_(stereo_pose),
+ timeSec_(time),
+ cameraModel_(camera_model)
+{
+ hasPose_ = false;
+ hasDepth_ = false;
+ hasStereo_ = true;
}
Frame::Frame(cv::Mat &image, cv::Mat &depth_image, double time, camera::CameraModel<> &camera_model)
@@ -30,6 +46,21 @@ Frame::Frame(cv::Mat &image, cv::Mat &depth_image, double time, camera::CameraMo
{
hasPose_ = false;
hasDepth_ = true;
+ hasStereo_ = false;
+}
+
+Frame::Frame(cv::Mat &image, cv::Mat &stereo_image, cv::Mat &depth_image, Matrix &stereo_pose, double time, camera::CameraModel<> &camera_model)
+ : id_(lastFrameId_++),
+ image_(image.clone()),
+ stereoImage_(stereo_image.clone()),
+ depthImage_(depth_image.clone()),
+ stereoPose_(stereo_pose),
+ timeSec_(time),
+ cameraModel_(camera_model)
+{
+ hasPose_ = false;
+ hasDepth_ = true;
+ hasStereo_ = true;
}
Frame::Frame(cv::Mat &image, Matrix &pose, double time, camera::CameraModel<> &camera_model)
@@ -42,6 +73,36 @@ Frame::Frame(cv::Mat &image, Matrix &pose, double time, camera::C
{
hasPose_ = true;
hasDepth_ = false;
+ hasStereo_ = false;
+}
+
+Frame::Frame(cv::Mat &image, cv::Mat &stereo_image, Matrix &pose, Matrix &stereo_pose, double time, camera::CameraModel<> &camera_model)
+ : id_(lastFrameId_++),
+ image_(image.clone()),
+ stereoImage_(stereo_image.clone()),
+ pose_(pose),
+ invPose_(util::TFUtil::InversePoseMatrix(pose)),
+ stereoPose_(stereo_pose),
+ timeSec_(time),
+ cameraModel_(camera_model)
+{
+ hasPose_ = true;
+ hasDepth_ = false;
+ hasStereo_ = true;
+}
+
+Frame::Frame(cv::Mat &image, Matrix &pose, cv::Mat &depth_image, double time, camera::CameraModel<> &camera_model)
+ : id_(lastFrameId_++),
+ image_(image.clone()),
+ depthImage_(depth_image.clone()),
+ pose_(pose),
+ invPose_(util::TFUtil::InversePoseMatrix(pose)),
+ timeSec_(time),
+ cameraModel_(camera_model)
+{
+ hasPose_ = true;
+ hasDepth_ = true;
+ hasStereo_ = false;
}
Frame::Frame(cv::Mat &image, double time, camera::CameraModel<> &camera_model)
@@ -51,13 +112,16 @@ Frame::Frame(cv::Mat &image, double time, camera::CameraModel<> &camera_model)
{
hasPose_ = false;
hasDepth_ = false;
+ hasStereo_ = false;
}
Frame::Frame(const Frame &other)
: image_(other.image_.clone()),
depthImage_(other.depthImage_.clone()),
+ stereoImage_(other.stereoImage_.clone()),
imageComp_(other.imageComp_),
depthImageComp_(other.depthImageComp_),
+ stereoImageComp_(other.stereoImageComp_),
cameraModel_(other.cameraModel_),
id_(other.id_),
pose_(other.pose_),
@@ -65,6 +129,7 @@ Frame::Frame(const Frame &other)
timeSec_(other.timeSec_),
hasPose_(other.hasPose_),
hasDepth_(other.hasDepth_),
+ hasStereo_(other.hasStereo_),
isCompressed_(other.isCompressed_)
{
}
@@ -97,11 +162,25 @@ const cv::Mat& Frame::GetDepthImage()
return depthImage_;
}
+const cv::Mat& Frame::GetStereoImage()
+{
+ if (isCompressed_)
+ {
+ DecompressImages();
+ }
+ return stereoImage_;
+}
+
const int Frame::GetID() const
{
return id_;
}
+const Matrix& Frame::GetStereoPose() const
+{
+ return stereoPose_;
+}
+
const camera::CameraModel<>& Frame::GetCameraModel() const
{
return cameraModel_;
@@ -138,6 +217,25 @@ void Frame::SetDepthImage(cv::Mat &depth_image)
hasDepth_ = true;
}
+void Frame::SetStereoImage(cv::Mat &stereo_image)
+{
+ if (isCompressed_)
+ {
+        std::vector<int> param = {cv::IMWRITE_PNG_COMPRESSION, 5};
+ cv::imencode(".png", stereo_image, stereoImageComp_, param);
+ }
+ else
+ {
+ stereoImageComp_ = stereo_image.clone();
+ }
+ hasStereo_ = true;
+}
+
+void Frame::SetStereoPose(Matrix &pose)
+{
+ stereoPose_ = pose;
+}
+
void Frame::SetEstimatedPose(const Matrix &pose, const std::vector &landmark_ids)
{
poseEstimate_ = pose;
@@ -178,6 +276,11 @@ bool Frame::HasDepthImage() const
return hasDepth_;
}
+bool Frame::HasStereoImage() const
+{
+ return hasStereo_;
+}
+
bool Frame::HasEstimatedPose() const
{
return hasPoseEstimate_;
@@ -200,8 +303,13 @@ void Frame::CompressImages()
{
cv::imencode(".png", depthImage_, depthImageComp_, param);
}
+ if (hasStereo_)
+ {
+ cv::imencode(".png", stereoImage_, stereoImageComp_, param);
+ }
image_.release();
depthImage_.release();
+ stereoImage_.release();
isCompressed_ = true;
}
@@ -216,8 +324,13 @@ void Frame::DecompressImages()
{
depthImage_ = cv::imdecode(cv::Mat(1, depthImageComp_.size(), CV_8UC1, depthImageComp_.data()), CV_LOAD_IMAGE_UNCHANGED);
}
+ if (hasStereo_)
+ {
+ stereoImage_ = cv::imdecode(cv::Mat(1, stereoImageComp_.size(), CV_8UC1, stereoImageComp_.data()), CV_LOAD_IMAGE_UNCHANGED);
+ }
imageComp_.clear();
depthImageComp_.clear();
+ stereoImageComp_.clear();
isCompressed_ = false;
}
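For reference, a hedged usage sketch of the new stereo-aware constructor and accessors; it assumes the pose type is Eigen::Matrix<double, 3, 4> and that the camera model outlives the frame (both are assumptions, not taken verbatim from the repository):

    #include <opencv2/core.hpp>
    #include <Eigen/Dense>
    #include "data/frame.h"

    // Construct a frame from the left/right image pair plus the ground-truth pose and
    // stereo extrinsics, then query the stereo accessors added in this patch.
    void BuildStereoFrame(cv::Mat &left, cv::Mat &right,
                          Eigen::Matrix<double, 3, 4> &pose,
                          Eigen::Matrix<double, 3, 4> &stereo_pose,
                          double stamp,
                          omni_slam::camera::CameraModel<> &camera_model)
    {
        omni_slam::data::Frame frame(left, right, pose, stereo_pose, stamp, camera_model);
        if (frame.HasStereoImage())
        {
            const cv::Mat &stereo = frame.GetStereoImage();  // decompressed on demand if needed
            (void)stereo;
        }
    }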
diff --git a/src/data/frame.h b/src/data/frame.h
index 7ec88eb..cc83be8 100644
--- a/src/data/frame.h
+++ b/src/data/frame.h
@@ -16,9 +16,13 @@ namespace data
class Frame
{
public:
- Frame(cv::Mat &image, cv::Mat &depth_image, Matrix &pose, double time, camera::CameraModel<> &camera_model);
+ Frame(cv::Mat &image, cv::Mat &stereo_image, cv::Mat &depth_image, Matrix &pose, Matrix &stereo_pose, double time, camera::CameraModel<> &camera_model);
+ Frame(cv::Mat &image, cv::Mat &stereo_image, Matrix &stereo_pose, double time, camera::CameraModel<> &camera_model);
Frame(cv::Mat &image, cv::Mat &depth_image, double time, camera::CameraModel<> &camera_model);
+ Frame(cv::Mat &image, cv::Mat &stereo_image, cv::Mat &depth_image, Matrix &stereo_pose, double time, camera::CameraModel<> &camera_model);
Frame(cv::Mat &image, Matrix &pose, double time, camera::CameraModel<> &camera_model);
+ Frame(cv::Mat &image, cv::Mat &stereo_image, Matrix &pose, Matrix &stereo_pose, double time, camera::CameraModel<> &camera_model);
+ Frame(cv::Mat &image, Matrix &pose, cv::Mat &depth_image, double time, camera::CameraModel<> &camera_model);
Frame(cv::Mat &image, double time, camera::CameraModel<> &camera_model);
Frame(const Frame &other);
@@ -26,6 +30,8 @@ class Frame
const Matrix& GetInversePose() const;
const cv::Mat& GetImage();
const cv::Mat& GetDepthImage();
+ const cv::Mat& GetStereoImage();
+ const Matrix& GetStereoPose() const;
const camera::CameraModel<>& GetCameraModel() const;
const Matrix& GetEstimatedPose() const;
const Matrix& GetEstimatedInversePose() const;
@@ -34,11 +40,14 @@ class Frame
bool HasPose() const;
bool HasDepthImage() const;
+ bool HasStereoImage() const;
bool HasEstimatedPose() const;
bool IsEstimatedByLandmark(const int landmark_id) const;
void SetPose(Matrix &pose);
void SetDepthImage(cv::Mat &depth_image);
+ void SetStereoImage(cv::Mat &stereo_image);
+ void SetStereoPose(Matrix &pose);
void SetEstimatedPose(const Matrix &pose, const std::vector &landmark_ids);
void SetEstimatedPose(const Matrix &pose);
void SetEstimatedInversePose(const Matrix &pose, const std::vector &landmark_ids);
@@ -52,10 +61,13 @@ class Frame
const int id_;
std::vector imageComp_;
std::vector depthImageComp_;
+ std::vector stereoImageComp_;
cv::Mat image_;
cv::Mat depthImage_;
+ cv::Mat stereoImage_;
Matrix pose_;
Matrix invPose_;
+ Matrix stereoPose_;
Matrix poseEstimate_;
Matrix invPoseEstimate_;
double timeSec_;
@@ -65,6 +77,7 @@ class Frame
bool hasPose_;
bool hasDepth_;
+ bool hasStereo_;
bool hasPoseEstimate_{false};
bool isCompressed_{false};
diff --git a/src/data/landmark.cc b/src/data/landmark.cc
index a2dbff2..f88b409 100644
--- a/src/data/landmark.cc
+++ b/src/data/landmark.cc
@@ -21,7 +21,7 @@ void Landmark::AddObservation(Feature obs, bool compute_gnd)
groundTruth_ = obs.GetWorldPoint();
hasGroundTruth_ = true;
}
- if (obs.HasEstimatedWorldPoint())
+ if (!obs.GetFrame().HasStereoImage() && obs.HasEstimatedWorldPoint())
{
posEstimate_ = obs.GetEstimatedWorldPoint();
estFrameIds_.insert(obs.GetFrame().GetID());
@@ -36,6 +36,16 @@ void Landmark::AddObservation(Feature obs, bool compute_gnd)
obs_.push_back(obs);
}
+void Landmark::AddStereoObservation(Feature obs)
+{
+ if (idToStereoIndex_.find(obs.GetFrame().GetID()) != idToStereoIndex_.end())
+ {
+ return;
+ }
+ idToStereoIndex_[obs.GetFrame().GetID()] = stereoObs_.size();
+ stereoObs_.push_back(obs);
+}
+
const std::vector& Landmark::GetObservations() const
{
return obs_;
@@ -46,6 +56,11 @@ std::vector& Landmark::GetObservations()
return obs_;
}
+const std::vector<Feature>& Landmark::GetStereoObservations() const
+{
+ return stereoObs_;
+}
+
bool Landmark::IsObservedInFrame(const int frame_id) const
{
for (Feature f : obs_)
@@ -81,6 +96,15 @@ const Feature* Landmark::GetObservationByFrameID(const int frame_id) const
return &obs_[idToIndex_.at(frame_id)];
}
+const Feature* Landmark::GetStereoObservationByFrameID(const int frame_id) const
+{
+ if (idToStereoIndex_.find(frame_id) == idToStereoIndex_.end())
+ {
+ return nullptr;
+ }
+ return &stereoObs_[idToStereoIndex_.at(frame_id)];
+}
+
void Landmark::SetEstimatedPosition(const Vector3d &pos, const std::vector &frame_ids)
{
posEstimate_ = pos;
diff --git a/src/data/landmark.h b/src/data/landmark.h
index db5b9e4..c8b9f6d 100644
--- a/src/data/landmark.h
+++ b/src/data/landmark.h
@@ -18,12 +18,15 @@ class Landmark
public:
Landmark();
void AddObservation(Feature obs, bool compute_gnd = true);
+ void AddStereoObservation(Feature obs);
const std::vector& GetObservations() const;
std::vector& GetObservations();
+    const std::vector<Feature>& GetStereoObservations() const;
bool IsObservedInFrame(const int frame_id) const;
const int GetFirstFrameID() const;
const int GetNumObservations() const;
const Feature* GetObservationByFrameID(const int frame_id) const;
+ const Feature* GetStereoObservationByFrameID(const int frame_id) const;
void SetEstimatedPosition(const Vector3d &pos, const std::vector &frame_ids);
void SetEstimatedPosition(const Vector3d &pos);
@@ -37,8 +40,10 @@ class Landmark
private:
const int id_;
std::vector obs_;
+    std::vector<Feature> stereoObs_;
std::unordered_set estFrameIds_;
std::map idToIndex_;
+    std::map<int, int> idToStereoIndex_;
Vector3d groundTruth_;
Vector3d posEstimate_;
bool hasGroundTruth_{false};
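A short sketch of how a stereo matcher is expected to use the new observation bookkeeping: the feature found in the second image is stored separately from the main track, and both views are retrieved by frame ID (the surrounding objects are assumed to come from the tracking module):

    #include "data/landmark.h"

    // Record the second-camera feature for a landmark and look up both views of it
    // in the given frame, e.g. as input to triangulation.
    void RecordStereoMatch(omni_slam::data::Landmark &landmark,
                           const omni_slam::data::Feature &stereo_feat,
                           int frame_id)
    {
        landmark.AddStereoObservation(stereo_feat);  // ignored if one already exists for this frame

        const omni_slam::data::Feature *left = landmark.GetObservationByFrameID(frame_id);
        const omni_slam::data::Feature *right = landmark.GetStereoObservationByFrameID(frame_id);
        if (left != nullptr && right != nullptr)
        {
            // both image observations of the landmark are now available
        }
    }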
diff --git a/src/feature/tracker.cc b/src/feature/tracker.cc
index 454712d..421bdb7 100644
--- a/src/feature/tracker.cc
+++ b/src/feature/tracker.cc
@@ -30,22 +30,22 @@ int Tracker::Track(std::vector &landmarks, data::Frame &cur_fram
}
bool prevCompressed = prevFrame_->IsCompressed();
bool curCompressed = cur_frame.IsCompressed();
- int prev_id = prevFrame_->GetID();
- std::vector points_to_track;
- std::vector orig_kpt;
- std::vector orig_inx;
+ int prevId = prevFrame_->GetID();
+    std::vector<cv::Point2f> pointsToTrack;
+    std::vector<cv::KeyPoint> origKpt;
+    std::vector<int> origInx;
for (int i = 0; i < landmarks.size(); i++)
{
data::Landmark &landmark = landmarks[i];
- const data::Feature *feat = landmark.GetObservationByFrameID(prev_id);
+ const data::Feature *feat = landmark.GetObservationByFrameID(prevId);
if (feat != nullptr)
{
- points_to_track.push_back(feat->GetKeypoint().pt);
- orig_kpt.push_back(feat->GetKeypoint());
- orig_inx.push_back(i);
+ pointsToTrack.push_back(feat->GetKeypoint().pt);
+ origKpt.push_back(feat->GetKeypoint());
+ origInx.push_back(i);
}
}
- if (points_to_track.size() == 0)
+ if (pointsToTrack.size() == 0)
{
prevFrame_ = &cur_frame;
return 0;
@@ -53,30 +53,30 @@ int Tracker::Track(std::vector &landmarks, data::Frame &cur_fram
std::vector results;
std::vector status;
std::vector err;
- cv::calcOpticalFlowPyrLK(prevFrame_->GetImage(), cur_frame.GetImage(), points_to_track, results, status, err, windowSize_, numScales_, termCrit_, 0);
+ cv::calcOpticalFlowPyrLK(prevFrame_->GetImage(), cur_frame.GetImage(), pointsToTrack, results, status, err, windowSize_, numScales_, termCrit_, 0);
errors.clear();
int numGood = 0;
for (int i = 0; i < results.size(); i++)
{
- data::Landmark &landmark = landmarks[orig_inx[i]];
+ data::Landmark &landmark = landmarks[origInx[i]];
if (deltaPixErrThresh_ > 0)
{
if (landmark.HasGroundTruth() && cur_frame.HasPose())
{
- Vector2d pixel_gnd;
- Vector2d pixel_gnd_prev;
- if (!cur_frame.GetCameraModel().ProjectToImage(util::TFUtil::WorldFrameToCameraFrame(util::TFUtil::TransformPoint(cur_frame.GetInversePose(), landmark.GetGroundTruth())), pixel_gnd) || !prevFrame_->GetCameraModel().ProjectToImage(util::TFUtil::WorldFrameToCameraFrame(util::TFUtil::TransformPoint(prevFrame_->GetInversePose(), landmark.GetGroundTruth())), pixel_gnd_prev))
+ Vector2d pixelGnd;
+ Vector2d pixelGndPrev;
+ if (!cur_frame.GetCameraModel().ProjectToImage(util::TFUtil::WorldFrameToCameraFrame(util::TFUtil::TransformPoint(cur_frame.GetInversePose(), landmark.GetGroundTruth())), pixelGnd) || !prevFrame_->GetCameraModel().ProjectToImage(util::TFUtil::WorldFrameToCameraFrame(util::TFUtil::TransformPoint(prevFrame_->GetInversePose(), landmark.GetGroundTruth())), pixelGndPrev))
{
continue;
}
else
{
- Vector2d pixel_cur;
- pixel_cur << results[i].x, results[i].y;
- Vector2d pixel_prev;
- pixel_prev << points_to_track[i].x, points_to_track[i].y;
- double cur_error = (pixel_cur - pixel_gnd).norm();
- double prev_error = (pixel_prev - pixel_gnd_prev).norm();
+ Vector2d pixelCur;
+ pixelCur << results[i].x, results[i].y;
+ Vector2d pixelPrev;
+ pixelPrev << pointsToTrack[i].x, pointsToTrack[i].y;
+ double cur_error = (pixelCur - pixelGnd).norm();
+ double prev_error = (pixelPrev - pixelGndPrev).norm();
if (cur_error - prev_error > deltaPixErrThresh_)
{
continue;
@@ -86,7 +86,7 @@ int Tracker::Track(std::vector &landmarks, data::Frame &cur_fram
}
if (status[i] == 1 && err[i] <= errThresh_)
{
- cv::KeyPoint kpt(results[i], orig_kpt[i].size);
+ cv::KeyPoint kpt(results[i], origKpt[i].size);
data::Feature feat(cur_frame, kpt);
landmark.AddObservation(feat);
errors.push_back(err[i]);
diff --git a/src/module/odometry_module.cc b/src/module/odometry_module.cc
index 2176feb..94a1520 100644
--- a/src/module/odometry_module.cc
+++ b/src/module/odometry_module.cc
@@ -20,6 +20,14 @@ OdometryModule::OdometryModule(std::unique_ptr &&pnp, std::unique
void OdometryModule::Update(std::vector &landmarks, data::Frame &frame)
{
+ if (landmarks.size() == 0)
+ {
+ if (frame.HasPose())
+ {
+ frame.SetEstimatedPose(frame.GetPose());
+ }
+ return;
+ }
pnp_->Compute(landmarks, frame);
}
diff --git a/src/module/reconstruction_module.cc b/src/module/reconstruction_module.cc
index dd09bc6..79a4794 100644
--- a/src/module/reconstruction_module.cc
+++ b/src/module/reconstruction_module.cc
@@ -20,6 +20,11 @@ ReconstructionModule::ReconstructionModule(std::unique_ptr &landmarks)
{
+ if (landmarks.size() == 0)
+ {
+ return;
+ }
+
triangulator_->Triangulate(landmarks);
visualization_.Reserve(landmarks.size());
diff --git a/src/module/stereo_module.cc b/src/module/stereo_module.cc
new file mode 100644
index 0000000..aa92baa
--- /dev/null
+++ b/src/module/stereo_module.cc
@@ -0,0 +1,80 @@
+#include "stereo_module.h"
+
+using namespace std;
+
+namespace omni_slam
+{
+namespace module
+{
+
+StereoModule::StereoModule(std::unique_ptr<stereo::StereoMatcher> &stereo)
+ : stereo_(std::move(stereo))
+{
+}
+
+StereoModule::StereoModule(std::unique_ptr<stereo::StereoMatcher> &&stereo)
+ : StereoModule(stereo)
+{
+}
+
+void StereoModule::Update(data::Frame &frame, std::vector<data::Landmark> &landmarks)
+{
+ stereo_->Match(frame, landmarks);
+
+ if (frameNum_ == 0)
+ {
+ visualization_.Init(frame.GetImage().size());
+ }
+ for (data::Landmark &landmark : landmarks)
+ {
+ const data::Feature *feat1 = landmark.GetObservationByFrameID(frame.GetID());
+ const data::Feature *feat2 = landmark.GetStereoObservationByFrameID(frame.GetID());
+ if (feat1 != nullptr && feat2 != nullptr)
+ {
+ Matrix framePose = frame.HasEstimatedPose() ? frame.GetEstimatedPose() : frame.GetPose();
+ double depth = (landmark.GetEstimatedPosition() - framePose.block<3, 1>(0, 3)).norm();
+ visualization_.AddMatch(feat1->GetKeypoint().pt, feat2->GetKeypoint().pt, depth);
+ }
+ }
+ frameNum_++;
+}
+
+StereoModule::Stats& StereoModule::GetStats()
+{
+ return stats_;
+}
+
+void StereoModule::Visualize(cv::Mat &base_img, const cv::Mat &base_stereo_img)
+{
+ visualization_.Draw(base_img, base_stereo_img);
+}
+
+void StereoModule::Visualization::Init(cv::Size img_size)
+{
+ curMask_ = cv::Mat::zeros(cv::Size(img_size.width * 2, img_size.height), CV_8UC3);
+ curDepth_ = cv::Mat::zeros(img_size, CV_8UC3);
+}
+
+void StereoModule::Visualization::AddMatch(cv::Point2f pt1, cv::Point2f pt2, double depth)
+{
+ cv::circle(curMask_, pt1, 3, cv::Scalar(255, 0, 0), -1);
+ cv::circle(curMask_, pt2 + cv::Point2f(curMask_.cols / 2., 0), 3, cv::Scalar(255, 0, 0), -1);
+ cv::line(curMask_, pt1, pt2 + cv::Point2f(curMask_.cols / 2., 0), cv::Scalar(255, 0, 0), 1);
+ cv::circle(curDepth_, pt1, 2, cv::Scalar(depth / maxDepth_ * 255, depth / maxDepth_ * 255, depth / maxDepth_ * 255));
+}
+
+void StereoModule::Visualization::Draw(cv::Mat &img, const cv::Mat &stereo_img)
+{
+ cv::hconcat(img, stereo_img, img);
+ if (img.channels() == 1)
+ {
+ cv::cvtColor(img, img, CV_GRAY2BGR);
+ }
+ curMask_.copyTo(img, curMask_);
+ curMask_ = cv::Mat::zeros(img.size(), CV_8UC3);
+ cv::hconcat(img, curDepth_, img);
+ curDepth_ = cv::Mat::zeros(stereo_img.size(), CV_8UC3);
+}
+
+}
+}
diff --git a/src/module/stereo_module.h b/src/module/stereo_module.h
new file mode 100644
index 0000000..f3c6f19
--- /dev/null
+++ b/src/module/stereo_module.h
@@ -0,0 +1,61 @@
+#ifndef _STEREO_MODULE_H_
+#define _STEREO_MODULE_H_
+
+#include
+#include
+#include
+
+#include "data/landmark.h"
+#include "stereo/stereo_matcher.h"
+
+#include
+#include
+
+using namespace Eigen;
+
+namespace omni_slam
+{
+namespace module
+{
+
+class StereoModule
+{
+public:
+ struct Stats
+ {
+ };
+
+    StereoModule(std::unique_ptr<stereo::StereoMatcher> &stereo);
+    StereoModule(std::unique_ptr<stereo::StereoMatcher> &&stereo);
+
+    void Update(data::Frame &frame, std::vector<data::Landmark> &landmarks);
+
+ Stats& GetStats();
+ void Visualize(cv::Mat &base_img, const cv::Mat &base_stereo_img);
+
+private:
+ class Visualization
+ {
+ public:
+ void Init(cv::Size img_size);
+ void AddMatch(cv::Point2f pt1, cv::Point2f pt2, double depth);
+ void Draw(cv::Mat &img, const cv::Mat &stereo_img);
+
+ private:
+ cv::Mat curMask_;
+ cv::Mat curDepth_;
+ const double maxDepth_{50.};
+ };
+
+    std::shared_ptr<stereo::StereoMatcher> stereo_;
+
+ int frameNum_{0};
+
+ Stats stats_;
+ Visualization visualization_;
+};
+
+}
+}
+
+#endif /* _STEREO_MODULE_H_ */
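For context, a hedged sketch of how this module is wired up and driven once per frame, mirroring what stereo_eval.cc does; the matcher parameters are the launch-file values and the constructor signature follows the declarations above:

    #include <memory>
    #include <vector>

    #include "module/stereo_module.h"
    #include "stereo/lk_stereo_matcher.h"

    // Build the module around an LK stereo matcher and run it on the current frame's
    // landmarks; matches are attached to the landmarks as stereo observations.
    void RunStereo(omni_slam::data::Frame &frame,
                   std::vector<omni_slam::data::Landmark> &landmarks)
    {
        std::unique_ptr<omni_slam::stereo::StereoMatcher> matcher(
            new omni_slam::stereo::LKStereoMatcher(0.008, 256, 5, 20.));  // epipolar thresh, window, scales, error thresh
        omni_slam::module::StereoModule stereoModule(matcher);

        stereoModule.Update(frame, landmarks);
    }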
diff --git a/src/module/tracking_module.cc b/src/module/tracking_module.cc
index d7ae4cc..01e1fc3 100644
--- a/src/module/tracking_module.cc
+++ b/src/module/tracking_module.cc
@@ -31,14 +31,6 @@ void TrackingModule::Update(std::unique_ptr &frame)
int imsize = max(frames_.back()->GetImage().rows, frames_.back()->GetImage().cols);
if (frameNum_ == 0)
{
- #pragma omp parallel for collapse(2)
- for (int i = 0; i < rs_.size() - 1; i++)
- {
- for (int j = 0; j < ts_.size() - 1; j++)
- {
- detector_->DetectInRadialRegion(*frames_.back(), landmarks_, rs_[i] * imsize, rs_[i+1] * imsize, ts_[j], ts_[j+1]);
- }
- }
tracker_->Init(*frames_.back());
visualization_.Init(frames_.back()->GetImage().size(), landmarks_.size());
@@ -48,22 +40,24 @@ void TrackingModule::Update(std::unique_ptr &frame)
}
vector trackErrors;
- std::vector landmarksTemp(landmarks_);
- tracker_->Track(landmarksTemp, *frames_.back(), trackErrors);
-
if (fivePointChecker_)
{
- Matrix3d E;
- std::vector inlierIndices;
- fivePointChecker_->Compute(landmarksTemp, **next(frames_.rbegin()), *frames_.back(), E, inlierIndices);
- for (int inx : inlierIndices)
+        std::vector<data::Landmark> landmarksTemp(landmarks_);
+ int tracks = tracker_->Track(landmarksTemp, *frames_.back(), trackErrors);
+ if (tracks > 0)
{
- landmarks_[inx].AddObservation(*landmarksTemp[inx].GetObservationByFrameID(frames_.back()->GetID()));
+ Matrix3d E;
+            std::vector<int> inlierIndices;
+ fivePointChecker_->Compute(landmarksTemp, **next(frames_.rbegin()), *frames_.back(), E, inlierIndices);
+ for (int inx : inlierIndices)
+ {
+ landmarks_[inx].AddObservation(*landmarksTemp[inx].GetObservationByFrameID(frames_.back()->GetID()));
+ }
}
}
else
{
- landmarks_ = std::move(landmarksTemp);
+ tracker_->Track(landmarks_, *frames_.back(), trackErrors);
}
int i = 0;
diff --git a/src/odometry/five_point.cc b/src/odometry/five_point.cc
index 2a2f21a..e682939 100644
--- a/src/odometry/five_point.cc
+++ b/src/odometry/five_point.cc
@@ -33,6 +33,10 @@ int FivePoint::Compute(const std::vector &landmarks, const data:
}
i++;
}
+ if (x1.size() < 5)
+ {
+ return 0;
+ }
int inliers = RANSAC(x1, x2, E);
std::vector indices = GetInlierIndices(x1, x2, E);
inlier_indices.clear();
diff --git a/src/odometry/pnp.cc b/src/odometry/pnp.cc
index 9c93aa3..2fa2eab 100644
--- a/src/odometry/pnp.cc
+++ b/src/odometry/pnp.cc
@@ -59,6 +59,10 @@ int PNP::Compute(const std::vector &landmarks, data::Frame &fram
}
i++;
}
+ if (xs.size() < 4)
+ {
+ return 0;
+ }
Matrix pose;
int inliers = RANSAC(xs, ys, yns, frame.GetCameraModel(), pose);
std::vector indices = GetInlierIndices(xs, yns, pose, frame.GetCameraModel());
diff --git a/src/omni_slam_odometry_eval_node.cc b/src/omni_slam_odometry_eval_node.cc
index 5f3c95f..2e3042f 100644
--- a/src/omni_slam_odometry_eval_node.cc
+++ b/src/omni_slam_odometry_eval_node.cc
@@ -4,7 +4,7 @@ int main(int argc, char *argv[])
{
ros::init(argc, argv, "omni_slam_odometry_eval_node");
- omni_slam::ros::OdometryEval odometry_eval;
+ omni_slam::ros::OdometryEval<> odometry_eval;
odometry_eval.Run();
return 0;
diff --git a/src/omni_slam_reconstruction_eval_node.cc b/src/omni_slam_reconstruction_eval_node.cc
index 9b24837..c16076a 100644
--- a/src/omni_slam_reconstruction_eval_node.cc
+++ b/src/omni_slam_reconstruction_eval_node.cc
@@ -4,7 +4,7 @@ int main(int argc, char *argv[])
{
ros::init(argc, argv, "omni_slam_reconstruction_eval_node");
- omni_slam::ros::ReconstructionEval reconstruction_eval;
+ omni_slam::ros::ReconstructionEval<> reconstruction_eval;
reconstruction_eval.Run();
return 0;
diff --git a/src/omni_slam_stereo_eval_node.cc b/src/omni_slam_stereo_eval_node.cc
new file mode 100644
index 0000000..38aa4df
--- /dev/null
+++ b/src/omni_slam_stereo_eval_node.cc
@@ -0,0 +1,12 @@
+#include "ros/stereo_eval.h"
+
+int main(int argc, char *argv[])
+{
+ ros::init(argc, argv, "omni_slam_stereo_eval_node");
+
+ omni_slam::ros::StereoEval stereo_eval;
+ stereo_eval.Run();
+
+ return 0;
+}
+
diff --git a/src/omni_slam_tracking_eval_node.cc b/src/omni_slam_tracking_eval_node.cc
index 3d1b141..83b7601 100644
--- a/src/omni_slam_tracking_eval_node.cc
+++ b/src/omni_slam_tracking_eval_node.cc
@@ -4,7 +4,7 @@ int main(int argc, char *argv[])
{
ros::init(argc, argv, "omni_slam_tracking_eval_node");
- omni_slam::ros::TrackingEval tracking_eval;
+ omni_slam::ros::TrackingEval<> tracking_eval;
tracking_eval.Run();
return 0;
diff --git a/src/ros/eval_base.cc b/src/ros/eval_base.cc
index 5b3e67c..ec8820f 100644
--- a/src/ros/eval_base.cc
+++ b/src/ros/eval_base.cc
@@ -15,7 +15,8 @@ namespace omni_slam
namespace ros
{
-EvalBase::EvalBase(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private)
+template <bool Stereo>
+EvalBase<Stereo>::EvalBase(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private)
: nh_(nh), nhp_(nh_private), imageTransport_(nh)
{
std::string cameraModel;
@@ -24,6 +25,19 @@ EvalBase::EvalBase(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_priv
nhp_.param("image_topic", imageTopic_, std::string("/camera/image_raw"));
nhp_.param("depth_image_topic", depthImageTopic_, std::string("/depth_camera/image_raw"));
nhp_.param("pose_topic", poseTopic_, std::string("/pose"));
+ if (Stereo)
+ {
+ nhp_.param("stereo_image_topic", stereoImageTopic_, std::string("/camera2/image_raw"));
+        std::vector<double> stereoT;
+ stereoT.reserve(3);
+        std::vector<double> stereoR;
+ stereoR.reserve(4);
+ nhp_.getParam("stereo_tf_t", stereoT);
+ nhp_.getParam("stereo_tf_r", stereoR);
+ Quaterniond q(stereoR[3], stereoR[0], stereoR[1], stereoR[2]);
+ Vector3d t(stereoT[0], stereoT[1], stereoT[2]);
+ stereoPose_ = util::TFUtil::QuaternionTranslationToPoseMatrix(q, t);
+ }
if (cameraModel == "double_sphere")
{
@@ -39,20 +53,34 @@ EvalBase::EvalBase(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_priv
}
}
-void EvalBase::InitSubscribers()
+template <>
+void EvalBase<false>::InitSubscribers()
+{
+ imageSubscriber_.subscribe(imageTransport_, imageTopic_, 3);
+ depthImageSubscriber_.subscribe(imageTransport_, depthImageTopic_, 3);
+ poseSubscriber_.subscribe(nh_, poseTopic_, 10);
+    sync_.reset(new message_filters::Synchronizer<MessageFilter>(MessageFilter(5), imageSubscriber_, depthImageSubscriber_, poseSubscriber_));
+ sync_->registerCallback(boost::bind(&EvalBase::FrameCallback, this, _1, _2, _3));
+}
+
+template <>
+void EvalBase<true>::InitSubscribers()
{
imageSubscriber_.subscribe(imageTransport_, imageTopic_, 3);
+ stereoImageSubscriber_.subscribe(imageTransport_, stereoImageTopic_, 3);
depthImageSubscriber_.subscribe(imageTransport_, depthImageTopic_, 3);
poseSubscriber_.subscribe(nh_, poseTopic_, 10);
-    sync_.reset(new message_filters::Synchronizer<message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image, geometry_msgs::PoseStamped>>(message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image, geometry_msgs::PoseStamped>(5), imageSubscriber_, depthImageSubscriber_, poseSubscriber_));
- sync_->registerCallback(boost::bind(&EvalBase::FrameCallback, this, _1, _2, _3));
+    sync_.reset(new message_filters::Synchronizer<MessageFilter>(MessageFilter(5), imageSubscriber_, stereoImageSubscriber_, depthImageSubscriber_, poseSubscriber_));
+ sync_->registerCallback(boost::bind(&EvalBase::FrameCallback, this, _1, _2, _3, _4));
}
-void EvalBase::InitPublishers()
+template <bool Stereo>
+void EvalBase<Stereo>::InitPublishers()
{
}
-void EvalBase::FrameCallback(const sensor_msgs::ImageConstPtr &image, const sensor_msgs::ImageConstPtr &depth_image, const geometry_msgs::PoseStamped::ConstPtr &pose)
+template <bool Stereo>
+void EvalBase<Stereo>::FrameCallback(const sensor_msgs::ImageConstPtr &image, const sensor_msgs::ImageConstPtr &depth_image, const geometry_msgs::PoseStamped::ConstPtr &pose)
{
cv_bridge::CvImagePtr cvImage;
cv_bridge::CvImagePtr cvDepthImage;
@@ -75,30 +103,74 @@ void EvalBase::FrameCallback(const sensor_msgs::ImageConstPtr &image, const sens
cv::Mat depthFloatImg;
cvDepthImage->image.convertTo(depthFloatImg, CV_64FC1, 500. / 65535);
-    ProcessFrame(std::unique_ptr<data::Frame>(new data::Frame(monoImg, depthFloatImg, posemat, pose->header.stamp.toSec(), *cameraModel_)));
+    ProcessFrame(std::unique_ptr<data::Frame>(new data::Frame(monoImg, posemat, depthFloatImg, pose->header.stamp.toSec(), *cameraModel_)));
Visualize(cvImage);
}
-void EvalBase::Finish()
+template <bool Stereo>
+void EvalBase<Stereo>::FrameCallback(const sensor_msgs::ImageConstPtr &image, const sensor_msgs::ImageConstPtr &stereo_image, const sensor_msgs::ImageConstPtr &depth_image, const geometry_msgs::PoseStamped::ConstPtr &pose)
+{
+ cv_bridge::CvImagePtr cvImage;
+ cv_bridge::CvImagePtr cvStereoImage;
+ cv_bridge::CvImagePtr cvDepthImage;
+ try
+ {
+ cvImage = cv_bridge::toCvCopy(image, sensor_msgs::image_encodings::BGR8);
+ cvStereoImage = cv_bridge::toCvCopy(stereo_image, sensor_msgs::image_encodings::BGR8);
+ cvDepthImage = cv_bridge::toCvCopy(depth_image, sensor_msgs::image_encodings::TYPE_16UC1);
+ }
+ catch (cv_bridge::Exception &e)
+ {
+ ROS_ERROR("cv_bridge exception: %s", e.what());
+ return;
+ }
+
+ Quaterniond q(pose->pose.orientation.w, pose->pose.orientation.x, pose->pose.orientation.y, pose->pose.orientation.z);
+ Vector3d t(pose->pose.position.x, pose->pose.position.y, pose->pose.position.z);
+ Matrix posemat = util::TFUtil::QuaternionTranslationToPoseMatrix(q, t);
+ cv::Mat monoImg;
+ cv::cvtColor(cvImage->image, monoImg, CV_BGR2GRAY);
+ cv::Mat monoImg2;
+ cv::cvtColor(cvStereoImage->image, monoImg2, CV_BGR2GRAY);
+ cv::Mat depthFloatImg;
+ cvDepthImage->image.convertTo(depthFloatImg, CV_64FC1, 500. / 65535);
+
+    ProcessFrame(std::unique_ptr<data::Frame>(new data::Frame(monoImg, monoImg2, depthFloatImg, posemat, stereoPose_, pose->header.stamp.toSec(), *cameraModel_)));
+
+ Visualize(cvImage, cvStereoImage);
+}
+
+template <bool Stereo>
+void EvalBase<Stereo>::Finish()
{
}
-bool EvalBase::GetAttributes(std::map &attributes)
+template <bool Stereo>
+bool EvalBase<Stereo>::GetAttributes(std::map &attributes)
{
return false;
}
-bool EvalBase::GetAttributes(std::map &attributes)
+template <bool Stereo>
+bool EvalBase<Stereo>::GetAttributes(std::map &attributes)
{
return false;
}
-void EvalBase::Visualize(cv_bridge::CvImagePtr &base_img)
+template <bool Stereo>
+void EvalBase<Stereo>::Visualize(cv_bridge::CvImagePtr &base_img)
+{
+}
+
+template <bool Stereo>
+void EvalBase<Stereo>::Visualize(cv_bridge::CvImagePtr &base_img, cv_bridge::CvImagePtr &base_stereo_img)
{
+ Visualize(base_img);
}
-void EvalBase::Run()
+template <bool Stereo>
+void EvalBase<Stereo>::Run()
{
std::string bagFile;
std::string resultsFile;
@@ -128,6 +200,7 @@ void EvalBase::Run()
rosbag::Bag bag;
bag.open(bagFile);
sensor_msgs::ImageConstPtr imageMsg = nullptr;
+ sensor_msgs::ImageConstPtr stereoMsg = nullptr;
sensor_msgs::ImageConstPtr depthMsg = nullptr;
geometry_msgs::PoseStamped::ConstPtr poseMsg = nullptr;
int runNext = 0;
@@ -145,20 +218,32 @@ void EvalBase::Run()
depthMsg = m.instantiate();
runNext++;
}
+ else if (Stereo && m.getTopic() == stereoImageTopic_)
+ {
+            stereoMsg = m.instantiate<sensor_msgs::Image>();
+ runNext++;
+ }
else if (m.getTopic() == imageTopic_)
{
imageMsg = m.instantiate();
runNext++;
}
- else if (runNext >= 2 && m.getTopic() == poseTopic_)
+ else if (runNext >= (Stereo ? 3 : 2) && m.getTopic() == poseTopic_)
{
poseMsg = m.instantiate();
runNext = 0;
if (skip == 0)
{
- if (imageMsg != nullptr && depthMsg != nullptr && poseMsg != nullptr)
+ if (imageMsg != nullptr && depthMsg != nullptr && poseMsg != nullptr && (!Stereo || stereoMsg != nullptr))
{
- FrameCallback(imageMsg, depthMsg, poseMsg);
+ if (Stereo)
+ {
+ FrameCallback(imageMsg, stereoMsg, depthMsg, poseMsg);
+ }
+ else
+ {
+ FrameCallback(imageMsg, depthMsg, poseMsg);
+ }
}
}
skip++;
@@ -198,5 +283,8 @@ void EvalBase::Run()
}
}
+template class EvalBase<true>;
+template class EvalBase<false>;
+
}
}
diff --git a/src/ros/eval_base.h b/src/ros/eval_base.h
index 4b5c5dc..0888470 100644
--- a/src/ros/eval_base.h
+++ b/src/ros/eval_base.h
@@ -21,6 +21,7 @@ namespace omni_slam
namespace ros
{
+template <bool Stereo = false>
class EvalBase
{
public:
@@ -42,6 +43,7 @@ class EvalBase
private:
void FrameCallback(const sensor_msgs::ImageConstPtr &image, const sensor_msgs::ImageConstPtr &depth_image, const geometry_msgs::PoseStamped::ConstPtr &pose);
+ void FrameCallback(const sensor_msgs::ImageConstPtr &image, const sensor_msgs::ImageConstPtr &stereo_image, const sensor_msgs::ImageConstPtr &depth_image, const geometry_msgs::PoseStamped::ConstPtr &pose);
virtual void ProcessFrame(std::unique_ptr &&frame) = 0;
virtual void GetResultsData(std::map>> &data) = 0;
@@ -49,16 +51,21 @@ class EvalBase
virtual bool GetAttributes(std::map &attributes);
virtual bool GetAttributes(std::map &attributes);
virtual void Visualize(cv_bridge::CvImagePtr &base_img);
+ virtual void Visualize(cv_bridge::CvImagePtr &base_img, cv_bridge::CvImagePtr &base_stereo_img);
image_transport::SubscriberFilter imageSubscriber_;
image_transport::SubscriberFilter depthImageSubscriber_;
+ image_transport::SubscriberFilter stereoImageSubscriber_;
message_filters::Subscriber poseSubscriber_;
-    std::unique_ptr<message_filters::Synchronizer<message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image, geometry_msgs::PoseStamped>>> sync_;
+    typedef typename std::conditional<Stereo, message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::Image, geometry_msgs::PoseStamped>, message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image, geometry_msgs::PoseStamped>>::type MessageFilter;
+    std::unique_ptr<message_filters::Synchronizer<MessageFilter>> sync_;
std::string imageTopic_;
+ std::string stereoImageTopic_;
std::string depthImageTopic_;
std::string poseTopic_;
std::map cameraParams_;
+ Matrix stereoPose_;
};
}
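The Stereo flag selects the synchronizer policy at compile time: a four-topic ApproximateTime policy when the second image stream is subscribed, the original three-topic one otherwise. A stripped-down sketch of the same std::conditional idiom (policy names shortened for illustration):

    #include <type_traits>

    // Stand-ins for the two ApproximateTime policies (three vs. four synchronized topics).
    struct SyncPolicy3 {};
    struct SyncPolicy4 {};

    template <bool Stereo>
    struct SyncSelector
    {
        // The four-topic policy is chosen only when the stereo subscriber exists.
        typedef typename std::conditional<Stereo, SyncPolicy4, SyncPolicy3>::type MessageFilter;
    };

    static_assert(std::is_same<SyncSelector<true>::MessageFilter, SyncPolicy4>::value, "stereo syncs 4 topics");
    static_assert(std::is_same<SyncSelector<false>::MessageFilter, SyncPolicy3>::value, "mono syncs 3 topics");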
diff --git a/src/ros/matching_eval.cc b/src/ros/matching_eval.cc
index 5bb89ec..7a71bb9 100644
--- a/src/ros/matching_eval.cc
+++ b/src/ros/matching_eval.cc
@@ -11,7 +11,7 @@ namespace ros
{
MatchingEval::MatchingEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private)
- : EvalBase(nh, nh_private)
+ : EvalBase<>(nh, nh_private)
{
map detectorParams;
map descriptorParams;
@@ -48,7 +48,7 @@ MatchingEval::MatchingEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle
void MatchingEval::InitPublishers()
{
- EvalBase::InitPublishers();
+ EvalBase<>::InitPublishers();
string outputTopic;
nhp_.param("matched_image_topic", outputTopic, string("/omni_slam/matched"));
diff --git a/src/ros/matching_eval.h b/src/ros/matching_eval.h
index 73de0bf..a7e21e2 100644
--- a/src/ros/matching_eval.h
+++ b/src/ros/matching_eval.h
@@ -14,7 +14,7 @@ namespace omni_slam
namespace ros
{
-class MatchingEval : public EvalBase
+class MatchingEval : public EvalBase<>
{
public:
MatchingEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private);
diff --git a/src/ros/odometry_eval.cc b/src/ros/odometry_eval.cc
index b6f492e..da33de8 100644
--- a/src/ros/odometry_eval.cc
+++ b/src/ros/odometry_eval.cc
@@ -14,8 +14,9 @@ namespace omni_slam
namespace ros
{
-OdometryEval::OdometryEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private)
- : TrackingEval(nh, nh_private)
+template <bool Stereo>
+OdometryEval<Stereo>::OdometryEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private)
+    : TrackingEval<Stereo>(nh, nh_private)
{
double reprojThresh;
int iterations;
@@ -24,12 +25,12 @@ OdometryEval::OdometryEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle
bool logCeres;
int numCeresThreads;
- nhp_.param("pnp_inlier_threshold", reprojThresh, 10.);
- nhp_.param("pnp_iterations", iterations, 1000);
- nhp_.param("bundle_adjustment_max_iterations", baMaxIter, 500);
- nhp_.param("bundle_adjustment_loss_coefficient", baLossCoeff, 0.1);
- nhp_.param("bundle_adjustment_logging", logCeres, false);
- nhp_.param("bundle_adjustment_num_threads", numCeresThreads, 1);
+ this->nhp_.param("pnp_inlier_threshold", reprojThresh, 10.);
+ this->nhp_.param("pnp_iterations", iterations, 1000);
+ this->nhp_.param("bundle_adjustment_max_iterations", baMaxIter, 500);
+ this->nhp_.param("bundle_adjustment_loss_coefficient", baLossCoeff, 0.1);
+ this->nhp_.param("bundle_adjustment_logging", logCeres, false);
+ this->nhp_.param("bundle_adjustment_num_threads", numCeresThreads, 1);
unique_ptr pnp(new odometry::PNP(iterations, reprojThresh, numCeresThreads));
unique_ptr bundleAdjuster(new optimization::BundleAdjuster(baMaxIter, baLossCoeff, numCeresThreads, logCeres));
@@ -37,55 +38,61 @@ OdometryEval::OdometryEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle
odometryModule_.reset(new module::OdometryModule(pnp, bundleAdjuster));
}
-void OdometryEval::InitPublishers()
+template <bool Stereo>
+void OdometryEval<Stereo>::InitPublishers()
{
- TrackingEval::InitPublishers();
+    TrackingEval<Stereo>::InitPublishers();
string outputTopic;
string outputGndTopic;
string outputPathTopic;
string outputPathGndTopic;
- nhp_.param("odometry_estimate_topic", outputTopic, string("/omni_slam/odometry"));
- nhp_.param("odometry_ground_truth_topic", outputGndTopic, string("/omni_slam/odometry_truth"));
- nhp_.param("path_estimate_topic", outputPathTopic, string("/omni_slam/odometry_path"));
- nhp_.param("path_ground_truth_topic", outputPathGndTopic, string("/omni_slam/odometry_path_truth"));
- odometryPublisher_ = nh_.advertise(outputTopic, 2);
- odometryGndPublisher_ = nh_.advertise(outputGndTopic, 2);
- pathPublisher_ = nh_.advertise(outputPathTopic, 2);
- pathGndPublisher_ = nh_.advertise(outputPathGndTopic, 2);
+ this->nhp_.param("odometry_estimate_topic", outputTopic, string("/omni_slam/odometry"));
+ this->nhp_.param("odometry_ground_truth_topic", outputGndTopic, string("/omni_slam/odometry_truth"));
+ this->nhp_.param("path_estimate_topic", outputPathTopic, string("/omni_slam/odometry_path"));
+ this->nhp_.param("path_ground_truth_topic", outputPathGndTopic, string("/omni_slam/odometry_path_truth"));
+    odometryPublisher_ = this->nh_.template advertise<geometry_msgs::PoseStamped>(outputTopic, 2);
+    odometryGndPublisher_ = this->nh_.template advertise<geometry_msgs::PoseStamped>(outputGndTopic, 2);
+    pathPublisher_ = this->nh_.template advertise<nav_msgs::Path>(outputPathTopic, 2);
+    pathGndPublisher_ = this->nh_.template advertise<nav_msgs::Path>(outputPathGndTopic, 2);
}
-void OdometryEval::ProcessFrame(unique_ptr &&frame)
+template <bool Stereo>
+void OdometryEval<Stereo>::ProcessFrame(unique_ptr<data::Frame> &&frame)
{
- trackingModule_->Update(frame);
- odometryModule_->Update(trackingModule_->GetLandmarks(), *trackingModule_->GetFrames().back());
- trackingModule_->Redetect();
+ this->trackingModule_->Update(frame);
+ odometryModule_->Update(this->trackingModule_->GetLandmarks(), *this->trackingModule_->GetFrames().back());
+ this->trackingModule_->Redetect();
}
-void OdometryEval::Finish()
+template <bool Stereo>
+void OdometryEval<Stereo>::Finish()
{
ROS_INFO("Performing bundle adjustment...");
- odometryModule_->BundleAdjust(trackingModule_->GetLandmarks());
+ odometryModule_->BundleAdjust(this->trackingModule_->GetLandmarks());
PublishOdometry();
}
-void OdometryEval::GetResultsData(std::map>> &data)
+template <bool Stereo>
+void OdometryEval<Stereo>::GetResultsData(std::map<std::string, std::vector<std::vector<double>>> &data)
{
module::OdometryModule::Stats &stats = odometryModule_->GetStats();
}
-void OdometryEval::Visualize(cv_bridge::CvImagePtr &base_img)
+template <bool Stereo>
+void OdometryEval<Stereo>::Visualize(cv_bridge::CvImagePtr &base_img)
{
- TrackingEval::Visualize(base_img);
+    TrackingEval<Stereo>::Visualize(base_img);
PublishOdometry();
}
-void OdometryEval::PublishOdometry()
+template <bool Stereo>
+void OdometryEval<Stereo>::PublishOdometry()
{
geometry_msgs::PoseStamped poseMsg;
poseMsg.header.frame_id = "map";
- if (trackingModule_->GetFrames().back()->HasEstimatedPose())
+ if (this->trackingModule_->GetFrames().back()->HasEstimatedPose())
{
- const Matrix &pose = trackingModule_->GetFrames().back()->GetEstimatedPose();
+ const Matrix &pose = this->trackingModule_->GetFrames().back()->GetEstimatedPose();
poseMsg.pose.position.x = pose(0, 3);
poseMsg.pose.position.y = pose(1, 3);
poseMsg.pose.position.z = pose(2, 3);
@@ -94,10 +101,10 @@ void OdometryEval::PublishOdometry()
poseMsg.pose.orientation.y = quat.normalized().y();
poseMsg.pose.orientation.z = quat.normalized().z();
poseMsg.pose.orientation.w = quat.normalized().w();
- poseMsg.header.stamp = ::ros::Time(trackingModule_->GetFrames().back()->GetTime());
+ poseMsg.header.stamp = ::ros::Time(this->trackingModule_->GetFrames().back()->GetTime());
odometryPublisher_.publish(poseMsg);
}
- const Matrix &poseGnd = trackingModule_->GetFrames().back()->GetPose();
+ const Matrix &poseGnd = this->trackingModule_->GetFrames().back()->GetPose();
poseMsg.pose.position.x = poseGnd(0, 3);
poseMsg.pose.position.y = poseGnd(1, 3);
poseMsg.pose.position.z = poseGnd(2, 3);
@@ -106,7 +113,7 @@ void OdometryEval::PublishOdometry()
poseMsg.pose.orientation.y = quatGnd.normalized().y();
poseMsg.pose.orientation.z = quatGnd.normalized().z();
poseMsg.pose.orientation.w = quatGnd.normalized().w();
- poseMsg.header.stamp = ::ros::Time(trackingModule_->GetFrames().back()->GetTime());
+ poseMsg.header.stamp = ::ros::Time(this->trackingModule_->GetFrames().back()->GetTime());
odometryGndPublisher_.publish(poseMsg);
nav_msgs::Path path;
@@ -115,7 +122,7 @@ void OdometryEval::PublishOdometry()
nav_msgs::Path pathGnd;
pathGnd.header.stamp = ::ros::Time::now();
pathGnd.header.frame_id = "map";
- for (const std::unique_ptr &frame : trackingModule_->GetFrames())
+ for (const std::unique_ptr &frame : this->trackingModule_->GetFrames())
{
if (frame->HasEstimatedPose())
{
@@ -154,5 +161,8 @@ void OdometryEval::PublishOdometry()
pathGndPublisher_.publish(pathGnd);
}
+template class OdometryEval<true>;
+template class OdometryEval<false>;
+
}
}
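The pervasive nhp_ -> this->nhp_ changes are required once the base class becomes a template: names from a dependent base class are not found by unqualified lookup inside a derived class template. A minimal standalone illustration of the rule (generic names, not project code):

    template <bool Flag>
    struct Base
    {
        int param_ = 0;
    };

    template <bool Flag>
    struct Derived : Base<Flag>
    {
        int Get()
        {
            // return param_;     // error: param_ lives in a dependent base and is not found
            return this->param_;  // OK: lookup is deferred to instantiation
        }
    };

    int main()
    {
        Derived<true> d;
        return d.Get();
    }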
diff --git a/src/ros/odometry_eval.h b/src/ros/odometry_eval.h
index 6ca98fc..d86814f 100644
--- a/src/ros/odometry_eval.h
+++ b/src/ros/odometry_eval.h
@@ -12,23 +12,26 @@ namespace omni_slam
namespace ros
{
-class OdometryEval : public TrackingEval
+template <bool Stereo = false>
+class OdometryEval : public virtual TrackingEval<Stereo>
{
public:
OdometryEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private);
OdometryEval() : OdometryEval(::ros::NodeHandle(), ::ros::NodeHandle("~")) {}
-private:
- void InitPublishers();
+protected:
+ virtual void InitPublishers();
- void ProcessFrame(std::unique_ptr &&frame);
- void Finish();
- void GetResultsData(std::map>> &data);
- void Visualize(cv_bridge::CvImagePtr &base_img);
- void PublishOdometry();
+ virtual void ProcessFrame(std::unique_ptr &&frame);
+ virtual void Finish();
+ virtual void GetResultsData(std::map>> &data);
+ virtual void Visualize(cv_bridge::CvImagePtr &base_img);
std::unique_ptr odometryModule_;
+private:
+ void PublishOdometry();
+
::ros::Publisher odometryPublisher_;
::ros::Publisher odometryGndPublisher_;
::ros::Publisher pathPublisher_;
diff --git a/src/ros/reconstruction_eval.cc b/src/ros/reconstruction_eval.cc
index 2260da7..7bda19b 100644
--- a/src/ros/reconstruction_eval.cc
+++ b/src/ros/reconstruction_eval.cc
@@ -14,8 +14,9 @@ namespace omni_slam
namespace ros
{
-ReconstructionEval::ReconstructionEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private)
- : TrackingEval(nh, nh_private)
+template <bool Stereo>
+ReconstructionEval<Stereo>::ReconstructionEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private)
+    : TrackingEval<Stereo>(nh, nh_private)
{
double maxReprojError;
double minTriAngle;
@@ -24,12 +25,12 @@ ReconstructionEval::ReconstructionEval(const ::ros::NodeHandle &nh, const ::ros:
bool logCeres;
int numCeresThreads;
- nhp_.param("max_reprojection_error", maxReprojError, 0.0);
- nhp_.param("min_triangulation_angle", minTriAngle, 1.0);
- nhp_.param("bundle_adjustment_max_iterations", baMaxIter, 500);
- nhp_.param("bundle_adjustment_loss_coefficient", baLossCoeff, 0.1);
- nhp_.param("bundle_adjustment_logging", logCeres, false);
- nhp_.param("bundle_adjustment_num_threads", numCeresThreads, 1);
+ this->nhp_.param("max_reprojection_error", maxReprojError, 0.0);
+ this->nhp_.param("min_triangulation_angle", minTriAngle, 1.0);
+ this->nhp_.param("bundle_adjustment_max_iterations", baMaxIter, 500);
+ this->nhp_.param("bundle_adjustment_loss_coefficient", baLossCoeff, 0.1);
+ this->nhp_.param("bundle_adjustment_logging", logCeres, false);
+ this->nhp_.param("bundle_adjustment_num_threads", numCeresThreads, 1);
unique_ptr triangulator(new reconstruction::Triangulator(maxReprojError, minTriAngle));
unique_ptr bundleAdjuster(new optimization::BundleAdjuster(baMaxIter, baLossCoeff, numCeresThreads, logCeres));
@@ -37,25 +38,28 @@ ReconstructionEval::ReconstructionEval(const ::ros::NodeHandle &nh, const ::ros:
reconstructionModule_.reset(new module::ReconstructionModule(triangulator, bundleAdjuster));
}
-void ReconstructionEval::InitPublishers()
+template <bool Stereo>
+void ReconstructionEval<Stereo>::InitPublishers()
{
- TrackingEval::InitPublishers();
+    TrackingEval<Stereo>::InitPublishers();
string outputTopic;
- nhp_.param("point_cloud_topic", outputTopic, string("/omni_slam/reconstructed"));
- pointCloudPublisher_ = nh_.advertise(outputTopic, 2);
+ this->nhp_.param("point_cloud_topic", outputTopic, string("/omni_slam/reconstructed"));
+    pointCloudPublisher_ = this->nh_.template advertise<sensor_msgs::PointCloud2>(outputTopic, 2);
}
-void ReconstructionEval::ProcessFrame(unique_ptr &&frame)
+template <bool Stereo>
+void ReconstructionEval<Stereo>::ProcessFrame(unique_ptr<data::Frame> &&frame)
{
- trackingModule_->Update(frame);
- reconstructionModule_->Update(trackingModule_->GetLandmarks());
- trackingModule_->Redetect();
+ this->trackingModule_->Update(frame);
+ reconstructionModule_->Update(this->trackingModule_->GetLandmarks());
+ this->trackingModule_->Redetect();
}
-void ReconstructionEval::Finish()
+template <bool Stereo>
+void ReconstructionEval<Stereo>::Finish()
{
ROS_INFO("Performing bundle adjustment...");
- reconstructionModule_->BundleAdjust(trackingModule_->GetLandmarks());
+ reconstructionModule_->BundleAdjust(this->trackingModule_->GetLandmarks());
pcl::PointCloud cloud;
cv::Mat noarr;
@@ -67,12 +71,14 @@ void ReconstructionEval::Finish()
pointCloudPublisher_.publish(msg);
}
-void ReconstructionEval::GetResultsData(std::map>> &data)
+template <bool Stereo>
+void ReconstructionEval<Stereo>::GetResultsData(std::map<std::string, std::vector<std::vector<double>>> &data)
{
module::ReconstructionModule::Stats &stats = reconstructionModule_->GetStats();
}
-void ReconstructionEval::Visualize(cv_bridge::CvImagePtr &base_img)
+template <bool Stereo>
+void ReconstructionEval<Stereo>::Visualize(cv_bridge::CvImagePtr &base_img)
{
pcl::PointCloud cloud;
reconstructionModule_->Visualize(base_img->image, cloud);
@@ -81,8 +87,11 @@ void ReconstructionEval::Visualize(cv_bridge::CvImagePtr &base_img)
msg.header.frame_id = "map";
msg.header.stamp = ::ros::Time::now();
pointCloudPublisher_.publish(msg);
- TrackingEval::Visualize(base_img);
+    TrackingEval<Stereo>::Visualize(base_img);
}
+template class ReconstructionEval<true>;
+template class ReconstructionEval<false>;
+
}
}
diff --git a/src/ros/reconstruction_eval.h b/src/ros/reconstruction_eval.h
index 37cbc93..effc9f9 100644
--- a/src/ros/reconstruction_eval.h
+++ b/src/ros/reconstruction_eval.h
@@ -12,23 +12,25 @@ namespace omni_slam
namespace ros
{
-class ReconstructionEval : public TrackingEval
+template <bool Stereo = false>
+class ReconstructionEval : public virtual TrackingEval<Stereo>
{
public:
ReconstructionEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private);
ReconstructionEval() : ReconstructionEval(::ros::NodeHandle(), ::ros::NodeHandle("~")) {}
-private:
- void InitPublishers();
-
- void ProcessFrame(std::unique_ptr &&frame);
- void Finish();
- void GetResultsData(std::map>> &data);
- void Visualize(cv_bridge::CvImagePtr &base_img);
+protected:
+ virtual void InitPublishers();
- ::ros::Publisher pointCloudPublisher_;
+ virtual void ProcessFrame(std::unique_ptr &&frame);
+ virtual void Finish();
+ virtual void GetResultsData(std::map>> &data);
+ virtual void Visualize(cv_bridge::CvImagePtr &base_img);
std::unique_ptr reconstructionModule_;
+
+private:
+ ::ros::Publisher pointCloudPublisher_;
};
}
diff --git a/src/ros/stereo_eval.cc b/src/ros/stereo_eval.cc
new file mode 100644
index 0000000..230f78f
--- /dev/null
+++ b/src/ros/stereo_eval.cc
@@ -0,0 +1,60 @@
+#include "stereo_eval.h"
+
+#include "stereo/lk_stereo_matcher.h"
+#include "module/tracking_module.h"
+
+using namespace std;
+
+namespace omni_slam
+{
+namespace ros
+{
+
+StereoEval::StereoEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private)
+    : TrackingEval<true>(nh, nh_private)
+{
+ int windowSize;
+ int numScales;
+ double errThresh;
+ double epiThresh;
+
+ this->nhp_.param("stereo_matcher_window_size", windowSize, 256);
+ this->nhp_.param("stereo_matcher_num_scales", numScales, 5);
+ this->nhp_.param("stereo_matcher_error_threshold", errThresh, 20.);
+ this->nhp_.param("stereo_matcher_epipolar_threshold", epiThresh, 0.005);
+
+    unique_ptr<stereo::StereoMatcher> stereo(new stereo::LKStereoMatcher(epiThresh, windowSize, numScales, errThresh));
+
+ stereoModule_.reset(new module::StereoModule(stereo));
+}
+
+void StereoEval::InitPublishers()
+{
+ string outputTopic;
+ this->nhp_.param("stereo_matched_image_topic", outputTopic, string("/omni_slam/stereo_matched"));
+ matchedImagePublisher_ = this->imageTransport_.advertise(outputTopic, 2);
+}
+
+void StereoEval::ProcessFrame(unique_ptr<data::Frame> &&frame)
+{
+ this->trackingModule_->GetLandmarks().clear();
+ this->trackingModule_->Update(frame);
+ this->trackingModule_->Redetect();
+ stereoModule_->Update(*this->trackingModule_->GetFrames().back(), this->trackingModule_->GetLandmarks());
+}
+
+void StereoEval::GetResultsData(std::map>> &data)
+{
+ module::StereoModule::Stats &stats = stereoModule_->GetStats();
+}
+
+void StereoEval::Visualize(cv_bridge::CvImagePtr &base_img, cv_bridge::CvImagePtr &base_stereo_img)
+{
+ stereoModule_->Visualize(base_img->image, base_stereo_img->image);
+ matchedImagePublisher_.publish(base_img->toImageMsg());
+}
+
+}
+}
+
+
diff --git a/src/ros/stereo_eval.h b/src/ros/stereo_eval.h
new file mode 100644
index 0000000..7fc629f
--- /dev/null
+++ b/src/ros/stereo_eval.h
@@ -0,0 +1,38 @@
+#ifndef _STEREO_EVAL_H_
+#define _STEREO_EVAL_H_
+
+#include "tracking_eval.h"
+#include "module/stereo_module.h"
+
+#include
+#include
+#include
+
+namespace omni_slam
+{
+namespace ros
+{
+
+class StereoEval : public virtual TrackingEval<true>
+{
+public:
+ StereoEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private);
+ StereoEval() : StereoEval(::ros::NodeHandle(), ::ros::NodeHandle("~")) {}
+
+protected:
+ virtual void InitPublishers();
+
+ virtual void ProcessFrame(std::unique_ptr &&frame);
+ virtual void GetResultsData(std::map>> &data);
+ virtual void Visualize(cv_bridge::CvImagePtr &base_img, cv_bridge::CvImagePtr &base_stereo_img);
+
+ std::unique_ptr stereoModule_;
+
+private:
+ image_transport::Publisher matchedImagePublisher_;
+};
+
+}
+}
+
+#endif /* _STEREO_EVAL_H_ */
diff --git a/src/ros/tracking_eval.cc b/src/ros/tracking_eval.cc
index be3fa32..dbb7976 100644
--- a/src/ros/tracking_eval.cc
+++ b/src/ros/tracking_eval.cc
@@ -11,8 +11,9 @@ namespace omni_slam
namespace ros
{
-TrackingEval::TrackingEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private)
- : EvalBase(nh, nh_private)
+template <bool Stereo>
+TrackingEval<Stereo>::TrackingEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle &nh_private)
+    : EvalBase<Stereo>(nh, nh_private)
{
string detectorType;
int trackerWindowSize;
@@ -24,15 +25,15 @@ TrackingEval::TrackingEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle
map detectorParams;
int minFeaturesRegion;
- nhp_.param("detector_type", detectorType, string("GFTT"));
- nhp_.param("tracker_window_size", trackerWindowSize, 128);
- nhp_.param("tracker_num_scales", trackerNumScales, 4);
- nhp_.param("tracker_checker_epipolar_threshold", fivePointThreshold, 0.01745240643);
- nhp_.param("tracker_checker_iterations", fivePointRansacIterations, 1000);
- nhp_.param("tracker_delta_pixel_error_threshold", trackerDeltaPixelErrorThresh, 5.0);
- nhp_.param("tracker_error_threshold", trackerErrorThresh, 20.);
- nhp_.param("min_features_per_region", minFeaturesRegion, 5);
- nhp_.getParam("detector_parameters", detectorParams);
+ this->nhp_.param("detector_type", detectorType, string("GFTT"));
+ this->nhp_.param("tracker_window_size", trackerWindowSize, 128);
+ this->nhp_.param("tracker_num_scales", trackerNumScales, 4);
+ this->nhp_.param("tracker_checker_epipolar_threshold", fivePointThreshold, 0.01745240643);
+ this->nhp_.param("tracker_checker_iterations", fivePointRansacIterations, 1000);
+ this->nhp_.param("tracker_delta_pixel_error_threshold", trackerDeltaPixelErrorThresh, 5.0);
+ this->nhp_.param("tracker_error_threshold", trackerErrorThresh, 20.);
+ this->nhp_.param("min_features_per_region", minFeaturesRegion, 5);
+ this->nhp_.getParam("detector_parameters", detectorParams);
unique_ptr detector;
if (feature::Detector::IsDetectorTypeValid(detectorType))
@@ -51,22 +52,25 @@ TrackingEval::TrackingEval(const ::ros::NodeHandle &nh, const ::ros::NodeHandle
trackingModule_.reset(new module::TrackingModule(detector, tracker, checker, minFeaturesRegion));
}
-void TrackingEval::InitPublishers()
+template <bool Stereo>
+void TrackingEval<Stereo>::InitPublishers()
{
- EvalBase::InitPublishers();
+    EvalBase<Stereo>::InitPublishers();
string outputTopic;
- nhp_.param("tracked_image_topic", outputTopic, string("/omni_slam/tracked"));
- trackedImagePublisher_ = imageTransport_.advertise(outputTopic, 2);
+ this->nhp_.param("tracked_image_topic", outputTopic, string("/omni_slam/tracked"));
+ trackedImagePublisher_ = this->imageTransport_.advertise(outputTopic, 2);
}
-void TrackingEval::ProcessFrame(unique_ptr &&frame)
+template <bool Stereo>