Improving external feature interface and documentation #377

Merged: 11 commits, Apr 2, 2023
3 changes: 0 additions & 3 deletions .gitmodules
@@ -97,6 +97,3 @@
[submodule "dependencies/3rdparty/libpointmatcher"]
path = dependencies/3rdparty/libpointmatcher
url = [email protected]:ANYbotics/libpointmatcher.git
[submodule "dependencies/internal/maplab_features"]
path = dependencies/internal/maplab_features
url = [email protected]:ethz-asl/maplab_features.git
3 changes: 2 additions & 1 deletion algorithms/feature-tracking/CMakeLists.txt
@@ -11,7 +11,8 @@ cs_add_library(${PROJECT_NAME}
src/feature-detection-extraction.cc
src/feature-tracking-pipeline.cc
src/feature-tracking-types.cc
src/vo-feature-tracking-pipeline.cc)
src/vo-feature-tracking-pipeline.cc
src/vo-outlier-rejection-pipeline.cc)

##########
# GTESTS #
@@ -21,13 +21,8 @@ class FeatureTrackingPipeline {

protected:
const std::string feature_tracking_ros_base_topic_;
const bool visualize_keypoint_detections_;
const bool visualize_keypoint_matches_;

private:
virtual void initialize(const aslam::NCamera::ConstPtr& ncamera) = 0;
virtual void trackFeaturesNFrame(
const aslam::Transformation& T_Bk_Bkp1, aslam::VisualNFrame* nframe_k,
aslam::VisualNFrame* nframe_kp1) = 0;
};

} // namespace feature_tracking
@@ -75,6 +75,19 @@ struct FeatureTrackingDetectorSettings {
size_t gridded_detector_num_threads_per_image;
};

struct FeatureTrackingOutlierSettings {
FeatureTrackingOutlierSettings();

// Threshold for the 2-pt RANSAC outlier rejection.
double two_pt_ransac_threshold;
// Maximum number of iterations for the outlier rejection RANSAC.
size_t two_pt_ransac_max_iterations;
// Whether the outlier rejection should be deterministic (i.e. fixed seed).
// Useful for experiments that need repeatable results.
bool deterministic;
};


} // namespace feature_tracking

#endif // FEATURE_TRACKING_FEATURE_TRACKING_TYPES_H_
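For reference, a minimal sketch (not part of this diff) of how the new `FeatureTrackingOutlierSettings` struct is meant to be used: the default constructor picks up the gflags defined further down in feature-tracking-types.cc, and individual fields can still be overridden in code afterwards.

```cpp
#include <gflags/gflags.h>

#include "feature-tracking/feature-tracking-types.h"

int main(int argc, char** argv) {
  // The default constructor reads --feature_tracker_two_pt_ransac_threshold,
  // --feature_tracker_two_pt_ransac_max_iterations and
  // --feature_tracker_deterministic, so parse the flags first.
  google::ParseCommandLineFlags(&argc, &argv, true);

  feature_tracking::FeatureTrackingOutlierSettings outlier_settings;
  // Fields can still be overridden in code, e.g. for repeatable experiments.
  outlier_settings.deterministic = true;
  outlier_settings.two_pt_ransac_max_iterations = 100u;
  return 0;
}
```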
@@ -25,7 +25,8 @@ class VOFeatureTrackingPipeline : public FeatureTrackingPipeline {
VOFeatureTrackingPipeline(
const aslam::NCamera::ConstPtr& ncamera,
const FeatureTrackingExtractorSettings& extractor_settings,
const FeatureTrackingDetectorSettings& detector_settings);
const FeatureTrackingDetectorSettings& detector_settings,
const FeatureTrackingOutlierSettings& outlier_settings);
virtual ~VOFeatureTrackingPipeline();

void initializeFirstNFrame(aslam::VisualNFrame* nframe_k);
@@ -37,10 +38,10 @@ class VOFeatureTrackingPipeline : public FeatureTrackingPipeline {
aslam::FrameToFrameMatchesList* outlier_matches_kp1_k);

private:
void initialize(const aslam::NCamera::ConstPtr& ncamera) override;
void initialize(const aslam::NCamera::ConstPtr& ncamera);
void trackFeaturesNFrame(
const aslam::Transformation& T_Bk_Bkp1, aslam::VisualNFrame* nframe_k,
aslam::VisualNFrame* nframe_kp1) override;
aslam::VisualNFrame* nframe_kp1);

void trackFeaturesSingleCamera(
const aslam::Quaternion& q_Bkp1_Bk, const size_t camera_idx,
@@ -66,6 +67,7 @@ class VOFeatureTrackingPipeline : public FeatureTrackingPipeline {

const FeatureTrackingExtractorSettings extractor_settings_;
const FeatureTrackingDetectorSettings detector_settings_;
const FeatureTrackingOutlierSettings outlier_settings_;
};

} // namespace feature_tracking
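For callers, the only API change here is the extra outlier settings argument to the constructor. A hedged sketch follows; the helper function and the aslam include path are assumptions, not part of this diff.

```cpp
#include <memory>

#include <aslam/cameras/ncamera.h>

#include "feature-tracking/feature-tracking-types.h"
#include "feature-tracking/vo-feature-tracking-pipeline.h"

// Hypothetical helper: builds the VO tracking pipeline for a camera rig using
// the flag-derived default settings.
std::unique_ptr<feature_tracking::VOFeatureTrackingPipeline> createVoPipeline(
    const aslam::NCamera::ConstPtr& ncamera) {
  feature_tracking::FeatureTrackingExtractorSettings extractor_settings;
  feature_tracking::FeatureTrackingDetectorSettings detector_settings;
  feature_tracking::FeatureTrackingOutlierSettings outlier_settings;
  return std::make_unique<feature_tracking::VOFeatureTrackingPipeline>(
      ncamera, extractor_settings, detector_settings, outlier_settings);
}
```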
@@ -0,0 +1,60 @@
#ifndef FEATURE_TRACKING_VO_OUTLIER_REJECTION_PIPELINE_H_
#define FEATURE_TRACKING_VO_OUTLIER_REJECTION_PIPELINE_H_

#include <sensors/external-features.h>
#include <unordered_set>

#include "feature-tracking/feature-tracking-pipeline.h"
#include "feature-tracking/feature-tracking-types.h"

namespace feature_tracking {

class VOOutlierRejectionPipeline : public FeatureTrackingPipeline {
public:
MAPLAB_POINTER_TYPEDEFS(VOOutlierRejectionPipeline);
MAPLAB_DISALLOW_EVIL_CONSTRUCTORS(VOOutlierRejectionPipeline);
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
VOOutlierRejectionPipeline(
const aslam::Camera::ConstPtr& camera, const int cam_idx,
const aslam::Quaternion& q_C_B, const vi_map::FeatureType feature_type,
const FeatureTrackingOutlierSettings& outlier_settings);
virtual ~VOOutlierRejectionPipeline();

void rejectMatchesFrame(
const aslam::Quaternion& q_Bkp1_Bk, aslam::VisualFrame* frame_kp1,
aslam::VisualFrame* frame_k);

void reset();

private:
aslam::Camera::ConstPtr camera_;
const int cam_idx_;
const aslam::Quaternion q_C_B_;
const int feature_type_;
const std::string feature_type_string_;

// Used to check that the frames passed in are consecutive; otherwise we
// have to reset the outlier and inlier information we maintain.
bool initialized_;
int64_t k_frame_timestamp_;

// We remember which tracks were marked as outliers in the previous frame
// so we can keep eliminating them. This is necessary because we can't
// tell the external feature tracker which matches were discarded.
// If a track stops appearing in a frame we no longer need to remember it
// as an outlier, since the tracker has stopped tracking it. This assumes
// matches are only ever between two consecutive frames, but it saves a
// lot of memory and computation at runtime.
std::unordered_set<int> k_outlier_track_ids_;

// We remember which tracks were valid in the previous frame. This is so
// that if we remove a current outlier match, we know if the observation
// in the previous frame has a track length of one and can be deleted.
std::unordered_set<int> k_valid_track_ids_;

const FeatureTrackingOutlierSettings outlier_settings_;
};

} // namespace feature_tracking

#endif // FEATURE_TRACKING_VO_OUTLIER_REJECTION_PIPELINE_H_
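A hedged usage sketch for the new class (the wrapper function, the include path of the new header, and the pose-types include are illustrative assumptions): the pipeline object is constructed once per external-feature camera and must persist across frames, since it caches which track IDs were rejected in the previous frame.

```cpp
#include <aslam/common/pose-types.h>
#include <aslam/frames/visual-frame.h>
#include <glog/logging.h>

#include "feature-tracking/vo-outlier-rejection-pipeline.h"

// Hypothetical per-frame hook: runs the 2-pt RANSAC between frame k and k+1
// and removes matches whose track IDs were already flagged as outliers in
// the previous call.
void rejectExternalFeatureOutliers(
    feature_tracking::VOOutlierRejectionPipeline* outlier_pipeline,
    const aslam::Quaternion& q_Bkp1_Bk, aslam::VisualFrame* frame_k,
    aslam::VisualFrame* frame_kp1) {
  CHECK_NOTNULL(outlier_pipeline);
  CHECK_NOTNULL(frame_k);
  CHECK_NOTNULL(frame_kp1);
  outlier_pipeline->rejectMatchesFrame(q_Bkp1_Bk, frame_kp1, frame_k);
}
```

If the frame stream is interrupted (i.e. the incoming frames are not consecutive), call reset() first so the cached inlier/outlier track IDs are discarded.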
7 changes: 6 additions & 1 deletion algorithms/feature-tracking/src/feature-tracking-pipeline.cc
@@ -1,13 +1,18 @@
#include "feature-tracking/feature-tracking-pipeline.h"

DEFINE_bool(
feature_tracker_visualize_keypoint_detections, false,
"Visualize the raw keypoint detections to a ros topic.");
DEFINE_bool(
feature_tracker_visualize_keypoint_matches, false,
"Flag indicating whether keypoint matches are visualized.");
"Visualize the keypoint matches and the outliers.");

namespace feature_tracking {

FeatureTrackingPipeline::FeatureTrackingPipeline()
: feature_tracking_ros_base_topic_("tracking/"),
visualize_keypoint_detections_(
FLAGS_feature_tracker_visualize_keypoint_detections),
visualize_keypoint_matches_(
FLAGS_feature_tracker_visualize_keypoint_matches) {}

27 changes: 25 additions & 2 deletions algorithms/feature-tracking/src/feature-tracking-types.cc
@@ -1,9 +1,10 @@
#include "feature-tracking/feature-tracking-types.h"

#include <gflags/gflags.h>
#include <glog/logging.h>
#include <maplab-common/conversions.h>
#include <opencv2/features2d/features2d.hpp>

#include "feature-tracking/feature-tracking-types.h"

DEFINE_string(
feature_tracking_descriptor_type, "brisk",
"Descriptor type to compute: 'brisk' or 'freak'");
@@ -95,6 +96,19 @@ DEFINE_uint64(
feature_tracking_gridded_detection_num_threads_per_image, 0u,
"Number of hardware threads used for detection (0 means N_cell/2).");

DEFINE_double(
feature_tracker_two_pt_ransac_threshold, 1.0 - cos(0.5 * kDegToRad),
"Threshold for the 2-pt RANSAC used for feature tracking outlier "
"removal. The error is defined as (1 - cos(alpha)) where alpha is "
"the angle between the predicted and measured bearing vectors.");
DEFINE_uint64(
feature_tracker_two_pt_ransac_max_iterations, 200,
"Max iterations for the 2-pt RANSAC used for feature tracking "
"outlier removal.");
DEFINE_bool(
feature_tracker_deterministic, false,
"If true, deterministic RANSAC outlier rejection is used.");

namespace feature_tracking {

FeatureTrackingExtractorSettings::FeatureTrackingExtractorSettings()
@@ -181,4 +195,13 @@ FeatureTrackingDetectorSettings::FeatureTrackingDetectorSettings()
CHECK_GE(gridded_detector_num_grid_rows, 1u);
}

FeatureTrackingOutlierSettings::FeatureTrackingOutlierSettings()
: two_pt_ransac_threshold(FLAGS_feature_tracker_two_pt_ransac_threshold),
two_pt_ransac_max_iterations(
FLAGS_feature_tracker_two_pt_ransac_max_iterations),
deterministic(FLAGS_feature_tracker_deterministic) {
CHECK_GE(two_pt_ransac_threshold, 0.0);
CHECK_LE(two_pt_ransac_threshold, 1.0);
}

} // namespace feature_tracking
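The default RANSAC threshold encodes an angular tolerance of 0.5 degrees through the (1 - cos(alpha)) error used by the 2-pt RANSAC. A small illustrative helper (not part of this diff) for converting a desired angle into the flag value:

```cpp
#include <cmath>
#include <cstdio>

// Converts an angular tolerance (degrees) between the predicted and measured
// bearing vectors into the (1 - cos(alpha)) error threshold expected by
// --feature_tracker_two_pt_ransac_threshold.
double angleDegToRansacThreshold(double angle_deg) {
  const double angle_rad = angle_deg * M_PI / 180.0;
  return 1.0 - std::cos(angle_rad);
}

int main() {
  // The default of 0.5 degrees corresponds to roughly 3.8e-5.
  std::printf("%g\n", angleDegToRansacThreshold(0.5));
  return 0;
}
```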
83 changes: 27 additions & 56 deletions algorithms/feature-tracking/src/vo-feature-tracking-pipeline.cc
@@ -4,39 +4,22 @@
#include <aslam/tracker/feature-tracker-gyro.h>
#include <aslam/tracker/feature-tracker.h>
#include <aslam/visualization/basic-visualization.h>
#include <maplab-common/conversions.h>
#include <sensors/external-features.h>
#include <visualization/common-rviz-visualization.h>

#include "feature-tracking/vo-feature-tracking-pipeline.h"

DEFINE_double(
feature_tracker_two_pt_ransac_threshold, 1.0 - cos(0.5 * kDegToRad),
"Threshold for the 2-pt RANSAC used for feature tracking outlier "
"removal. The error is defined as (1 - cos(alpha)) where alpha is "
"the angle between the predicted and measured bearing vectors.");

DEFINE_double(
feature_tracker_two_pt_ransac_max_iterations, 200,
"Max iterations for the 2-pt RANSAC used for feature tracking "
"outlier removal.");

DEFINE_bool(
feature_tracker_deterministic, false,
"If true, deterministic RANSAC outlier rejection is used.");
DEFINE_bool(
detection_visualize_keypoints, false,
"Visualize the raw keypoint detections to a ros topic.");

namespace feature_tracking {

VOFeatureTrackingPipeline::VOFeatureTrackingPipeline(
const aslam::NCamera::ConstPtr& ncamera,
const FeatureTrackingExtractorSettings& extractor_settings,
const FeatureTrackingDetectorSettings& detector_settings)
const FeatureTrackingDetectorSettings& detector_settings,
const FeatureTrackingOutlierSettings& outlier_settings)
: first_nframe_initialized_(false),
extractor_settings_(extractor_settings),
detector_settings_(detector_settings) {
detector_settings_(detector_settings),
outlier_settings_(outlier_settings) {
initialize(ncamera);
}

@@ -141,21 +124,19 @@ void VOFeatureTrackingPipeline::trackFeaturesSingleCamera(
// Initialize keypoints and descriptors in frame_kp1
detectors_extractors_[camera_idx]->detectAndExtractFeatures(frame_kp1);

if (FLAGS_detection_visualize_keypoints) {
cv::Mat image;
cv::cvtColor(frame_kp1->getRawImage(), image, cv::COLOR_GRAY2BGR);
// The default detector / tracker will always insert descriptors of type
// kBinary = 0 for both BRISK and FREAK.
constexpr int descriptor_type =
static_cast<int>(vi_map::FeatureType::kBinary);

aslam_cv_visualization::drawKeypoints(*CHECK_NOTNULL(frame_kp1), &image);
if (visualize_keypoint_detections_) {
cv::Mat image;
aslam_cv_visualization::drawKeypoints(*frame_kp1, &image, descriptor_type);
const std::string topic = feature_tracking_ros_base_topic_ +
"/keypoints_raw_cam" + std::to_string(camera_idx);
visualization::RVizVisualizationSink::publish(topic, image);
}

// The default detector / tracker with always insert descriptors of type
// kBinary = 0 for both BRISK and FREAK
constexpr int descriptor_type =
static_cast<int>(vi_map::FeatureType::kBinary);

CHECK(frame_k->hasKeypointMeasurements());
CHECK(frame_k->hasDescriptors());
CHECK(frame_k->hasDescriptorType(descriptor_type));
@@ -172,31 +153,17 @@
aslam::FrameToFrameMatches matches_kp1_k;
trackers_[camera_idx]->track(q_Ckp1_Ck, *frame_k, frame_kp1, &matches_kp1_k);

// The tracker returns indices relative to the tracked feature block, so we
// shift them here by the block start. The rest of the code can then handle
// them agnostically, since the descriptors are no longer needed.
size_t start_k, size_k, start_kp1, size_kp1;
frame_k->getDescriptorBlockTypeStartAndSize(
descriptor_type, &start_k, &size_k);
frame_kp1->getDescriptorBlockTypeStartAndSize(
descriptor_type, &start_kp1, &size_kp1);

for (aslam::FrameToFrameMatch& match_kp1_k : matches_kp1_k) {
match_kp1_k.setKeypointIndexInFrameA(match_kp1_k.getKeypointIndexInFrameA() + start_kp1);
match_kp1_k.setKeypointIndexInFrameB(match_kp1_k.getKeypointIndexInFrameB() + start_k);
}

// Remove outlier matches.
statistics::StatsCollector stat_ransac("Twopt RANSAC (1 image) in ms");
timing::Timer timer_ransac(
"VOFeatureTrackingPipeline: trackFeaturesSingleCamera - ransac");
bool ransac_success = aslam::geometric_vision::
rejectOutlierFeatureMatchesTranslationRotationSAC(
*frame_kp1, *frame_k, q_Ckp1_Ck, matches_kp1_k,
FLAGS_feature_tracker_deterministic,
FLAGS_feature_tracker_two_pt_ransac_threshold,
FLAGS_feature_tracker_two_pt_ransac_max_iterations,
inlier_matches_kp1_k, outlier_matches_kp1_k);
*frame_kp1, *frame_k, q_Ckp1_Ck, descriptor_type, matches_kp1_k,
outlier_settings_.deterministic,
outlier_settings_.two_pt_ransac_threshold,
outlier_settings_.two_pt_ransac_max_iterations, inlier_matches_kp1_k,
outlier_matches_kp1_k);

LOG_IF(WARNING, !ransac_success)
<< "Match outlier rejection RANSAC failed on camera " << camera_idx
@@ -207,25 +174,29 @@
<< " matches on camera " << camera_idx << ".";

// Assign track ids.
// TODO(smauq): See about cleaning this one up, maybe even move this function
// up from aslam into here.
timing::Timer timer_track_manager(
"VOFeatureTrackingPipeline: trackFeaturesSingleCamera - track manager");
track_managers_[camera_idx]->applyMatchesToFrames(
*inlier_matches_kp1_k, frame_kp1, frame_k);

if (visualize_keypoint_matches_) {
cv::Mat image;
aslam_cv_visualization::visualizeMatches(
*frame_kp1, *frame_k, *inlier_matches_kp1_k, &image);
cv::Mat inlier_image;
aslam_cv_visualization::drawKeypointMatches(
*frame_kp1, *frame_k, *inlier_matches_kp1_k, descriptor_type,
&inlier_image);
const std::string topic = feature_tracking_ros_base_topic_ +
"/keypoint_matches_camera_" +
std::to_string(camera_idx);
visualization::RVizVisualizationSink::publish(topic, image);
visualization::RVizVisualizationSink::publish(topic, inlier_image);

cv::Mat outlier_image;
aslam_cv_visualization::visualizeMatches(
*frame_kp1, *frame_k, *outlier_matches_kp1_k, &outlier_image);
aslam_cv_visualization::drawKeypointMatches(
*frame_kp1, *frame_k, *outlier_matches_kp1_k, descriptor_type,
&outlier_image);
const std::string outlier_topic = feature_tracking_ros_base_topic_ +
"/keypoint_outlier_matches_camera_" +
"/keypoint_outliers_camera_" +
std::to_string(camera_idx);
visualization::RVizVisualizationSink::publish(outlier_topic, outlier_image);
}