/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file GncOptimizer.h
 * @brief The GncParams class: parameters for the graduated non-convexity (GNC) optimizer
 * @author Jingnan Shi
 * @author Luca Carlone
 * @author Frank Dellaert
 *
 * Implementation of the paper: Yang, Antonante, Tzoumas, Carlone, "Graduated Non-Convexity for Robust Spatial Perception:
 * From Non-Minimal Solvers to Global Outlier Rejection", ICRA/RAL, 2020. (arXiv version: https://arxiv.org/pdf/1909.08605.pdf)
 *
 * See also:
 * Antonante, Tzoumas, Yang, Carlone, "Outlier-Robust Estimation: Hardness, Minimally-Tuned Algorithms, and Applications",
 * arXiv: https://arxiv.org/pdf/2007.15109.pdf, 2020.
 */

#pragma once

#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/GaussNewtonOptimizer.h>

#include <algorithm>
#include <cmath>
#include <iostream>

namespace gtsam {

/* ************************************************************************* */
/// Choice of robust loss function for GNC.
enum GncLossType {
  GM /*Geman McClure*/,
  TLS /*Truncated least squares*/
};

template<class BaseOptimizerParameters>
class GncParams {
 public:
  /// For each parameter, specify the corresponding optimizer: e.g., GaussNewtonParams -> GaussNewtonOptimizer.
  typedef typename BaseOptimizerParameters::OptimizerType OptimizerType;

  /// Verbosity levels
  enum Verbosity {
    SILENT = 0,
    SUMMARY,
    MU,
    WEIGHTS,
    VALUES
  };

  /// Constructor.
  GncParams(const BaseOptimizerParameters& baseOptimizerParams)
      : baseOptimizerParams(baseOptimizerParams) {
  }

  /// Default constructor.
  GncParams()
      : baseOptimizerParams() {
  }
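
  // A minimal construction sketch (assuming the Levenberg-Marquardt base optimizer
  // shipped with GTSAM; any base params type exposing an OptimizerType works):
  //
  //   LevenbergMarquardtParams lmParams;
  //   GncParams<LevenbergMarquardtParams> gncParams(lmParams);
  //   GncParams<GaussNewtonParams> gnDefaults;  // or rely on the default base params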

  /// GNC parameters.
  BaseOptimizerParameters baseOptimizerParams;  ///< Optimization parameters used to solve the weighted least squares problem at each GNC iteration
  /// Any other specific GNC parameters:
  GncLossType lossType = TLS;  ///< Default loss
  size_t maxIterations = 100;  ///< Maximum number of iterations
  double muStep = 1.4;  ///< Multiplicative factor used to decrease/increase mu at each GNC iteration
  double relativeCostTol = 1e-5;  ///< If the relative cost change is below this threshold, stop iterating
  double weightsTol = 1e-4;  ///< If the weights are within weightsTol from being binary, stop iterating (only for TLS)
  Verbosity verbosity = SILENT;  ///< Verbosity level

  /// Use IndexVector for inliers and outliers since it is fast.
  using IndexVector = FastVector<uint64_t>;
  /// Slots in the factor graph corresponding to measurements that we know are inliers.
  IndexVector knownInliers;
  /// Slots in the factor graph corresponding to measurements that we know are outliers.
  IndexVector knownOutliers;

  /// Set the robust loss function to be used in GNC (chosen among the ones in GncLossType).
  void setLossType(const GncLossType type) {
    lossType = type;
  }

  /// Set the maximum number of iterations in GNC (changing the max nr of iters might lead to less accurate solutions and is not recommended).
  void setMaxIterations(const size_t maxIter) {
    std::cout
        << "setMaxIterations: changing the max nr of iters might lead to less accurate solutions and is not recommended! "
        << std::endl;
    maxIterations = maxIter;
  }

  /// Set the graduated non-convexity step: at each GNC iteration, mu is updated as mu <- mu * muStep.
  void setMuStep(const double step) {
    muStep = step;
  }
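
  // For instance, with the default muStep = 1.4 and the TLS loss, mu grows from its
  // initial value mu0 (chosen by the optimizer) as mu0, 1.4*mu0, 1.96*mu0, 2.744*mu0, ...
  // For GM the same factor is typically used to anneal mu in the opposite direction.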

  /// Set the maximum relative cost change between consecutive GNC iterations below which iterations are stopped.
  void setRelativeCostTol(double value) {
    relativeCostTol = value;
  }

  /// Set the maximum difference between the weights and their rounding in {0,1} to stop iterating.
  void setWeightsTol(double value) {
    weightsTol = value;
  }

  /// Set the verbosity level.
  void setVerbosityGNC(const Verbosity value) {
    verbosity = value;
  }

  /** (Optional) Provide a vector of measurements that must be considered inliers. The entries in the vector
   * correspond to the slots in the factor graph. For instance, if you have a nonlinear factor graph nfg,
   * and you provide knownIn = {0, 2, 15}, GNC will not apply outlier rejection to nfg[0], nfg[2], and nfg[15].
   * This functionality is commonly used in SLAM when one may assume the odometry is outlier free, and
   * only apply GNC to prune outliers from the loop closures.
   */
  void setKnownInliers(const IndexVector& knownIn) {
    for (size_t i = 0; i < knownIn.size(); i++) {
      knownInliers.push_back(knownIn[i]);
    }
    std::sort(knownInliers.begin(), knownInliers.end());
  }
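
  // Usage sketch, assuming a GncParams<GaussNewtonParams> instance `gncParams` and a
  // graph whose first three slots hold odometry factors we trust (never down-weighted):
  //
  //   GncParams<GaussNewtonParams>::IndexVector odometrySlots{0, 1, 2};
  //   gncParams.setKnownInliers(odometrySlots);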

  /** (Optional) Provide a vector of measurements that must be considered outliers. The entries in the vector
   * correspond to the slots in the factor graph. For instance, if you have a nonlinear factor graph nfg,
   * and you provide knownOut = {0, 2, 15}, GNC will treat nfg[0], nfg[2], and nfg[15] as outliers
   * and exclude them from the optimization (their weights are fixed to zero).
   */
  void setKnownOutliers(const IndexVector& knownOut) {
    for (size_t i = 0; i < knownOut.size(); i++) {
      knownOutliers.push_back(knownOut[i]);
    }
    std::sort(knownOutliers.begin(), knownOutliers.end());
  }
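
  // Usage sketch, assuming the same `gncParams` instance and that slot 7 holds a loop
  // closure already flagged as spurious by some external verification step:
  //
  //   GncParams<GaussNewtonParams>::IndexVector badLoopClosures{7};
  //   gncParams.setKnownOutliers(badLoopClosures);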

  /// Equals.
  bool equals(const GncParams& other, double tol = 1e-9) const {
    return baseOptimizerParams.equals(other.baseOptimizerParams)
        && lossType == other.lossType && maxIterations == other.maxIterations
        && std::fabs(muStep - other.muStep) <= tol
        && std::fabs(relativeCostTol - other.relativeCostTol) <= tol
        && std::fabs(weightsTol - other.weightsTol) <= tol
        && verbosity == other.verbosity && knownInliers == other.knownInliers
        && knownOutliers == other.knownOutliers;
  }

  /// Print.
  void print(const std::string& str) const {
    std::cout << str << "\n";
    switch (lossType) {
      case GM:
        std::cout << "lossType: Geman McClure" << "\n";
        break;
      case TLS:
        std::cout << "lossType: Truncated Least-squares" << "\n";
        break;
      default:
        throw std::runtime_error("GncParams::print: unknown loss type.");
    }
    std::cout << "maxIterations: " << maxIterations << "\n";
    std::cout << "muStep: " << muStep << "\n";
    std::cout << "relativeCostTol: " << relativeCostTol << "\n";
    std::cout << "weightsTol: " << weightsTol << "\n";
    std::cout << "verbosity: " << verbosity << "\n";
    for (size_t i = 0; i < knownInliers.size(); i++)
      std::cout << "knownInliers: " << knownInliers[i] << "\n";
    for (size_t i = 0; i < knownOutliers.size(); i++)
      std::cout << "knownOutliers: " << knownOutliers[i] << "\n";
    baseOptimizerParams.print("Base optimizer params: ");
  }
};
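
// End-to-end usage sketch: it assumes a NonlinearFactorGraph `graph` and an initial
// `Values` estimate built elsewhere, plus the GncOptimizer class that consumes these
// parameters (templated on GncParams):
//
//   GaussNewtonParams gnParams;
//   GncParams<GaussNewtonParams> gncParams(gnParams);
//   gncParams.setLossType(TLS);
//   gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::Verbosity::SUMMARY);
//
//   GncOptimizer<GncParams<GaussNewtonParams>> optimizer(graph, initial, gncParams);
//   Values result = optimizer.optimize();
//   Vector weights = optimizer.getWeights();  // ~1 for inliers, ~0 for outliers (TLS)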

}  // namespace gtsam