/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file BetweenFactorEM.h
 * @brief A between factor with explicit inlier and outlier noise models (EM-style)
 * @author Vadim Indelman
 **/
#pragma once

#include <iostream>

#include <gtsam/base/Testable.h>
#include <gtsam/base/Lie.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/nonlinear/Marginals.h>

namespace gtsam {

/**
 * A class for a measurement predicted by "between(config[key1], config[key2])",
 * with separate inlier and outlier noise models. The two hypotheses are weighted
 * by indicator probabilities computed in an EM fashion (see calcIndicatorProb).
 * @tparam VALUE the Value type
 * @ingroup slam
 */
template<class VALUE>
class BetweenFactorEM: public NonlinearFactor {

public:

  typedef VALUE T;

private:

  typedef BetweenFactorEM<VALUE> This;
  typedef NonlinearFactor Base;

  Key key1_;
  Key key2_;

  VALUE measured_; /** The measurement */

  SharedGaussian model_inlier_;
  SharedGaussian model_outlier_;

  double prior_inlier_;
  double prior_outlier_;

  bool flag_bump_up_near_zero_probs_;

  /** concept check by type */
  GTSAM_CONCEPT_LIE_TYPE(T)
  GTSAM_CONCEPT_TESTABLE_TYPE(T)

public:

  // shorthand for a smart pointer to a factor
  typedef typename std::shared_ptr<BetweenFactorEM> shared_ptr;

  /** default constructor - only use for serialization */
  BetweenFactorEM() {
  }

  /** Constructor */
  BetweenFactorEM(Key key1, Key key2, const VALUE& measured,
      const SharedGaussian& model_inlier, const SharedGaussian& model_outlier,
      const double prior_inlier, const double prior_outlier,
      const bool flag_bump_up_near_zero_probs = false) :
      Base(KeyVector{key1, key2}), key1_(key1), key2_(key2),
      measured_(measured), model_inlier_(model_inlier), model_outlier_(model_outlier),
      prior_inlier_(prior_inlier), prior_outlier_(prior_outlier),
      flag_bump_up_near_zero_probs_(flag_bump_up_near_zero_probs) {
  }
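
  // Minimal usage sketch (illustrative only, not part of the original header):
  // the keys, measurement, and sigmas below are hypothetical values for a Pose2
  // odometry-style constraint with equal inlier/outlier priors.
  //
  //   auto inlier  = noiseModel::Diagonal::Sigmas(Vector3(0.05, 0.05, 0.01));
  //   auto outlier = noiseModel::Diagonal::Sigmas(Vector3(5.0, 5.0, 1.0));
  //   BetweenFactorEM<Pose2> factor(Symbol('x', 1), Symbol('x', 2),
  //                                 Pose2(1.0, 0.0, 0.0), inlier, outlier,
  //                                 0.5, 0.5);  // prior_inlier, prior_outlier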

  ~BetweenFactorEM() override {
  }

  /** implement functions needed for Testable */

  /** print */
  void print(const std::string& s, const KeyFormatter& keyFormatter =
      DefaultKeyFormatter) const override {
    std::cout << s << "BetweenFactorEM(" << keyFormatter(key1_) << ","
        << keyFormatter(key2_) << ")\n";
    measured_.print(" measured: ");
    model_inlier_->print(" noise model inlier: ");
    model_outlier_->print(" noise model outlier: ");
    std::cout << "(prior_inlier, prior_outlier) = (" << prior_inlier_ << ","
        << prior_outlier_ << ")\n";
    //  Base::print(s, keyFormatter);
  }

  /** equals */
  bool equals(const NonlinearFactor& f, double tol = 1e-9) const override {
    const This *t = dynamic_cast<const This*>(&f);

    if (t && Base::equals(f))
      return key1_ == t->key1_ && key2_ == t->key2_
          &&
          //  model_inlier_->equals(t->model_inlier_ ) && // TODO: fix here
          //  model_outlier_->equals(t->model_outlier_ ) &&
          prior_outlier_ == t->prior_outlier_
          && prior_inlier_ == t->prior_inlier_ && measured_.equals(t->measured_);
    else
      return false;
  }

  /** implement functions needed to derive from Factor */

  /* ************************************************************************* */
  double error(const Values &x) const override {
    return whitenedError(x).squaredNorm();
  }

  /* ************************************************************************* */
  /**
   * Linearize the nonlinear factor to get a GaussianFactor,
   * \f$ Ax-b \approx h(x+\delta x)-z = h(x) + A \delta x - z \f$
   * Hence \f$ b = z - h(x) = - \mathtt{error\_vector}(x) \f$
   */
  /* This version of linearize recalculates the noise model each time */
  std::shared_ptr<GaussianFactor> linearize(const Values &x) const override {
    // Only linearize if the factor is active
    if (!this->active(x))
      return std::shared_ptr<JacobianFactor>();

    //std::cout<<"About to linearize"<<std::endl;
    Matrix A1, A2;
    std::vector<Matrix> A(this->size());
    Vector b = -whitenedError(x, A);
    A1 = A[0];
    A2 = A[1];

    // The error and Jacobians returned by whitenedError are already whitened,
    // so the resulting JacobianFactor uses a unit noise model.
    return GaussianFactor::shared_ptr(
        new JacobianFactor(key1_, A1, key2_, A2, b,
            noiseModel::Unit::Create(b.size())));
  }

  /* ************************************************************************* */
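  /**
   * Whitened error of the inlier and outlier hypotheses, each scaled by the
   * square root of its indicator probability and stacked into a single vector.
   * If H is provided, the correspondingly whitened and scaled Jacobians with
   * respect to the two keys are returned as well.
   */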
  Vector whitenedError(const Values& x,
      OptionalMatrixVecType H = nullptr) const {

    bool debug = true;

    const T& p1 = x.at<T>(key1_);
    const T& p2 = x.at<T>(key2_);

    Matrix H1, H2;

    T hx = p1.between(p2, H1, H2); // h(x)
    // manifold equivalent of h(x)-z -> log(z,h(x))

    Vector err = measured_.localCoordinates(hx);

    // Calculate indicator probabilities (inlier and outlier)
    Vector p_inlier_outlier = calcIndicatorProb(x);
    double p_inlier = p_inlier_outlier[0];
    double p_outlier = p_inlier_outlier[1];

    Vector err_wh_inlier = model_inlier_->whiten(err);
    Vector err_wh_outlier = model_outlier_->whiten(err);

    Matrix invCov_inlier = model_inlier_->R().transpose() * model_inlier_->R();
    Matrix invCov_outlier = model_outlier_->R().transpose()
        * model_outlier_->R();

    Vector err_wh_eq;
    err_wh_eq.resize(err_wh_inlier.rows() * 2);
    err_wh_eq << sqrt(p_inlier) * err_wh_inlier.array(), sqrt(p_outlier)
        * err_wh_outlier.array();

    if (H) {
      // stack Jacobians for the two indicators for each of the keys

      Matrix H1_inlier = sqrt(p_inlier) * model_inlier_->Whiten(H1);
      Matrix H1_outlier = sqrt(p_outlier) * model_outlier_->Whiten(H1);
      Matrix H1_aug = stack(2, &H1_inlier, &H1_outlier);

      Matrix H2_inlier = sqrt(p_inlier) * model_inlier_->Whiten(H2);
      Matrix H2_outlier = sqrt(p_outlier) * model_outlier_->Whiten(H2);
      Matrix H2_aug = stack(2, &H2_inlier, &H2_outlier);

      (*H)[0].resize(H1_aug.rows(), H1_aug.cols());
      (*H)[1].resize(H2_aug.rows(), H2_aug.cols());

      (*H)[0] = H1_aug;
      (*H)[1] = H2_aug;
    }

    if (debug) {
      //  std::cout<<"unwhitened error: "<<err[0]<<" "<<err[1]<<" "<<err[2]<<std::endl;
      //  std::cout<<"err_wh_inlier: "<<err_wh_inlier[0]<<" "<<err_wh_inlier[1]<<" "<<err_wh_inlier[2]<<std::endl;
      //  std::cout<<"err_wh_outlier: "<<err_wh_outlier[0]<<" "<<err_wh_outlier[1]<<" "<<err_wh_outlier[2]<<std::endl;
      //
      //  std::cout<<"p_inlier, p_outlier, sumP: "<<p_inlier<<" "<<p_outlier<<" " << sumP << std::endl;
      //
      //  std::cout<<"prior_inlier_, prior_outlier_: "<<prior_inlier_<<" "<<prior_outlier_<< std::endl;
      //
      //  double s_inl  = -0.5 * err_wh_inlier.dot(err_wh_inlier);
      //  double s_outl = -0.5 * err_wh_outlier.dot(err_wh_outlier);
      //  std::cout<<"s_inl, s_outl: "<<s_inl<<" "<<s_outl<<std::endl;
      //
      //  std::cout<<"norm of invCov_inlier, invCov_outlier: "<<invCov_inlier.norm()<<" "<<invCov_outlier.norm()<<std::endl;
      //  double q_inl  = invCov_inlier.norm() * exp( -0.5 * err_wh_inlier.dot(err_wh_inlier) );
      //  double q_outl = invCov_outlier.norm() * exp( -0.5 * err_wh_outlier.dot(err_wh_outlier) );
      //  std::cout<<"q_inl, q_outl: "<<q_inl<<" "<<q_outl<<std::endl;

      //  Matrix Cov_inlier  = invCov_inlier.inverse();
      //  Matrix Cov_outlier = invCov_outlier.inverse();
      //  std::cout<<"Cov_inlier: "<<std::endl<<
      //      Cov_inlier(0,0) << " " << Cov_inlier(0,1) << " " << Cov_inlier(0,2) <<std::endl<<
      //      Cov_inlier(1,0) << " " << Cov_inlier(1,1) << " " << Cov_inlier(1,2) <<std::endl<<
      //      Cov_inlier(2,0) << " " << Cov_inlier(2,1) << " " << Cov_inlier(2,2) <<std::endl;
      //  std::cout<<"Cov_outlier: "<<std::endl<<
      //      Cov_outlier(0,0) << " " << Cov_outlier(0,1) << " " << Cov_outlier(0,2) <<std::endl<<
      //      Cov_outlier(1,0) << " " << Cov_outlier(1,1) << " " << Cov_outlier(1,2) <<std::endl<<
      //      Cov_outlier(2,0) << " " << Cov_outlier(2,1) << " " << Cov_outlier(2,2) <<std::endl;
      //  std::cout<<"===="<<std::endl;
    }

    return err_wh_eq;
  }

  // An overload that takes a vector of matrices and forwards it to the
  // function above, which uses a pointer to a vector instead.
  Vector whitenedError(const Values& x, std::vector<Matrix>& H) const {
    return whitenedError(x, &H);
  }

  /* ************************************************************************* */
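  /**
   * Posterior probabilities of the inlier and outlier indicator given the
   * current estimate, returned as a 2-vector (p_inlier, p_outlier).
   * Each unnormalized weight is prior * sqrt(det(invCov)) * exp(-0.5 * e' * invCov * e),
   * where e is the unwhitened error; the pair is then normalized to sum to one
   * (and optionally bumped away from zero, see flag_bump_up_near_zero_probs_).
   */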
  Vector calcIndicatorProb(const Values& x) const {

    bool debug = false;

    Vector err = unwhitenedError(x);

    // Calculate indicator probabilities (inlier and outlier)
    Vector err_wh_inlier = model_inlier_->whiten(err);
    Vector err_wh_outlier = model_outlier_->whiten(err);

    Matrix invCov_inlier = model_inlier_->R().transpose() * model_inlier_->R();
    Matrix invCov_outlier = model_outlier_->R().transpose()
        * model_outlier_->R();

    double p_inlier = prior_inlier_ * std::sqrt(invCov_inlier.determinant())
        * exp(-0.5 * err_wh_inlier.dot(err_wh_inlier));
    double p_outlier = prior_outlier_ * std::sqrt(invCov_outlier.determinant())
        * exp(-0.5 * err_wh_outlier.dot(err_wh_outlier));

    if (debug) {
      std::cout << "in calcIndicatorProb. err_unwh: " << err[0] << ", "
          << err[1] << ", " << err[2] << std::endl;
      std::cout << "in calcIndicatorProb. err_wh_inlier: " << err_wh_inlier[0]
          << ", " << err_wh_inlier[1] << ", " << err_wh_inlier[2] << std::endl;
      std::cout << "in calcIndicatorProb. err_wh_inlier.dot(err_wh_inlier): "
          << err_wh_inlier.dot(err_wh_inlier) << std::endl;
      std::cout << "in calcIndicatorProb. err_wh_outlier.dot(err_wh_outlier): "
          << err_wh_outlier.dot(err_wh_outlier) << std::endl;

      std::cout
          << "in calcIndicatorProb. p_inlier, p_outlier before normalization: "
          << p_inlier << ", " << p_outlier << std::endl;
    }

    double sumP = p_inlier + p_outlier;
    p_inlier /= sumP;
    p_outlier /= sumP;

    if (flag_bump_up_near_zero_probs_) {
      // Bump up near-zero probabilities (as in linerFlow.h)
      double minP = 0.05; // == 0.1 / 2 indicator variables
      if (p_inlier < minP || p_outlier < minP) {
        if (p_inlier < minP)
          p_inlier = minP;
        if (p_outlier < minP)
          p_outlier = minP;
        sumP = p_inlier + p_outlier;
        p_inlier /= sumP;
        p_outlier /= sumP;
      }
    }

    return (Vector(2) << p_inlier, p_outlier).finished();
  }

  /* ************************************************************************* */
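  /** Unwhitened error: local coordinates of h(x) = between(x1, x2) relative to the measurement. */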
  Vector unwhitenedError(const Values& x) const {

    const T& p1 = x.at<T>(key1_);
    const T& p2 = x.at<T>(key2_);

    Matrix H1, H2;

    T hx = p1.between(p2, H1, H2); // h(x)

    return measured_.localCoordinates(hx);
  }

  /* ************************************************************************* */
  void set_flag_bump_up_near_zero_probs(bool flag) {
    flag_bump_up_near_zero_probs_ = flag;
  }

  /* ************************************************************************* */
  bool get_flag_bump_up_near_zero_probs() const {
    return flag_bump_up_near_zero_probs_;
  }

  /* ************************************************************************* */
  SharedGaussian get_model_inlier() const {
    return model_inlier_;
  }

  /* ************************************************************************* */
  SharedGaussian get_model_outlier() const {
    return model_outlier_;
  }

  /* ************************************************************************* */
  Matrix get_model_inlier_cov() const {
    return (model_inlier_->R().transpose() * model_inlier_->R()).inverse();
  }

  /* ************************************************************************* */
  Matrix get_model_outlier_cov() const {
    return (model_outlier_->R().transpose() * model_outlier_->R()).inverse();
  }

  /* ************************************************************************* */
  void updateNoiseModels(const Values& values,
      const NonlinearFactorGraph& graph) {
    /* Update model_inlier_ and model_outlier_ to account for uncertainty in robot trajectories
     * (note these are given in the E step, where indicator probabilities are calculated).
     *
     * Principle: R += [H1 H2] * joint_cov12 * [H1 H2]', where H1, H2 are Jacobians of the
     * unwhitened error w.r.t. states, and R is the measurement covariance (inlier or outlier modes).
     *
     * TODO: improve efficiency (info form)
     */

    // get joint covariance of the involved states
    KeyVector Keys;
    Keys.push_back(key1_);
    Keys.push_back(key2_);
    Marginals marginals(graph, values, Marginals::QR);
    JointMarginal joint_marginal12 = marginals.jointMarginalCovariance(Keys);
    Matrix cov1 = joint_marginal12(key1_, key1_);
    Matrix cov2 = joint_marginal12(key2_, key2_);
    Matrix cov12 = joint_marginal12(key1_, key2_);

    updateNoiseModels_givenCovs(values, cov1, cov2, cov12);
  }
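
  // E-step usage sketch (illustrative only; `graph` and `currentEstimate` are
  // hypothetical caller-side variables, not part of this header):
  //   factor->updateNoiseModels(currentEstimate, graph);      // fold state uncertainty into the noise models
  //   Vector p = factor->calcIndicatorProb(currentEstimate);  // p[0] = inlier, p[1] = outlier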

  /* ************************************************************************* */
  void updateNoiseModels_givenCovs(const Values& values,
      const Matrix& cov1, const Matrix& cov2, const Matrix& cov12) {
    /* Update model_inlier_ and model_outlier_ to account for uncertainty in robot trajectories
     * (note these are given in the E step, where indicator probabilities are calculated).
     *
     * Principle: R += [H1 H2] * joint_cov12 * [H1 H2]', where H1, H2 are Jacobians of the
     * unwhitened error w.r.t. states, and R is the measurement covariance (inlier or outlier modes).
     *
     * TODO: improve efficiency (info form)
     */

    const T& p1 = values.at<T>(key1_);
    const T& p2 = values.at<T>(key2_);

    Matrix H1, H2;
    p1.between(p2, H1, H2); // h(x)

    Matrix H;
    H.resize(H1.rows(), H1.cols() + H2.cols());
    H << H1, H2; // H = [H1 H2]

    Matrix joint_cov;
    joint_cov.resize(cov1.rows() + cov2.rows(), cov1.cols() + cov2.cols());
    joint_cov << cov1, cov12, cov12.transpose(), cov2;

    Matrix cov_state = H * joint_cov * H.transpose();

    //  model_inlier_->print("before:");

    // update inlier and outlier noise models
    Matrix covRinlier =
        (model_inlier_->R().transpose() * model_inlier_->R()).inverse();
    model_inlier_ = noiseModel::Gaussian::Covariance(covRinlier + cov_state);

    Matrix covRoutlier =
        (model_outlier_->R().transpose() * model_outlier_->R()).inverse();
    model_outlier_ = noiseModel::Gaussian::Covariance(covRoutlier + cov_state);

    //  model_inlier_->print("after:");
    //  std::cout<<"covRinlier + cov_state: "<<covRinlier + cov_state<<std::endl;
  }

  /* ************************************************************************* */
  /** return the measured */
  const VALUE& measured() const {
    return measured_;
  }

  size_t dim() const override {
    return model_inlier_->R().rows() + model_inlier_->R().cols();
  }


private:

#if GTSAM_ENABLE_BOOST_SERIALIZATION
  /** Serialization function */
  friend class boost::serialization::access;
  template<class ARCHIVE>
  void serialize(ARCHIVE & ar, const unsigned int /*version*/) {
    ar
        & boost::serialization::make_nvp("NonlinearFactor",
            boost::serialization::base_object<Base>(*this));
    ar & BOOST_SERIALIZATION_NVP(measured_);
  }
#endif
}; // \class BetweenFactorEM

/// traits
template<class VALUE>
struct traits<BetweenFactorEM<VALUE> > : public Testable<BetweenFactorEM<VALUE> > {};

} // namespace gtsam