1/* ----------------------------------------------------------------------------
2
3 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
4 * Atlanta, Georgia 30332-0415
5 * All Rights Reserved
6 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
7
8 * See LICENSE for the license information
9
10 * -------------------------------------------------------------------------- */
11
12/**
13 * @file TransformBtwRobotsUnaryFactorEM.h
14 * @brief Unary factor for determining transformation between given trajectories of two robots
15 * @author Vadim Indelman
16 **/
17#pragma once
18
#include <gtsam/base/Lie.h>
#include <gtsam/base/Testable.h>
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/nonlinear/Marginals.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/slam/BetweenFactor.h>

#include <cmath>
#include <ostream>
#include <stdexcept>
28
29namespace gtsam {
30
31 /**
32 * A class for a measurement predicted by "between(config[key1],config[key2])"
33 * @tparam VALUE the Value type
34 * @ingroup slam
35 */
36 template<class VALUE>
37 class TransformBtwRobotsUnaryFactorEM: public NonlinearFactor {
38
39 public:
40
41 typedef VALUE T;
42
43 private:
44
45 typedef TransformBtwRobotsUnaryFactorEM<VALUE> This;
46 typedef NonlinearFactor Base;
47
48 Key key_;
49
50 VALUE measured_; /** The measurement */
51
52 Values valA_; // given values for robot A map\trajectory
53 Values valB_; // given values for robot B map\trajectory
54 Key keyA_; // key of robot A to which the measurement refers
55 Key keyB_; // key of robot B to which the measurement refers
56
57 // TODO: create function to update valA_ and valB_
58
59 SharedGaussian model_inlier_;
60 SharedGaussian model_outlier_;
61
62 double prior_inlier_;
63 double prior_outlier_;
64
65 bool flag_bump_up_near_zero_probs_;
66 mutable bool start_with_M_step_;
67
68 /** concept check by type */
69 GTSAM_CONCEPT_LIE_TYPE(T)
70 GTSAM_CONCEPT_TESTABLE_TYPE(T)
71
72 public:
73
74 // shorthand for a smart pointer to a factor
75 typedef typename std::shared_ptr<TransformBtwRobotsUnaryFactorEM> shared_ptr;
76
77 /** default constructor - only use for serialization */
78 TransformBtwRobotsUnaryFactorEM() {}
79
80 /** Constructor */
81 TransformBtwRobotsUnaryFactorEM(Key key, const VALUE& measured, Key keyA, Key keyB,
82 const Values& valA, const Values& valB,
83 const SharedGaussian& model_inlier, const SharedGaussian& model_outlier,
84 const double prior_inlier, const double prior_outlier,
85 const bool flag_bump_up_near_zero_probs = false,
86 const bool start_with_M_step = false) :
87 Base(KeyVector{key}), key_(key), measured_(measured), keyA_(keyA), keyB_(keyB),
88 model_inlier_(model_inlier), model_outlier_(model_outlier),
89 prior_inlier_(prior_inlier), prior_outlier_(prior_outlier), flag_bump_up_near_zero_probs_(flag_bump_up_near_zero_probs),
90 start_with_M_step_(false){
91
92 setValAValB(valA, valB);
93
94 }
95
96 ~TransformBtwRobotsUnaryFactorEM() override {}
97
98
99 /** Clone */
100 NonlinearFactor::shared_ptr clone() const override { return std::make_shared<This>(*this); }
101
102
103 /** implement functions needed for Testable */
104
105 /** print */
106 void print(const std::string& s, const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override {
107 std::cout << s << "TransformBtwRobotsUnaryFactorEM("
108 << keyFormatter(key_) << ")\n";
109 std::cout << "MR between factor keys: "
110 << keyFormatter(keyA_) << ","
111 << keyFormatter(keyB_) << "\n";
112 measured_.print(" measured: ");
113 model_inlier_->print(name: " noise model inlier: ");
114 model_outlier_->print(name: " noise model outlier: ");
115 std::cout << "(prior_inlier, prior_outlier_) = ("
116 << prior_inlier_ << ","
117 << prior_outlier_ << ")\n";
118 // Base::print(s, keyFormatter);
119 }
120
121 /** equals */
122 bool equals(const NonlinearFactor& f, double tol=1e-9) const override {
123 const This *t = dynamic_cast<const This*> (&f);
124
125 if(t && Base::equals(f))
126 return key_ == t->key_ && measured_.equals(t->measured_) &&
127 // model_inlier_->equals(t->model_inlier_ ) && // TODO: fix here
128 // model_outlier_->equals(t->model_outlier_ ) &&
129 prior_outlier_ == t->prior_outlier_ && prior_inlier_ == t->prior_inlier_;
130 else
131 return false;
132 }
133
134 /** implement functions needed to derive from Factor */
135
136 /* ************************************************************************* */
137 void setValAValB(const Values& valA, const Values& valB){
138 if ( (!valA.exists(j: keyA_)) && (!valB.exists(j: keyA_)) && (!valA.exists(j: keyB_)) && (!valB.exists(j: keyB_)) )
139 throw("something is wrong!");
140
141 // TODO: make sure the two keys belong to different robots
142
143 if (valA.exists(j: keyA_)){
144 valA_ = valA;
145 valB_ = valB;
146 }
147 else {
148 valA_ = valB;
149 valB_ = valA;
150 }
151 }
152
153 /* ************************************************************************* */
154 double error(const Values& x) const override {
155 return whitenedError(x).squaredNorm();
156 }
157
158 /* ************************************************************************* */
159 /**
160 * Linearize a non-linearFactorN to get a GaussianFactor,
161 * \f$ Ax-b \approx h(x+\delta x)-z = h(x) + A \delta x - z \f$
162 * Hence \f$ b = z - h(x) = - \mathtt{error\_vector}(x) \f$
163 */
164 /* This version of linearize recalculates the noise model each time */
165 std::shared_ptr<GaussianFactor> linearize(const Values& x) const override {
166 // Only linearize if the factor is active
167 if (!this->active(x))
168 return std::shared_ptr<JacobianFactor>();
169
170 //std::cout<<"About to linearize"<<std::endl;
171 Matrix A1;
172 std::vector<Matrix> A(this->size());
173 Vector b = -whitenedError(x, A);
174 A1 = A[0];
175
176 return GaussianFactor::shared_ptr(
177 new JacobianFactor(key_, A1, b, noiseModel::Unit::Create(dim: b.size())));
178 }
179
180
181 /* ************************************************************************* */
182 /** A function overload to accept a vector<matrix> instead of a pointer to
183 * the said type.
184 */
185 Vector whitenedError(const Values& x, OptionalMatrixVecType H = nullptr) const {
186
187 bool debug = true;
188
189 Matrix H_compose, H_between1, H_dummy;
190
191 T orgA_T_currA = valA_.at<T>(keyA_);
192 T orgB_T_currB = valB_.at<T>(keyB_);
193
194 T orgA_T_orgB = x.at<T>(key_);
195
196 T orgA_T_currB = orgA_T_orgB.compose(orgB_T_currB, H_compose, H_dummy);
197
198 T currA_T_currB_pred = orgA_T_currA.between(orgA_T_currB, H_dummy, H_between1);
199
200 T currA_T_currB_msr = measured_;
201
202 Vector err = currA_T_currB_msr.localCoordinates(currA_T_currB_pred);
203
204 // Calculate indicator probabilities (inlier and outlier)
205 Vector p_inlier_outlier = calcIndicatorProb(x, err);
206 double p_inlier = p_inlier_outlier[0];
207 double p_outlier = p_inlier_outlier[1];
208
209 if (start_with_M_step_){
210 start_with_M_step_ = false;
211
212 p_inlier = 0.5;
213 p_outlier = 0.5;
214 }
215
216 Vector err_wh_inlier = model_inlier_->whiten(v: err);
217 Vector err_wh_outlier = model_outlier_->whiten(v: err);
218
219 Matrix invCov_inlier = model_inlier_->R().transpose() * model_inlier_->R();
220 Matrix invCov_outlier = model_outlier_->R().transpose() * model_outlier_->R();
221
222 Vector err_wh_eq;
223 err_wh_eq.resize(size: err_wh_inlier.rows()*2);
224 err_wh_eq << sqrt(x: p_inlier) * err_wh_inlier.array() , sqrt(x: p_outlier) * err_wh_outlier.array();
225
226 Matrix H_unwh = H_compose * H_between1;
227
228 if (H){
229
230 Matrix H_inlier = sqrt(x: p_inlier)*model_inlier_->Whiten(H: H_unwh);
231 Matrix H_outlier = sqrt(x: p_outlier)*model_outlier_->Whiten(H: H_unwh);
232 Matrix H_aug = stack(nrMatrices: 2, &H_inlier, &H_outlier);
233
234 (*H)[0].resize(rows: H_aug.rows(),cols: H_aug.cols());
235 (*H)[0] = H_aug;
236 }
237
238 if (debug){
239 // std::cout<<"H_compose - rows, cols, : "<<H_compose.rows()<<", "<< H_compose.cols()<<std::endl;
240 // std::cout<<"H_between1 - rows, cols, : "<<H_between1.rows()<<", "<< H_between1.cols()<<std::endl;
241 // std::cout<<"H_unwh - rows, cols, : "<<H_unwh.rows()<<", "<< H_unwh.cols()<<std::endl;
242 // std::cout<<"H_unwh: "<<std:endl<<H_unwh[0]
243
244 }
245
246
247 return err_wh_eq;
248 }
249
250 /* ************************************************************************* */
251 Vector whitenedError(const Values& x, std::vector<Matrix>& H) const {
252 return whitenedError(x, &H);
253 }
254
255 /* ************************************************************************* */
256 Vector calcIndicatorProb(const Values& x) const {
257
258 Vector err = unwhitenedError(x);
259
260 return this->calcIndicatorProb(x, err);
261 }
262
263 /* ************************************************************************* */
264 Vector calcIndicatorProb(const Values& x, const Vector& err) const {
265
266 // Calculate indicator probabilities (inlier and outlier)
267 Vector err_wh_inlier = model_inlier_->whiten(v: err);
268 Vector err_wh_outlier = model_outlier_->whiten(v: err);
269
270 Matrix invCov_inlier = model_inlier_->R().transpose() * model_inlier_->R();
271 Matrix invCov_outlier = model_outlier_->R().transpose() * model_outlier_->R();
272
273 double p_inlier = prior_inlier_ * sqrt(x: invCov_inlier.norm()) * exp( x: -0.5 * err_wh_inlier.dot(other: err_wh_inlier) );
274 double p_outlier = prior_outlier_ * sqrt(x: invCov_outlier.norm()) * exp( x: -0.5 * err_wh_outlier.dot(other: err_wh_outlier) );
275
276 double sumP = p_inlier + p_outlier;
277 p_inlier /= sumP;
278 p_outlier /= sumP;
279
280 if (flag_bump_up_near_zero_probs_){
281 // Bump up near-zero probabilities (as in linerFlow.h)
282 double minP = 0.05; // == 0.1 / 2 indicator variables
283 if (p_inlier < minP || p_outlier < minP){
284 if (p_inlier < minP)
285 p_inlier = minP;
286 if (p_outlier < minP)
287 p_outlier = minP;
288 sumP = p_inlier + p_outlier;
289 p_inlier /= sumP;
290 p_outlier /= sumP;
291 }
292 }
293
294 return (Vector(2) << p_inlier, p_outlier).finished();
295 }
296
297 /* ************************************************************************* */
298 Vector unwhitenedError(const Values& x) const {
299
300 T orgA_T_currA = valA_.at<T>(keyA_);
301 T orgB_T_currB = valB_.at<T>(keyB_);
302
303 T orgA_T_orgB = x.at<T>(key_);
304
305 T orgA_T_currB = orgA_T_orgB.compose(orgB_T_currB);
306
307 T currA_T_currB_pred = orgA_T_currA.between(orgA_T_currB);
308
309 T currA_T_currB_msr = measured_;
310
311 return currA_T_currB_msr.localCoordinates(currA_T_currB_pred);
312 }
313
314 /* ************************************************************************* */
315 SharedGaussian get_model_inlier() const {
316 return model_inlier_;
317 }
318
319 /* ************************************************************************* */
320 SharedGaussian get_model_outlier() const {
321 return model_outlier_;
322 }
323
324 /* ************************************************************************* */
325 Matrix get_model_inlier_cov() const {
326 return (model_inlier_->R().transpose()*model_inlier_->R()).inverse();
327 }
328
329 /* ************************************************************************* */
330 Matrix get_model_outlier_cov() const {
331 return (model_outlier_->R().transpose()*model_outlier_->R()).inverse();
332 }
333
334 /* ************************************************************************* */
335 void updateNoiseModels(const Values& values, const Marginals& marginals) {
336 /* given marginals version, don't need to marginal multiple times if update a lot */
337
338 KeyVector Keys;
339 Keys.push_back(x: keyA_);
340 Keys.push_back(x: keyB_);
341 JointMarginal joint_marginal12 = marginals.jointMarginalCovariance(variables: Keys);
342 Matrix cov1 = joint_marginal12(keyA_, keyA_);
343 Matrix cov2 = joint_marginal12(keyB_, keyB_);
344 Matrix cov12 = joint_marginal12(keyA_, keyB_);
345
346 updateNoiseModels_givenCovs(values, cov1, cov2, cov12);
347 }
348
349 /* ************************************************************************* */
350 void updateNoiseModels(const Values& values, const NonlinearFactorGraph& graph){
351 /* Update model_inlier_ and model_outlier_ to account for uncertainty in robot trajectories
352 * (note these are given in the E step, where indicator probabilities are calculated).
353 *
354 * Principle: R += [H1 H2] * joint_cov12 * [H1 H2]', where H1, H2 are Jacobians of the
355 * unwhitened error w.r.t. states, and R is the measurement covariance (inlier or outlier modes).
356 *
357 * TODO: improve efficiency (info form)
358 */
359
360 // get joint covariance of the involved states
361
362 Marginals marginals(graph, values, Marginals::QR);
363
364 this->updateNoiseModels(values, marginals);
365 }
366
367 /* ************************************************************************* */
368 void updateNoiseModels_givenCovs(const Values& values, const Matrix& cov1, const Matrix& cov2, const Matrix& cov12){
369 /* Update model_inlier_ and model_outlier_ to account for uncertainty in robot trajectories
370 * (note these are given in the E step, where indicator probabilities are calculated).
371 *
372 * Principle: R += [H1 H2] * joint_cov12 * [H1 H2]', where H1, H2 are Jacobians of the
373 * unwhitened error w.r.t. states, and R is the measurement covariance (inlier or outlier modes).
374 *
375 * TODO: improve efficiency (info form)
376 */
377
378 const T& p1 = values.at<T>(keyA_);
379 const T& p2 = values.at<T>(keyB_);
380
381 Matrix H1, H2;
382 p1.between(p2, H1, H2); // h(x)
383
384 Matrix H;
385 H.resize(rows: H1.rows(), cols: H1.rows()+H2.rows());
386 H << H1, H2; // H = [H1 H2]
387
388 Matrix joint_cov;
389 joint_cov.resize(rows: cov1.rows()+cov2.rows(), cols: cov1.cols()+cov2.cols());
390 joint_cov << cov1, cov12,
391 cov12.transpose(), cov2;
392
393 Matrix cov_state = H*joint_cov*H.transpose();
394
395 // model_inlier_->print("before:");
396
397 // update inlier and outlier noise models
398 Matrix covRinlier = (model_inlier_->R().transpose()*model_inlier_->R()).inverse();
399 model_inlier_ = noiseModel::Gaussian::Covariance(covariance: covRinlier + cov_state);
400
401 Matrix covRoutlier = (model_outlier_->R().transpose()*model_outlier_->R()).inverse();
402 model_outlier_ = noiseModel::Gaussian::Covariance(covariance: covRoutlier + cov_state);
403
404 // model_inlier_->print("after:");
405 // std::cout<<"covRinlier + cov_state: "<<covRinlier + cov_state<<std::endl;
406 }
407
408
409 /* ************************************************************************* */
410
411 size_t dim() const override {
412 return model_inlier_->R().rows() + model_inlier_->R().cols();
413 }
414
415 private:
416
417#if GTSAM_ENABLE_BOOST_SERIALIZATION
418 /** Serialization function */
419 friend class boost::serialization::access;
420 template<class ARCHIVE>
421 void serialize(ARCHIVE & ar, const unsigned int /*version*/) {
422 ar & boost::serialization::make_nvp("NonlinearFactor",
423 boost::serialization::base_object<Base>(*this));
424 //ar & BOOST_SERIALIZATION_NVP(measured_);
425 }
426#endif
427 }; // \class TransformBtwRobotsUnaryFactorEM
428
429 /// traits
430 template<class VALUE>
431 struct traits<TransformBtwRobotsUnaryFactorEM<VALUE> > :
432 public Testable<TransformBtwRobotsUnaryFactorEM<VALUE> > {
433 };
434
435} /// namespace gtsam
436