/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file testNonlinearOptimizer.cpp
 * @brief Unit tests for NonlinearOptimizer class
 * @author Frank Dellaert
 */

#include <tests/smallExample.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h>
#include <gtsam/nonlinear/GaussNewtonOptimizer.h>
#include <gtsam/nonlinear/DoglegOptimizer.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/base/Matrix.h>

#include <CppUnitLite/TestHarness.h>

#include <iostream>
#include <fstream>

using namespace std;
using namespace gtsam;

const double tol = 1e-5;

using symbol_shorthand::X;
using symbol_shorthand::L;
/* ************************************************************************* */
TEST( NonlinearOptimizer, paramsEquals )
{
  // default constructors lead to two identical params
  GaussNewtonParams gnParams1;
  GaussNewtonParams gnParams2;
  CHECK(gnParams1.equals(gnParams2));

  // but the params become different if we change something in gnParams2
  gnParams2.setVerbosity("DELTA");
  CHECK(!gnParams1.equals(gnParams2));
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, iterateLM )
{
  // really non-linear factor graph
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  // config far from minimum
  Point2 x0(3,0);
  Values config;
  config.insert(X(1), x0);

  // normal iterate
  GaussNewtonParams gnParams;
  GaussNewtonOptimizer gnOptimizer(fg, config, gnParams);
  gnOptimizer.iterate();

  // LM iterate with lambda 0 should be the same
  LevenbergMarquardtParams lmParams;
  lmParams.lambdaInitial = 0.0;
  LevenbergMarquardtOptimizer lmOptimizer(fg, config, lmParams);
  lmOptimizer.iterate();

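  // With lambdaInitial = 0 the damping term vanishes, so the LM update solves the
  // same undamped normal equations J^T J * delta = -J^T r as the Gauss-Newton step;
  // the two iterates should therefore agree to numerical tolerance.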
  CHECK(assert_equal(gnOptimizer.values(), lmOptimizer.values(), 1e-9));
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, optimize )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  // test error at minimum
  Point2 xstar(0,0);
  Values cstar;
  cstar.insert(X(1), xstar);
  DOUBLES_EQUAL(0.0,fg.error(cstar),0.0);

  // test error at initial = [(1-cos(3))^2 + (sin(3))^2]*50 ~= 199
  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);
  DOUBLES_EQUAL(199.0,fg.error(c0),1e-3);

  // optimize parameters
  Ordering ord;
  ord.push_back(X(1));

  // Gauss-Newton
  GaussNewtonParams gnParams;
  gnParams.ordering = ord;
  Values actual1 = GaussNewtonOptimizer(fg, c0, gnParams).optimize();
  DOUBLES_EQUAL(0,fg.error(actual1),tol);

  // Levenberg-Marquardt
  LevenbergMarquardtParams lmParams;
  lmParams.ordering = ord;
  Values actual2 = LevenbergMarquardtOptimizer(fg, c0, lmParams).optimize();
  DOUBLES_EQUAL(0,fg.error(actual2),tol);

  // Dogleg
  DoglegParams dlParams;
  dlParams.ordering = ord;
  Values actual3 = DoglegOptimizer(fg, c0, dlParams).optimize();
  DOUBLES_EQUAL(0,fg.error(actual3),tol);
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, SimpleLMOptimizer )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  Values actual = LevenbergMarquardtOptimizer(fg, c0).optimize();
  DOUBLES_EQUAL(0,fg.error(actual),tol);
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, SimpleGNOptimizer )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  Values actual = GaussNewtonOptimizer(fg, c0).optimize();
  DOUBLES_EQUAL(0,fg.error(actual),tol);
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, SimpleDLOptimizer )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  Values actual = DoglegOptimizer(fg, c0).optimize();
  DOUBLES_EQUAL(0,fg.error(actual),tol);
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, optimization_method )
{
  LevenbergMarquardtParams paramsQR;
  paramsQR.linearSolverType = LevenbergMarquardtParams::MULTIFRONTAL_QR;
  LevenbergMarquardtParams paramsChol;
  paramsChol.linearSolverType = LevenbergMarquardtParams::MULTIFRONTAL_CHOLESKY;
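  // Both multifrontal back-ends should reach the same minimum; QR is generally the
  // more numerically robust factorization while Cholesky is cheaper, so this test
  // only checks that they agree on a well-conditioned problem.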

  NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph();

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  Values actualMFQR = LevenbergMarquardtOptimizer(fg, c0, paramsQR).optimize();
  DOUBLES_EQUAL(0,fg.error(actualMFQR),tol);

  Values actualMFChol = LevenbergMarquardtOptimizer(fg, c0, paramsChol).optimize();
  DOUBLES_EQUAL(0,fg.error(actualMFChol),tol);
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, Factorization )
{
  Values config;
  config.insert(X(1), Pose2(0.,0.,0.));
  config.insert(X(2), Pose2(1.5,0.,0.));

  NonlinearFactorGraph graph;
  graph.addPrior(X(1), Pose2(0.,0.,0.), noiseModel::Isotropic::Sigma(3, 1e-10));
  graph.emplace_shared<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.,0.,0.), noiseModel::Isotropic::Sigma(3, 1));

  Ordering ordering;
  ordering.push_back(X(1));
  ordering.push_back(X(2));

  LevenbergMarquardtParams params;
  LevenbergMarquardtParams::SetLegacyDefaults(&params);
  LevenbergMarquardtOptimizer optimizer(graph, config, ordering, params);
  optimizer.iterate();

  Values expected;
  expected.insert(X(1), Pose2(0.,0.,0.));
  expected.insert(X(2), Pose2(1.,0.,0.));
  CHECK(assert_equal(expected, optimizer.values(), 1e-5));
}

/* ************************************************************************* */
TEST(NonlinearOptimizer, NullFactor) {

  NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph();

  // Add a null factor (an empty shared pointer); the optimizers should skip it gracefully
  fg.push_back(NonlinearFactorGraph::sharedFactor());

  // test error at minimum
  Point2 xstar(0,0);
  Values cstar;
  cstar.insert(X(1), xstar);
  DOUBLES_EQUAL(0.0,fg.error(cstar),0.0);

  // test error at initial = [(1-cos(3))^2 + (sin(3))^2]*50 ~= 199
  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);
  DOUBLES_EQUAL(199.0,fg.error(c0),1e-3);

  // optimize parameters
  Ordering ord;
  ord.push_back(X(1));

  // Gauss-Newton
  Values actual1 = GaussNewtonOptimizer(fg, c0, ord).optimize();
  DOUBLES_EQUAL(0,fg.error(actual1),tol);

  // Levenberg-Marquardt
  Values actual2 = LevenbergMarquardtOptimizer(fg, c0, ord).optimize();
  DOUBLES_EQUAL(0,fg.error(actual2),tol);

  // Dogleg
  Values actual3 = DoglegOptimizer(fg, c0, ord).optimize();
  DOUBLES_EQUAL(0,fg.error(actual3),tol);
}

/* ************************************************************************* */
TEST_UNSAFE(NonlinearOptimizer, MoreOptimization) {

  NonlinearFactorGraph fg;
  fg.addPrior(0, Pose2(0, 0, 0), noiseModel::Isotropic::Sigma(3, 1));
  fg.emplace_shared<BetweenFactor<Pose2>>(0, 1, Pose2(1, 0, M_PI / 2),
                                          noiseModel::Isotropic::Sigma(3, 1));
  fg.emplace_shared<BetweenFactor<Pose2>>(1, 2, Pose2(1, 0, M_PI / 2),
                                          noiseModel::Isotropic::Sigma(3, 1));

  Values init;
  init.insert(0, Pose2(3, 4, -M_PI));
  init.insert(1, Pose2(10, 2, -M_PI));
  init.insert(2, Pose2(11, 7, -M_PI));

  Values expected;
  expected.insert(0, Pose2(0, 0, 0));
  expected.insert(1, Pose2(1, 0, M_PI / 2));
  expected.insert(2, Pose2(1, 1, M_PI));

  VectorValues expectedGradient;
  expectedGradient.insert(0, Z_3x1);
  expectedGradient.insert(1, Z_3x1);
  expectedGradient.insert(2, Z_3x1);
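  // gradientAtZero() of the graph linearized at the current estimate is the gradient
  // of the nonlinear error at that point, so at a (local) minimum it should be all zeros.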

  // Try LM and Dogleg
  LevenbergMarquardtParams params = LevenbergMarquardtParams::LegacyDefaults();
  {
    LevenbergMarquardtOptimizer optimizer(fg, init, params);

    // test convergence
    Values actual = optimizer.optimize();
    EXPECT(assert_equal(expected, actual));

    // Check that the gradient is zero
    GaussianFactorGraph::shared_ptr linear = optimizer.linearize();
    EXPECT(assert_equal(expectedGradient,linear->gradientAtZero()));
  }
  EXPECT(assert_equal(expected, DoglegOptimizer(fg, init).optimize()));

//  cout << "===================================================================================" << endl;

  // Try LM with diagonal damping
  Values initBetter;
  initBetter.insert(0, Pose2(3,4,0));
  initBetter.insert(1, Pose2(10,2,M_PI/3));
  initBetter.insert(2, Pose2(11,7,M_PI/2));

  {
    params.diagonalDamping = true;
    LevenbergMarquardtOptimizer optimizer(fg, initBetter, params);

    // test the diagonal
    GaussianFactorGraph::shared_ptr linear = optimizer.linearize();
    VectorValues d = linear->hessianDiagonal();
    VectorValues sqrtHessianDiagonal = d;
    for (auto& [key, value] : sqrtHessianDiagonal) {
      value = value.cwiseSqrt();
    }
    GaussianFactorGraph damped = optimizer.buildDampedSystem(*linear, sqrtHessianDiagonal);
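    // With diagonal damping, LM adds lambda * diag(H) to the Hessian diagonal, so the
    // damped system's diagonal should equal (1 + lambdaInitial) * d, which is what the
    // next check verifies.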
    VectorValues expectedDiagonal = d + params.lambdaInitial * d;
    EXPECT(assert_equal(expectedDiagonal, damped.hessianDiagonal()));

    // test convergence (does not!)
    Values actual = optimizer.optimize();
    EXPECT(assert_equal(expected, actual));

    // Check that the gradient is zero (it is not!)
    linear = optimizer.linearize();
    EXPECT(assert_equal(expectedGradient,linear->gradientAtZero()));

    // Check that the gradient is zero for damped system (it is not!)
    damped = optimizer.buildDampedSystem(*linear, sqrtHessianDiagonal);
    VectorValues actualGradient = damped.gradientAtZero();
    EXPECT(assert_equal(expectedGradient,actualGradient));

    /* This block was made to test the original initial guess "init"
    // Check errors at convergence and errors in direction of gradient (decreases!)
    EXPECT_DOUBLES_EQUAL(46.02558,fg.error(actual),1e-5);
    EXPECT_DOUBLES_EQUAL(44.742237,fg.error(actual.retract(-0.01*actualGradient)),1e-5);

    // Check that solve yields gradient (it's not equal to gradient, as predicted)
    VectorValues delta = damped.optimize();
    double factor = actualGradient[0][0]/delta[0][0];
    EXPECT(assert_equal(actualGradient,factor*delta));

    // Still pointing downhill wrt actual gradient !
    EXPECT_DOUBLES_EQUAL( 0.1056157,dot(-1*actualGradient,delta),1e-3);

    // delta.print("This is the delta value computed by LM with diagonal damping");

    // Still pointing downhill wrt expected gradient (IT DOES NOT! actually they are perpendicular)
    EXPECT_DOUBLES_EQUAL( 0.0,dot(-1*expectedGradient,delta),1e-5);

    // Check errors at convergence and errors in direction of solution (does not decrease!)
    EXPECT_DOUBLES_EQUAL(46.0254859,fg.error(actual.retract(delta)),1e-5);

    // Check errors at convergence and errors at a small step in direction of solution (does not decrease!)
    EXPECT_DOUBLES_EQUAL(46.0255,fg.error(actual.retract(0.01*delta)),1e-3);
    */
  }
}

/* ************************************************************************* */
TEST(NonlinearOptimizer, Pose2OptimizationWithHuberNoOutlier) {

  NonlinearFactorGraph fg;
  fg.addPrior(0, Pose2(0,0,0), noiseModel::Isotropic::Sigma(3,1));
  fg.emplace_shared<BetweenFactor<Pose2>>(0, 1, Pose2(1,1.1,M_PI/4),
      noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(2.0),
                                 noiseModel::Isotropic::Sigma(3,1)));
  fg.emplace_shared<BetweenFactor<Pose2>>(0, 1, Pose2(1,0.9,M_PI/2),
      noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(3.0),
                                 noiseModel::Isotropic::Sigma(3,1)));

  Values init;
  init.insert(0, Pose2(0,0,0));
  init.insert(1, Pose2(0.961187, 0.99965, 1.1781));

  Values expected;
  expected.insert(0, Pose2(0,0,0));
  expected.insert(1, Pose2(0.961187, 0.99965, 1.1781));

  LevenbergMarquardtParams lm_params;

  auto gn_result = GaussNewtonOptimizer(fg, init).optimize();
  auto lm_result = LevenbergMarquardtOptimizer(fg, init, lm_params).optimize();
  auto dl_result = DoglegOptimizer(fg, init).optimize();

  EXPECT(assert_equal(expected, gn_result, 3e-2));
  EXPECT(assert_equal(expected, lm_result, 3e-2));
  EXPECT(assert_equal(expected, dl_result, 3e-2));
}

/* ************************************************************************* */
TEST(NonlinearOptimizer, Point2LinearOptimizationWithHuber) {

  NonlinearFactorGraph fg;
  fg.addPrior(0, Point2(0,0), noiseModel::Isotropic::Sigma(2,0.01));
  fg.emplace_shared<BetweenFactor<Point2>>(0, 1, Point2(1,1.8),
      noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(1.0),
                                 noiseModel::Isotropic::Sigma(2,1)));
  fg.emplace_shared<BetweenFactor<Point2>>(0, 1, Point2(1,0.9),
      noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(1.0),
                                 noiseModel::Isotropic::Sigma(2,1)));
  fg.emplace_shared<BetweenFactor<Point2>>(0, 1, Point2(1,90),
      noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(1.0),
                                 noiseModel::Isotropic::Sigma(2,1)));

  Values init;
  init.insert(0, Point2(1,1));
  init.insert(1, Point2(1,0));

  Values expected;
  expected.insert(0, Point2(0,0));
  expected.insert(1, Point2(1,1.85));

  LevenbergMarquardtParams params;

  auto gn_result = GaussNewtonOptimizer(fg, init).optimize();
  auto lm_result = LevenbergMarquardtOptimizer(fg, init, params).optimize();
  auto dl_result = DoglegOptimizer(fg, init).optimize();

  EXPECT(assert_equal(expected, gn_result, 1e-4));
  EXPECT(assert_equal(expected, lm_result, 1e-4));
  EXPECT(assert_equal(expected, dl_result, 1e-4));
}

/* ************************************************************************* */
TEST(NonlinearOptimizer, Pose2OptimizationWithHuber) {

  NonlinearFactorGraph fg;
  fg.addPrior(0, Pose2(0,0, 0), noiseModel::Isotropic::Sigma(3,0.1));
  fg.emplace_shared<BetweenFactor<Pose2>>(0, 1, Pose2(0,9, M_PI/2),
      noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(0.2),
                                 noiseModel::Isotropic::Sigma(3,1)));
  fg.emplace_shared<BetweenFactor<Pose2>>(0, 1, Pose2(0, 11, M_PI/2),
      noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(0.2),
                                 noiseModel::Isotropic::Sigma(3,1)));
  fg.emplace_shared<BetweenFactor<Pose2>>(0, 1, Pose2(0, 10, M_PI/2),
      noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(0.2),
                                 noiseModel::Isotropic::Sigma(3,1)));
  fg.emplace_shared<BetweenFactor<Pose2>>(0, 1, Pose2(0,9, 0),
      noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(0.2),
                                 noiseModel::Isotropic::Sigma(3,1)));

  Values init;
  init.insert(0, Pose2(0, 0, 0));
  init.insert(1, Pose2(0, 10, M_PI/4));

  Values expected;
  expected.insert(0, Pose2(0, 0, 0));
  expected.insert(1, Pose2(0, 10, 1.45212));

  LevenbergMarquardtParams params;

  auto gn_result = GaussNewtonOptimizer(fg, init).optimize();
  auto lm_result = LevenbergMarquardtOptimizer(fg, init, params).optimize();
  auto dl_result = DoglegOptimizer(fg, init).optimize();

  EXPECT(assert_equal(expected, gn_result, 1e-1));
  EXPECT(assert_equal(expected, lm_result, 1e-1));
  EXPECT(assert_equal(expected, dl_result, 1e-1));
}

/* ************************************************************************* */
TEST(NonlinearOptimizer, RobustMeanCalculation) {

  NonlinearFactorGraph fg;

  Values init;

  Values expected;

  auto huber = noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(20),
                                          noiseModel::Isotropic::Sigma(1, 1));

  vector<double> pts{-10,-3,-1,1,3,10,1000};
  for(auto pt : pts) {
    fg.addPrior(0, pt, huber);
  }

  init.insert(0, 100.0);
  expected.insert(0, 3.33333333);
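  // The expected robust mean follows from the Huber gradient: with threshold k = 20
  // only the 1000 measurement lies in the linear region at the solution, so
  // stationarity gives 6*x - (-10 - 3 - 1 + 1 + 3 + 10) - 20 = 0, i.e. x = 20/6 ~ 3.3333.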

  DoglegParams params_dl;
  params_dl.setRelativeErrorTol(1e-10);

  auto gn_result = GaussNewtonOptimizer(fg, init).optimize();
  auto lm_result = LevenbergMarquardtOptimizer(fg, init).optimize();
  auto dl_result = DoglegOptimizer(fg, init, params_dl).optimize();

  EXPECT(assert_equal(expected, gn_result, tol));
  EXPECT(assert_equal(expected, lm_result, tol));
  EXPECT(assert_equal(expected, dl_result, tol));
}

/* ************************************************************************* */
TEST(NonlinearOptimizer, disconnected_graph) {
  Values expected;
  expected.insert(X(1), Pose2(0.,0.,0.));
  expected.insert(X(2), Pose2(1.5,0.,0.));
  expected.insert(X(3), Pose2(3.0,0.,0.));

  Values init;
  init.insert(X(1), Pose2(0.,0.,0.));
  init.insert(X(2), Pose2(0.,0.,0.));
  init.insert(X(3), Pose2(0.,0.,0.));

  NonlinearFactorGraph graph;
  graph.addPrior(X(1), Pose2(0.,0.,0.), noiseModel::Isotropic::Sigma(3,1));
  graph.emplace_shared<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.5,0.,0.), noiseModel::Isotropic::Sigma(3,1));
  graph.addPrior(X(3), Pose2(3.,0.,0.), noiseModel::Isotropic::Sigma(3,1));

  EXPECT(assert_equal(expected, LevenbergMarquardtOptimizer(graph, init).optimize()));
}

/* ************************************************************************* */
#include <gtsam/linear/iterative.h>

class IterativeLM : public LevenbergMarquardtOptimizer {
  /// Solver specific parameters
  ConjugateGradientParameters cgParams_;
  Values initial_;

 public:
  /// Constructor
  IterativeLM(const NonlinearFactorGraph& graph, const Values& initialValues,
              const ConjugateGradientParameters& p,
              const LevenbergMarquardtParams& params =
                  LevenbergMarquardtParams::LegacyDefaults())
      : LevenbergMarquardtOptimizer(graph, initialValues, params),
        cgParams_(p),
        initial_(initialValues) {}

  /// Solve that uses conjugate gradient
  VectorValues solve(const GaussianFactorGraph& gfg,
                     const NonlinearOptimizerParams& params) const override {
    VectorValues zeros = initial_.zeroVectors();
    return conjugateGradientDescent(gfg, zeros, cgParams_);
  }
};
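// IterativeLM is a minimal sketch of swapping the linear solver: by overriding solve(),
// the linear system the LM optimizer builds each iteration is handed to
// conjugateGradientDescent, started from the zero vector, instead of being factorized
// directly. The test below only checks that this subclass still reaches the minimum.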

/* ************************************************************************* */
TEST(NonlinearOptimizer, subclass_solver) {
  Values expected;
  expected.insert(X(1), Pose2(0., 0., 0.));
  expected.insert(X(2), Pose2(1.5, 0., 0.));
  expected.insert(X(3), Pose2(3.0, 0., 0.));

  Values init;
  init.insert(X(1), Pose2(0., 0., 0.));
  init.insert(X(2), Pose2(0., 0., 0.));
  init.insert(X(3), Pose2(0., 0., 0.));

  NonlinearFactorGraph graph;
  graph.addPrior(X(1), Pose2(0., 0., 0.), noiseModel::Isotropic::Sigma(3, 1));
  graph.emplace_shared<BetweenFactor<Pose2>>(X(1), X(2), Pose2(1.5, 0., 0.),
                                             noiseModel::Isotropic::Sigma(3, 1));
  graph.addPrior(X(3), Pose2(3., 0., 0.), noiseModel::Isotropic::Sigma(3, 1));

  ConjugateGradientParameters p;
  Values actual = IterativeLM(graph, init, p).optimize();
  EXPECT(assert_equal(expected, actual, 1e-4));
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, logfile )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  // Levenberg-Marquardt
  LevenbergMarquardtParams lmParams;
  static const string filename("testNonlinearOptimizer.log");
  lmParams.logFile = filename;
  LevenbergMarquardtOptimizer(fg, c0, lmParams).optimize();

//  stringstream expected,actual;
//  ifstream ifs(("../../gtsam/tests/" + filename).c_str());
//  if(!ifs) throw std::runtime_error(filename);
//  expected << ifs.rdbuf();
//  ifs.close();
//  ifstream ifs2(filename.c_str());
//  if(!ifs2) throw std::runtime_error(filename);
//  actual << ifs2.rdbuf();
//  EXPECT(actual.str()==expected.str());
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, iterationHook_LM )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  // Levenberg-Marquardt
  LevenbergMarquardtParams lmParams;
  size_t lastIterCalled = 0;
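  // iterationHook is invoked after every iteration with the iteration count and the
  // total error before and after that step; here it records the last iteration seen
  // and checks that the error decreases monotonically.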
  lmParams.iterationHook = [&](size_t iteration, double oldError, double newError)
  {
    // Tests:
    lastIterCalled = iteration;
    EXPECT(newError<oldError);

    // Example of evolution printout:
    //std::cout << "iter: " << iteration << " error: " << oldError << " => " << newError <<"\n";
  };
  LevenbergMarquardtOptimizer(fg, c0, lmParams).optimize();

  EXPECT(lastIterCalled>5);
}
/* ************************************************************************* */
TEST( NonlinearOptimizer, iterationHook_CG )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  // Nonlinear conjugate gradient
  NonlinearConjugateGradientOptimizer::Parameters cgParams;
  size_t lastIterCalled = 0;
  cgParams.iterationHook = [&](size_t iteration, double oldError, double newError)
  {
    // Tests:
    lastIterCalled = iteration;
    EXPECT(newError<oldError);

    // Example of evolution printout:
    //std::cout << "iter: " << iteration << " error: " << oldError << " => " << newError <<"\n";
  };
  NonlinearConjugateGradientOptimizer(fg, c0, cgParams).optimize();

  EXPECT(lastIterCalled>5);
}

/* ************************************************************************* */
// Minimal traits example
struct MyType : public Vector3 {
  using Vector3::Vector3;
};

namespace gtsam {
template <>
struct traits<MyType> {
  static bool Equals(const MyType& a, const MyType& b, double tol) {
    return (a - b).array().abs().maxCoeff() < tol;
  }
  static void Print(const MyType&, const string&) {}
  static int GetDimension(const MyType&) { return 3; }
  static MyType Retract(const MyType& a, const Vector3& b) { return a + b; }
  static Vector3 Local(const MyType& a, const MyType& b) { return b - a; }
};
}  // namespace gtsam
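// This specialization is all that is needed for Values and the optimizers to treat
// MyType as a manifold value: Equals/Print for testing and printing, GetDimension for
// the tangent-space size, and Retract/Local implemented as plain vector addition and
// subtraction.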

TEST(NonlinearOptimizer, Traits) {
  NonlinearFactorGraph fg;
  fg.addPrior(0, MyType(0, 0, 0), noiseModel::Isotropic::Sigma(3, 1));

  Values init;
  init.insert(0, MyType(0,0,0));

  LevenbergMarquardtOptimizer optimizer(fg, init);
  Values actual = optimizer.optimize();
  EXPECT(assert_equal(init, actual));
}

/* ************************************************************************* */
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */