/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file    testDoglegOptimizer.cpp
 * @brief   Unit tests for DoglegOptimizer
 * @author  Richard Roberts
 * @author  Frank Dellaert
 */

#include <CppUnitLite/TestHarness.h>

#include <tests/smallExample.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/nonlinear/DoglegOptimizer.h>
#include <gtsam/nonlinear/DoglegOptimizerImpl.h>
#include <gtsam/nonlinear/NonlinearEquality.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/nonlinear/ISAM2.h>
#include <gtsam/slam/SmartProjectionPoseFactor.h>
#include "examples/SFMdata.h"

#include <algorithm>
#include <functional>
#include <unordered_map>

using namespace std;
using namespace gtsam;

// Convenience for named keys
using symbol_shorthand::X;
/* ************************************************************************* */
TEST(DoglegOptimizer, ComputeBlend) {
  // Create an arbitrary Bayes Net
  GaussianBayesNet gbn;
  gbn.emplace_shared<GaussianConditional>(
      0, Vector2(1.0, 2.0), (Matrix(2, 2) << 3.0, 4.0, 0.0, 6.0).finished(),
      3, (Matrix(2, 2) << 7.0, 8.0, 9.0, 10.0).finished(),
      4, (Matrix(2, 2) << 11.0, 12.0, 13.0, 14.0).finished());
  gbn.emplace_shared<GaussianConditional>(
      1, Vector2(15.0, 16.0), (Matrix(2, 2) << 17.0, 18.0, 0.0, 20.0).finished(),
      2, (Matrix(2, 2) << 21.0, 22.0, 23.0, 24.0).finished(),
      4, (Matrix(2, 2) << 25.0, 26.0, 27.0, 28.0).finished());
  gbn.emplace_shared<GaussianConditional>(
      2, Vector2(29.0, 30.0), (Matrix(2, 2) << 31.0, 32.0, 0.0, 34.0).finished(),
      3, (Matrix(2, 2) << 35.0, 36.0, 37.0, 38.0).finished());
  gbn.emplace_shared<GaussianConditional>(
      3, Vector2(39.0, 40.0), (Matrix(2, 2) << 41.0, 42.0, 0.0, 44.0).finished(),
      4, (Matrix(2, 2) << 45.0, 46.0, 47.0, 48.0).finished());
  gbn.emplace_shared<GaussianConditional>(
      4, Vector2(49.0, 50.0), (Matrix(2, 2) << 51.0, 52.0, 0.0, 54.0).finished());

  // Compute steepest descent point
  VectorValues xu = gbn.optimizeGradientSearch();

  // Compute Newton's method point
  VectorValues xn = gbn.optimize();

  // The Newton's method point should be more "adventurous", i.e. larger,
  // than the steepest descent point
  EXPECT(xu.vector().norm() < xn.vector().norm());

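  // For reference: ComputeBlend returns the point on the segment between the
  // steepest descent point xu and the Newton point xn whose norm equals the
  // trust region radius Delta (it solves a quadratic for the interpolation
  // factor), so the check below only needs to look at the norm.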
  // Compute blend
  double Delta = 1.5;
  VectorValues xb = DoglegOptimizerImpl::ComputeBlend(Delta, xu, xn);
  DOUBLES_EQUAL(Delta, xb.vector().norm(), 1e-10);
}

/* ************************************************************************* */
TEST(DoglegOptimizer, ComputeBlendEdgeCases) {
  // Test derived from issue #1861.
  // Evaluate ComputeBlend behavior for the edge cases where the trust region
  // radius equals the norm of the Newton step or of the gradient step.

  // Simulated Newton (n) and gradient descent (u) step vectors with ||n|| > ||u||
  VectorValues::Dims dims;
  dims[0] = 3;
  VectorValues n(Vector3(0.3233546123, -0.2133456123, 0.3664345632), dims);
  VectorValues u(Vector3(0.0023456342, -0.04535687, 0.087345661212), dims);

  // Upper edge case: trust region equals the magnitude of the Newton step
  EXPECT(assert_equal(n, DoglegOptimizerImpl::ComputeBlend(n.norm(), u, n, false)));
  // Lower edge case: trust region equals the magnitude of the gradient step
  EXPECT(assert_equal(u, DoglegOptimizerImpl::ComputeBlend(u.norm(), u, n, false)));
}

/* ************************************************************************* */
TEST(DoglegOptimizer, ComputeDoglegPoint) {
  // Create an arbitrary Bayes Net
  GaussianBayesNet gbn;
  gbn.emplace_shared<GaussianConditional>(
      0, Vector2(1.0, 2.0), (Matrix(2, 2) << 3.0, 4.0, 0.0, 6.0).finished(),
      3, (Matrix(2, 2) << 7.0, 8.0, 9.0, 10.0).finished(),
      4, (Matrix(2, 2) << 11.0, 12.0, 13.0, 14.0).finished());
  gbn.emplace_shared<GaussianConditional>(
      1, Vector2(15.0, 16.0), (Matrix(2, 2) << 17.0, 18.0, 0.0, 20.0).finished(),
      2, (Matrix(2, 2) << 21.0, 22.0, 23.0, 24.0).finished(),
      4, (Matrix(2, 2) << 25.0, 26.0, 27.0, 28.0).finished());
  gbn.emplace_shared<GaussianConditional>(
      2, Vector2(29.0, 30.0), (Matrix(2, 2) << 31.0, 32.0, 0.0, 34.0).finished(),
      3, (Matrix(2, 2) << 35.0, 36.0, 37.0, 38.0).finished());
  gbn.emplace_shared<GaussianConditional>(
      3, Vector2(39.0, 40.0), (Matrix(2, 2) << 41.0, 42.0, 0.0, 44.0).finished(),
      4, (Matrix(2, 2) << 45.0, 46.0, 47.0, 48.0).finished());
  gbn.emplace_shared<GaussianConditional>(
      4, Vector2(49.0, 50.0), (Matrix(2, 2) << 51.0, 52.0, 0.0, 54.0).finished());

  // Compute dogleg point for different deltas
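  // The dogleg point has three regimes: if Delta <= ||dx_u|| it is the
  // steepest descent vector scaled down to length Delta; if
  // ||dx_u|| < Delta < ||dx_n|| it is the blend of the two steps; and if
  // Delta >= ||dx_n|| it is the full Newton step. Each case is exercised below.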

  double Delta1 = 0.5;  // Less than steepest descent
  VectorValues actual1 = DoglegOptimizerImpl::ComputeDoglegPoint(
      Delta1, gbn.optimizeGradientSearch(), gbn.optimize());
  DOUBLES_EQUAL(Delta1, actual1.vector().norm(), 1e-5);

  double Delta2 = 1.5;  // Between steepest descent and Newton's method
  VectorValues expected2 = DoglegOptimizerImpl::ComputeBlend(
      Delta2, gbn.optimizeGradientSearch(), gbn.optimize());
  VectorValues actual2 = DoglegOptimizerImpl::ComputeDoglegPoint(
      Delta2, gbn.optimizeGradientSearch(), gbn.optimize());
  DOUBLES_EQUAL(Delta2, actual2.vector().norm(), 1e-5);
  EXPECT(assert_equal(expected2, actual2));

  double Delta3 = 5.0;  // Larger than Newton's method point
  VectorValues expected3 = gbn.optimize();
  VectorValues actual3 = DoglegOptimizerImpl::ComputeDoglegPoint(
      Delta3, gbn.optimizeGradientSearch(), gbn.optimize());
  EXPECT(assert_equal(expected3, actual3));
}

/* ************************************************************************* */
TEST(DoglegOptimizer, Iterate) {
  // really non-linear factor graph
  NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph();

  // config far from minimum
  Point2 x0(3, 0);
  Values config;
  config.insert(X(1), x0);

  double Delta = 1.0;
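  // Each Iterate call evaluates the dogleg step for the current trust region
  // radius and then adapts the radius: it grows when the nonlinear error
  // reduction matches the linear model's prediction well and shrinks
  // otherwise (standard dogleg gain-ratio logic; see DoglegOptimizerImpl).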
  for (size_t it = 0; it < 10; ++it) {
    auto linearized = fg.linearize(config);

    // Iterate assumes that linear error = nonlinear error at the
    // linearization point, and this should be true
    double nonlinearError = fg.error(config);
    double linearError = linearized->error(config.zeroVectors());
    DOUBLES_EQUAL(nonlinearError, linearError, 1e-5);

    auto gbn = linearized->eliminateSequential();
    VectorValues dx_u = gbn->optimizeGradientSearch();
    VectorValues dx_n = gbn->optimize();
    DoglegOptimizerImpl::IterationResult result = DoglegOptimizerImpl::Iterate(
        Delta, DoglegOptimizerImpl::SEARCH_EACH_ITERATION, dx_u, dx_n, *gbn, fg,
        config, fg.error(config));
    Delta = result.delta;
    EXPECT(result.f_error < fg.error(config));  // Check that error decreases

    Values newConfig(config.retract(result.dx_d));
    config = newConfig;
    DOUBLES_EQUAL(fg.error(config), result.f_error, 1e-5);  // Check that error is correctly filled in
  }
}

/* ************************************************************************* */
TEST(DoglegOptimizer, Constraint) {
  // Create a pose graph with a constraint on the first pose
  NonlinearFactorGraph graph;
  const Pose2 origin(0, 0, 0), pose2(2, 0, 0);
  graph.emplace_shared<NonlinearEquality<Pose2> >(1, origin);
  auto model = noiseModel::Diagonal::Sigmas(Vector3(0.2, 0.2, 0.1));
  graph.emplace_shared<BetweenFactor<Pose2> >(1, 2, pose2, model);

  // Create feasible initial estimate
  Values initial;
  initial.insert(1, origin);  // feasible !
  initial.insert(2, Pose2(2.3, 0.1, -0.2));

  // Optimize the initial values using DoglegOptimizer
  DoglegParams params;
  params.setVerbosityDL("VERBOSITY");
  DoglegOptimizer optimizer(graph, initial, params);
  Values result = optimizer.optimize();

  // Check result
  EXPECT(assert_equal(pose2, result.at<Pose2>(2)));

  // Create infeasible initial estimate
  Values infeasible;
  infeasible.insert(1, Pose2(0.1, 0, 0));  // infeasible !
  infeasible.insert(2, Pose2(2.3, 0.1, -0.2));

  // Try optimizing with infeasible initial estimate
  DoglegOptimizer optimizer2(graph, infeasible, params);

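  // NonlinearEquality refuses to linearize at a value that violates the
  // constraint, so optimize() should throw. Under TBB the exception
  // propagates out of the parallel linearization loop, so only a generic
  // std::exception can be relied on there.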
#ifdef GTSAM_USE_TBB
  CHECK_EXCEPTION(optimizer2.optimize(), std::exception);
#else
  CHECK_EXCEPTION(optimizer2.optimize(), std::invalid_argument);
#endif
}

/* ************************************************************************* */
/**
 * Test created to fix an issue in ISAM2 when using the DoglegOptimizer.
 * Originally reported by kvmanohar22 in issue #301
 * https://github.com/borglab/gtsam/issues/301
 *
 * This test is based on a script provided by kvmanohar22
 * to help reproduce the issue.
 */
TEST(DogLegOptimizer, VariableUpdate) {
  // Make the typename short so it looks much cleaner
  typedef SmartProjectionPoseFactor<Cal3_S2> SmartFactor;

  // create a typedef to the camera type
  typedef PinholePose<Cal3_S2> Camera;
  // Define the camera calibration parameters
  Cal3_S2::shared_ptr K(new Cal3_S2(50.0, 50.0, 0.0, 50.0, 50.0));

  // Define the camera observation noise model
  noiseModel::Isotropic::shared_ptr measurementNoise =
      noiseModel::Isotropic::Sigma(2, 1.0);  // one pixel in u and v

  // Create the set of ground-truth landmarks and poses
  vector<Point3> points = createPoints();
  vector<Pose3> poses = createPoses();

  // Create a factor graph
  NonlinearFactorGraph graph;

  ISAM2DoglegParams doglegparams = ISAM2DoglegParams();
  doglegparams.verbose = false;
  ISAM2Params isam2_params;
  isam2_params.evaluateNonlinearError = true;
  isam2_params.relinearizeThreshold = 0.0;
  isam2_params.enableRelinearization = true;
  isam2_params.optimizationParams = doglegparams;
  isam2_params.relinearizeSkip = 1;
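  // With relinearizeThreshold = 0 and relinearizeSkip = 1, every variable is
  // relinearized on every update, a configuration chosen here to reproduce
  // the dogleg update issue from #301.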
  ISAM2 isam2(isam2_params);

  // Simulated measurements from each camera pose, adding them to the factor
  // graph
  unordered_map<int, SmartFactor::shared_ptr> smart_factors;
  for (size_t j = 0; j < points.size(); ++j) {
    // each smart factor represents a single landmark; we use a shared pointer
    // to init the factor, and then insert measurements.
    SmartFactor::shared_ptr smartfactor(new SmartFactor(measurementNoise, K));

    for (size_t i = 0; i < poses.size(); ++i) {
      // generate the 2D measurement
      Camera camera(poses[i], K);
      Point2 measurement = camera.project(points[j]);

      // call add() to insert the measurement into the single smart factor;
      // only the 2D measurement and the corresponding camera key are needed
      // here, since the noise model and calibration were fixed at
      // construction.

      // add only the first 3 measurements and update the later measurements
      // incrementally
      if (i < 3) smartfactor->add(measurement, i);
    }

    // insert the smart factor in the graph
    smart_factors[j] = smartfactor;
    graph.push_back(smartfactor);
  }

  // Add a prior on pose x0. This indirectly specifies where the origin is.
  // 0.3 rad std on roll,pitch,yaw and 10cm std on x,y,z
  // (rotation comes first in the Pose3 tangent space)
  noiseModel::Diagonal::shared_ptr noise = noiseModel::Diagonal::Sigmas(
      (Vector(6) << Vector3::Constant(0.3), Vector3::Constant(0.1)).finished());
  graph.emplace_shared<PriorFactor<Pose3> >(0, poses[0], noise);

  // Because the structure-from-motion problem has a scale ambiguity, the
  // problem is still under-constrained. Here we add a prior on the second
  // pose x1, which fixes the scale by indicating the distance between x0 and
  // x1. Because these two are fixed, the rest of the poses will also be
  // fixed.
  graph.emplace_shared<PriorFactor<Pose3> >(1, poses[1],
                                            noise);  // add directly to graph

  // Create the initial estimate to the solution
  // Intentionally initialize the variables off from the ground truth
  Values initialEstimate;
  Pose3 delta(Rot3::Rodrigues(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20));
  for (size_t i = 0; i < 3; ++i)
    initialEstimate.insert(i, poses[i].compose(delta));
  // initialEstimate.print("Initial Estimates:\n");

  // Optimize the graph and print results
  isam2.update(graph, initialEstimate);
  Values result = isam2.calculateEstimate();
  // result.print("Results:\n");

  // we add new measurements from this pose
  size_t pose_idx = 3;

  // Now update existing smart factors with new observations
  for (size_t j = 0; j < points.size(); ++j) {
    SmartFactor::shared_ptr smartfactor = smart_factors[j];

    // add the 4th measurement
    Camera camera(poses[pose_idx], K);
    Point2 measurement = camera.project(points[j]);
    smartfactor->add(measurement, pose_idx);
  }

  graph.resize(0);
  initialEstimate.clear();
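  // At this point the smart factors already inside ISAM2 carry an extra
  // measurement referring to the not-yet-added pose, and only that new pose
  // is supplied in the next update; this is the situation that used to break
  // the dogleg update (issue #301).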

  // update initial estimate for the new pose
  initialEstimate.insert(pose_idx, poses[pose_idx].compose(delta));

  // this update used to break the system (see issue #301)
  isam2.update(graph, initialEstimate);
  result = isam2.calculateEstimate();
  auto resultKeys = result.keys();
  EXPECT(std::find(resultKeys.begin(), resultKeys.end(), pose_idx) !=
         resultKeys.end());
}

/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr); }
/* ************************************************************************* */
