| 1 | /** |
| 2 | * @file NonlinearClusterTree.h |
| 3 | * @author Frank Dellaert |
| 4 | * @date March, 2016 |
| 5 | */ |
| 6 | |
| 7 | #pragma once |
| 8 | |
| 9 | #include <gtsam/nonlinear/NonlinearFactorGraph.h> |
| 10 | #include <gtsam/linear/GaussianBayesTree.h> |
| 11 | #include <gtsam/inference/ClusterTree.h> |
| 12 | |
| 13 | namespace gtsam { |
| 14 | class NonlinearClusterTree : public ClusterTree<NonlinearFactorGraph> { |
| 15 | public: |
| 16 | NonlinearClusterTree() {} |
| 17 | |
| 18 | struct NonlinearCluster : Cluster { |
    // Given a graph and its variable index, pull the factors involving the specified keys
    // into this cluster. The factors are erased from the graph.
| 21 | // TODO(frank): fairly hacky and inefficient. Think about iterating the graph once instead |
| 22 | NonlinearCluster(const VariableIndex& variableIndex, const KeyVector& keys, |
| 23 | NonlinearFactorGraph* graph) { |
| 24 | for (const Key key : keys) { |
| 25 | std::vector<NonlinearFactor::shared_ptr> factors; |
| 26 | for (auto i : variableIndex[key]) |
| 27 | if (graph->at(i)) { |
            factors.push_back(graph->at(i));
| 29 | graph->remove(i); |
| 30 | } |
        Cluster::addFactors(key, factors);
| 32 | } |
| 33 | } |
| 34 | |
| 35 | GaussianFactorGraph::shared_ptr linearize(const Values& values) { |
      return factors.linearize(values);
| 37 | } |
| 38 | |
| 39 | static NonlinearCluster* DownCast(const std::shared_ptr<Cluster>& cluster) { |
| 40 | auto nonlinearCluster = |
          std::dynamic_pointer_cast<NonlinearCluster>(cluster);
      if (!nonlinearCluster)
        throw std::runtime_error("Expected NonlinearCluster");
| 44 | return nonlinearCluster.get(); |
| 45 | } |
| 46 | |
    // Linearize the local cluster factors straight into a HessianFactor, which is returned.
    // No ordering is given, so a COLAMD ordering constrained to put the frontal keys first is used.
| 49 | HessianFactor::shared_ptr linearizeToHessianFactor( |
| 50 | const Values& values, |
| 51 | const NonlinearFactorGraph::Dampen& dampen = nullptr) const { |
      Ordering ordering =
          Ordering::ColamdConstrainedFirst(factors, orderedFrontalKeys, true);
| 54 | return factors.linearizeToHessianFactor(values, ordering, dampen); |
| 55 | } |
| 56 | |
    // Linearize the local cluster factors straight into a HessianFactor, which is returned.
    // Uses the given elimination ordering.
| 59 | HessianFactor::shared_ptr linearizeToHessianFactor( |
| 60 | const Values& values, const Ordering& ordering, |
| 61 | const NonlinearFactorGraph::Dampen& dampen = nullptr) const { |
| 62 | return factors.linearizeToHessianFactor(values, ordering, dampen); |
| 63 | } |
| 64 | |
| 65 | // Helper function: recursively eliminate subtree rooted at this Cluster into a Bayes net and factor on parent |
| 66 | // TODO(frank): Use TBB to support deep trees and parallelism |
| 67 | std::pair<GaussianBayesNet, HessianFactor::shared_ptr> linearizeAndEliminate( |
| 68 | const Values& values, |
| 69 | const HessianFactor::shared_ptr& localFactor) const { |
| 70 | // Get contributions f(front) from children, as well as p(children|front) |
| 71 | GaussianBayesNet bayesNet; |
| 72 | for (const auto& child : children) { |
        auto message = DownCast(child)->linearizeAndEliminate(values, &bayesNet);
        message->updateHessian(localFactor.get());
| 75 | } |
      auto gaussianConditional = localFactor->eliminateCholesky(orderedFrontalKeys);
      bayesNet.add(gaussianConditional);
| 78 | return {bayesNet, localFactor}; |
| 79 | } |
| 80 | |
| 81 | // Recursively eliminate subtree rooted at this Cluster into a Bayes net and factor on parent |
| 82 | // TODO(frank): Use TBB to support deep trees and parallelism |
| 83 | std::pair<GaussianBayesNet, HessianFactor::shared_ptr> linearizeAndEliminate( |
| 84 | const Values& values, |
| 85 | const NonlinearFactorGraph::Dampen& dampen = nullptr) const { |
| 86 | // Linearize and create HessianFactor f(front,separator) |
| 87 | HessianFactor::shared_ptr localFactor = linearizeToHessianFactor(values, dampen); |
| 88 | return linearizeAndEliminate(values, localFactor); |
| 89 | } |
| 90 | |
| 91 | // Recursively eliminate subtree rooted at this Cluster into a Bayes net and factor on parent |
| 92 | // TODO(frank): Use TBB to support deep trees and parallelism |
| 93 | std::pair<GaussianBayesNet, HessianFactor::shared_ptr> linearizeAndEliminate( |
| 94 | const Values& values, const Ordering& ordering, |
| 95 | const NonlinearFactorGraph::Dampen& dampen = nullptr) const { |
| 96 | // Linearize and create HessianFactor f(front,separator) |
| 97 | HessianFactor::shared_ptr localFactor = linearizeToHessianFactor(values, ordering, dampen); |
| 98 | return linearizeAndEliminate(values, localFactor); |
| 99 | } |
| 100 | |
| 101 | // Recursively eliminate subtree rooted at this Cluster |
| 102 | // Version that updates existing Bayes net and returns a new Hessian factor on parent clique |
    // A nullptr may be passed for bayesNet if only the new factor is of interest.
| 104 | HessianFactor::shared_ptr linearizeAndEliminate( |
| 105 | const Values& values, GaussianBayesNet* bayesNet, |
| 106 | const NonlinearFactorGraph::Dampen& dampen = nullptr) const { |
| 107 | auto bayesNet_newFactor_pair = linearizeAndEliminate(values, dampen); |
| 108 | if (bayesNet) { |
        bayesNet->push_back(bayesNet_newFactor_pair.first);
| 110 | } |
| 111 | return bayesNet_newFactor_pair.second; |
| 112 | } |
| 113 | |
| 114 | // Recursively eliminate subtree rooted at this Cluster |
| 115 | // Version that updates existing Bayes net and returns a new Hessian factor on parent clique |
    // A nullptr may be passed for bayesNet if only the new factor is of interest.
| 117 | HessianFactor::shared_ptr linearizeAndEliminate( |
| 118 | const Values& values, GaussianBayesNet* bayesNet, |
| 119 | const Ordering& ordering, |
| 120 | const NonlinearFactorGraph::Dampen& dampen = nullptr) const { |
| 121 | auto bayesNet_newFactor_pair = linearizeAndEliminate(values, ordering, dampen); |
| 122 | if (bayesNet) { |
        bayesNet->push_back(bayesNet_newFactor_pair.first);
| 124 | } |
| 125 | return bayesNet_newFactor_pair.second; |
| 126 | } |
| 127 | }; |
| 128 | |
  // Linearize and eliminate all root subtrees at the given values, solve with Cholesky,
  // and return the updated (retracted) values.
| 130 | Values updateCholesky(const Values& values) { |
| 131 | GaussianBayesNet bayesNet; |
| 132 | for (const auto& root : roots_) { |
      auto result = NonlinearCluster::DownCast(root)->linearizeAndEliminate(values);
      bayesNet.push_back(result.first);
| 135 | } |
| 136 | VectorValues delta = bayesNet.optimize(); |
| 137 | return values.retract(delta); |
| 138 | } |
| 139 | }; |
| 140 | } // namespace gtsam |
| 141 | |