From d82fcc0aa901bd47b4ae8a5345b6973a9f140104 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Jan 2023 09:50:47 -0500 Subject: [PATCH 1/7] DefaultOrderingFunc in EliminationTraits --- gtsam/discrete/DiscreteFactorGraph.h | 12 ++++++++++-- .../inference/EliminateableFactorGraph-inst.h | 18 ++++++++++++++++-- gtsam/linear/GaussianFactorGraph.h | 6 ++++++ gtsam/symbolic/SymbolicFactorGraph.h | 6 ++++++ 4 files changed, 38 insertions(+), 4 deletions(-) diff --git a/gtsam/discrete/DiscreteFactorGraph.h b/gtsam/discrete/DiscreteFactorGraph.h index 818eeda4e7..e665ea88b1 100644 --- a/gtsam/discrete/DiscreteFactorGraph.h +++ b/gtsam/discrete/DiscreteFactorGraph.h @@ -62,9 +62,17 @@ template<> struct EliminationTraits typedef DiscreteBayesTree BayesTreeType; ///< Type of Bayes tree typedef DiscreteJunctionTree JunctionTreeType; ///< Type of Junction tree /// The default dense elimination function - static std::pair, boost::shared_ptr > + static std::pair, + boost::shared_ptr > DefaultEliminate(const FactorGraphType& factors, const Ordering& keys) { - return EliminateDiscrete(factors, keys); } + return EliminateDiscrete(factors, keys); + } + /// The default ordering generation function + static Ordering DefaultOrderingFunc( + const FactorGraphType& graph, + boost::optional variableIndex) { + return Ordering::Colamd(*variableIndex); + } }; /* ************************************************************************* */ diff --git a/gtsam/inference/EliminateableFactorGraph-inst.h b/gtsam/inference/EliminateableFactorGraph-inst.h index 35e7505c95..bebce14cd2 100644 --- a/gtsam/inference/EliminateableFactorGraph-inst.h +++ b/gtsam/inference/EliminateableFactorGraph-inst.h @@ -44,9 +44,16 @@ namespace gtsam { if (orderingType == Ordering::METIS) { Ordering computedOrdering = Ordering::Metis(asDerived()); return eliminateSequential(computedOrdering, function, variableIndex); - } else { + } else if (orderingType == Ordering::COLAMD) { Ordering computedOrdering = Ordering::Colamd(*variableIndex); return eliminateSequential(computedOrdering, function, variableIndex); + } else if (orderingType == Ordering::NATURAL) { + Ordering computedOrdering = Ordering::Natural(asDerived()); + return eliminateSequential(computedOrdering, function, variableIndex); + } else { + Ordering computedOrdering = EliminationTraitsType::DefaultOrderingFunc( + asDerived(), variableIndex); + return eliminateSequential(computedOrdering, function, variableIndex); } } } @@ -100,9 +107,16 @@ namespace gtsam { if (orderingType == Ordering::METIS) { Ordering computedOrdering = Ordering::Metis(asDerived()); return eliminateMultifrontal(computedOrdering, function, variableIndex); - } else { + } else if (orderingType == Ordering::COLAMD) { Ordering computedOrdering = Ordering::Colamd(*variableIndex); return eliminateMultifrontal(computedOrdering, function, variableIndex); + } else if (orderingType == Ordering::NATURAL) { + Ordering computedOrdering = Ordering::Natural(asDerived()); + return eliminateMultifrontal(computedOrdering, function, variableIndex); + } else { + Ordering computedOrdering = EliminationTraitsType::DefaultOrderingFunc( + asDerived(), variableIndex); + return eliminateMultifrontal(computedOrdering, function, variableIndex); } } } diff --git a/gtsam/linear/GaussianFactorGraph.h b/gtsam/linear/GaussianFactorGraph.h index 0d5057aa88..c688eb13fd 100644 --- a/gtsam/linear/GaussianFactorGraph.h +++ b/gtsam/linear/GaussianFactorGraph.h @@ -54,6 +54,12 @@ namespace gtsam { static std::pair, boost::shared_ptr > 
DefaultEliminate(const FactorGraphType& factors, const Ordering& keys) { return EliminatePreferCholesky(factors, keys); } + /// The default ordering generation function + static Ordering DefaultOrderingFunc( + const FactorGraphType& graph, + boost::optional variableIndex) { + return Ordering::Colamd(*variableIndex); + } }; /* ************************************************************************* */ diff --git a/gtsam/symbolic/SymbolicFactorGraph.h b/gtsam/symbolic/SymbolicFactorGraph.h index 36379fd831..8bb75cb97e 100644 --- a/gtsam/symbolic/SymbolicFactorGraph.h +++ b/gtsam/symbolic/SymbolicFactorGraph.h @@ -46,6 +46,12 @@ namespace gtsam { static std::pair, boost::shared_ptr > DefaultEliminate(const FactorGraphType& factors, const Ordering& keys) { return EliminateSymbolic(factors, keys); } + /// The default ordering generation function + static Ordering DefaultOrderingFunc( + const FactorGraphType& graph, + boost::optional variableIndex) { + return Ordering::Colamd(*variableIndex); + } }; /* ************************************************************************* */ From e43fd3e8ca1e4a6d035a48d342e04f66488295cc Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Jan 2023 09:51:20 -0500 Subject: [PATCH 2/7] Make HybridOrdering a function and use it for Hybrid DefaultOrderingFunc --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 30 +++++++++++----------- gtsam/hybrid/HybridGaussianFactorGraph.h | 23 +++++++++++------ 2 files changed, 30 insertions(+), 23 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index f6b713a768..3be438e435 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -58,6 +58,21 @@ namespace gtsam { /// Specialize EliminateableFactorGraph for HybridGaussianFactorGraph: template class EliminateableFactorGraph; +/* ************************************************************************ */ +const Ordering HybridOrdering(const HybridGaussianFactorGraph &graph) { + KeySet discrete_keys = graph.discreteKeys(); + for (auto &factor : graph) { + for (const DiscreteKey &k : factor->discreteKeys()) { + discrete_keys.insert(k.first); + } + } + + const VariableIndex index(graph); + Ordering ordering = Ordering::ColamdConstrainedLast( + index, KeyVector(discrete_keys.begin(), discrete_keys.end()), true); + return ordering; +} + /* ************************************************************************ */ static GaussianFactorGraphTree addGaussian( const GaussianFactorGraphTree &gfgTree, @@ -448,21 +463,6 @@ void HybridGaussianFactorGraph::add(DecisionTreeFactor::shared_ptr factor) { FactorGraph::add(boost::make_shared(factor)); } -/* ************************************************************************ */ -const Ordering HybridGaussianFactorGraph::getHybridOrdering() const { - KeySet discrete_keys = discreteKeys(); - for (auto &factor : factors_) { - for (const DiscreteKey &k : factor->discreteKeys()) { - discrete_keys.insert(k.first); - } - } - - const VariableIndex index(factors_); - Ordering ordering = Ordering::ColamdConstrainedLast( - index, KeyVector(discrete_keys.begin(), discrete_keys.end()), true); - return ordering; -} - /* ************************************************************************ */ AlgebraicDecisionTree HybridGaussianFactorGraph::error( const VectorValues &continuousValues) const { diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 144d144bbd..44ef7d784d 100644 --- 
a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -53,6 +53,15 @@ GTSAM_EXPORT std::pair, HybridFactor::shared_ptr> EliminateHybrid(const HybridGaussianFactorGraph& factors, const Ordering& keys); +/** + * @brief Return a Colamd constrained ordering where the discrete keys are + * eliminated after the continuous keys. + * + * @return const Ordering + */ +GTSAM_EXPORT const Ordering +HybridOrdering(const HybridGaussianFactorGraph& graph); + /* ************************************************************************* */ template <> struct EliminationTraits { @@ -74,6 +83,12 @@ struct EliminationTraits { DefaultEliminate(const FactorGraphType& factors, const Ordering& keys) { return EliminateHybrid(factors, keys); } + /// The default ordering generation function + static Ordering DefaultOrderingFunc( + const FactorGraphType& graph, + boost::optional variableIndex) { + return HybridOrdering(graph); + } }; /** @@ -228,14 +243,6 @@ class GTSAM_EXPORT HybridGaussianFactorGraph */ double probPrime(const HybridValues& values) const; - /** - * @brief Return a Colamd constrained ordering where the discrete keys are - * eliminated after the continuous keys. - * - * @return const Ordering - */ - const Ordering getHybridOrdering() const; - /** * @brief Create a decision tree of factor graphs out of this hybrid factor * graph. From 74998336d96d7083a287f3b69a770155ed5437c6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Jan 2023 10:12:50 -0500 Subject: [PATCH 3/7] update tests --- gtsam/hybrid/tests/testHybridBayesNet.cpp | 15 +- gtsam/hybrid/tests/testHybridBayesTree.cpp | 15 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 5 +- .../tests/testHybridGaussianFactorGraph.cpp | 38 ++--- gtsam/hybrid/tests/testHybridPruning.cpp | 158 ++++++++++++++++++ .../hybrid/tests/testSerializationHybrid.cpp | 7 +- python/gtsam/tests/test_HybridFactorGraph.py | 66 ++++---- .../tests/test_HybridNonlinearFactorGraph.py | 19 ++- 8 files changed, 220 insertions(+), 103 deletions(-) create mode 100644 gtsam/hybrid/tests/testHybridPruning.cpp diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 0f0a85516d..3badc34a49 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -185,9 +185,8 @@ TEST(HybridBayesNet, OptimizeAssignment) { TEST(HybridBayesNet, Optimize) { Switching s(4, 1.0, 0.1, {0, 1, 2, 3}, "1/1 1/1"); - Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering(); HybridBayesNet::shared_ptr hybridBayesNet = - s.linearizedFactorGraph.eliminateSequential(hybridOrdering); + s.linearizedFactorGraph.eliminateSequential(); HybridValues delta = hybridBayesNet->optimize(); @@ -212,9 +211,8 @@ TEST(HybridBayesNet, Optimize) { TEST(HybridBayesNet, Error) { Switching s(3); - Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering(); HybridBayesNet::shared_ptr hybridBayesNet = - s.linearizedFactorGraph.eliminateSequential(hybridOrdering); + s.linearizedFactorGraph.eliminateSequential(); HybridValues delta = hybridBayesNet->optimize(); auto error_tree = hybridBayesNet->error(delta.continuous()); @@ -266,9 +264,8 @@ TEST(HybridBayesNet, Error) { TEST(HybridBayesNet, Prune) { Switching s(4); - Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering(); HybridBayesNet::shared_ptr hybridBayesNet = - s.linearizedFactorGraph.eliminateSequential(hybridOrdering); + s.linearizedFactorGraph.eliminateSequential(); HybridValues delta = 
hybridBayesNet->optimize(); @@ -284,9 +281,8 @@ TEST(HybridBayesNet, Prune) { TEST(HybridBayesNet, UpdateDiscreteConditionals) { Switching s(4); - Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering(); HybridBayesNet::shared_ptr hybridBayesNet = - s.linearizedFactorGraph.eliminateSequential(hybridOrdering); + s.linearizedFactorGraph.eliminateSequential(); size_t maxNrLeaves = 3; auto discreteConditionals = hybridBayesNet->discreteConditionals(); @@ -353,8 +349,7 @@ TEST(HybridBayesNet, Sampling) { // Create the factor graph from the nonlinear factor graph. HybridGaussianFactorGraph::shared_ptr fg = nfg.linearize(initial); // Eliminate into BN - Ordering ordering = fg->getHybridOrdering(); - HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering); + HybridBayesNet::shared_ptr bn = fg->eliminateSequential(); // Set up sampling std::mt19937_64 gen(11); diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp index b957a67d04..44a9688952 100644 --- a/gtsam/hybrid/tests/testHybridBayesTree.cpp +++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp @@ -37,9 +37,8 @@ using symbol_shorthand::X; TEST(HybridBayesTree, OptimizeMultifrontal) { Switching s(4); - Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering(); HybridBayesTree::shared_ptr hybridBayesTree = - s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering); + s.linearizedFactorGraph.eliminateMultifrontal(); HybridValues delta = hybridBayesTree->optimize(); VectorValues expectedValues; @@ -203,17 +202,7 @@ TEST(HybridBayesTree, Choose) { GaussianBayesTree gbt = isam.choose(assignment); - Ordering ordering; - ordering += X(0); - ordering += X(1); - ordering += X(2); - ordering += X(3); - ordering += M(0); - ordering += M(1); - ordering += M(2); - - // TODO(Varun) get segfault if ordering not provided - auto bayesTree = s.linearizedFactorGraph.eliminateMultifrontal(ordering); + auto bayesTree = s.linearizedFactorGraph.eliminateMultifrontal(); auto expected_gbt = bayesTree->choose(assignment); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 84f686c599..86cf3fad61 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -90,7 +90,7 @@ TEST(HybridEstimation, Full) { } HybridBayesNet::shared_ptr bayesNet = - graph.eliminateSequential(hybridOrdering); + graph.eliminateSequential(); EXPECT_LONGS_EQUAL(2 * K - 1, bayesNet->size()); @@ -481,8 +481,7 @@ TEST(HybridEstimation, CorrectnessViaSampling) { const auto fg = createHybridGaussianFactorGraph(); // 2. 
Eliminate into BN - const Ordering ordering = fg->getHybridOrdering(); - const HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering); + const HybridBayesNet::shared_ptr bn = fg->eliminateSequential(); // Set up sampling std::mt19937_64 rng(11); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 422cdf64ec..d8bf777622 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -130,8 +130,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt)); - auto result = - hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {M(1)})); + auto result = hfg.eliminateSequential(); auto dc = result->at(2)->asDiscrete(); DiscreteValues dv; @@ -161,8 +160,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { // Joint discrete probability table for c1, c2 hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); - HybridBayesNet::shared_ptr result = hfg.eliminateSequential( - Ordering::ColamdConstrainedLast(hfg, {M(1), M(2)})); + HybridBayesNet::shared_ptr result = hfg.eliminateSequential(); // There are 4 variables (2 continuous + 2 discrete) in the bayes net. EXPECT_LONGS_EQUAL(4, result->size()); @@ -187,8 +185,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { // variable throws segfault // hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); - HybridBayesTree::shared_ptr result = - hfg.eliminateMultifrontal(hfg.getHybridOrdering()); + HybridBayesTree::shared_ptr result = hfg.eliminateMultifrontal(); // The bayes tree should have 3 cliques EXPECT_LONGS_EQUAL(3, result->size()); @@ -218,7 +215,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { hfg.add(HybridDiscreteFactor(DecisionTreeFactor(m, {2, 8}))); // Get a constrained ordering keeping c1 last - auto ordering_full = hfg.getHybridOrdering(); + auto ordering_full = HybridOrdering(hfg); // Returns a Hybrid Bayes Tree with distribution P(x0|x1)P(x1|c1)P(c1) HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full); @@ -518,8 +515,7 @@ TEST(HybridGaussianFactorGraph, optimize) { hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt)); - auto result = - hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {C(1)})); + auto result = hfg.eliminateSequential(); HybridValues hv = result->optimize(); @@ -572,9 +568,7 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) { HybridGaussianFactorGraph graph = s.linearizedFactorGraph; - Ordering hybridOrdering = graph.getHybridOrdering(); - HybridBayesNet::shared_ptr hybridBayesNet = - graph.eliminateSequential(hybridOrdering); + HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential(); const HybridValues delta = hybridBayesNet->optimize(); const double error = graph.error(delta); @@ -593,9 +587,7 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { HybridGaussianFactorGraph graph = s.linearizedFactorGraph; - Ordering hybridOrdering = graph.getHybridOrdering(); - HybridBayesNet::shared_ptr hybridBayesNet = - graph.eliminateSequential(hybridOrdering); + HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential(); HybridValues delta = hybridBayesNet->optimize(); auto error_tree = graph.error(delta.continuous()); @@ -684,10 +676,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { expectedBayesNet.emplace_back(new DiscreteConditional(mode, "74/26")); // Test 
elimination - Ordering ordering; - ordering.push_back(X(0)); - ordering.push_back(M(0)); - const auto posterior = fg.eliminateSequential(ordering); + const auto posterior = fg.eliminateSequential(); EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01)); } @@ -719,10 +708,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { expectedBayesNet.emplace_back(new DiscreteConditional(mode, "23/77")); // Test elimination - Ordering ordering; - ordering.push_back(X(0)); - ordering.push_back(M(0)); - const auto posterior = fg.eliminateSequential(ordering); + const auto posterior = fg.eliminateSequential(); EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01)); } @@ -741,11 +727,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny22) { EXPECT_LONGS_EQUAL(5, fg.size()); // Test elimination - Ordering ordering; - ordering.push_back(X(0)); - ordering.push_back(M(0)); - ordering.push_back(M(1)); - const auto posterior = fg.eliminateSequential(ordering); + const auto posterior = fg.eliminateSequential(); // Compute the log-ratio between the Bayes net and the factor graph. auto compute_ratio = [&](HybridValues *sample) -> double { diff --git a/gtsam/hybrid/tests/testHybridPruning.cpp b/gtsam/hybrid/tests/testHybridPruning.cpp new file mode 100644 index 0000000000..2e564013dd --- /dev/null +++ b/gtsam/hybrid/tests/testHybridPruning.cpp @@ -0,0 +1,158 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridPruning.cpp + * @brief Unit tests for end-to-end Hybrid Estimation + * @author Varun Agrawal + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Include for test suite +#include + +#include "Switching.h" + +using namespace std; +using namespace gtsam; + +using symbol_shorthand::X; + +/****************************************************************************/ +// Test approximate inference with an additional pruning step. +TEST_DISABLED(HybridPruning, ISAM) { + size_t K = 16; + std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, + 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; + // Ground truth discrete seq + std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, + 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; + // Switching example of robot moving in 1D + // with given measurements and equal mode priors. 
+ Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); + HybridNonlinearISAM isam; + HybridNonlinearFactorGraph graph; + Values initial; + + // Add the X(0) prior + graph.push_back(switching.nonlinearFactorGraph.at(0)); + initial.insert(X(0), switching.linearizationPoint.at(X(0))); + + HybridGaussianFactorGraph linearized; + HybridGaussianFactorGraph bayesNet; + + for (size_t k = 1; k < K; k++) { + // Motion Model + graph.push_back(switching.nonlinearFactorGraph.at(k)); + // Measurement + graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1)); + + initial.insert(X(k), switching.linearizationPoint.at(X(k))); + + isam.update(graph, initial, 3); + graph.resize(0); + initial.clear(); + } + + Values result = isam.estimate(); + DiscreteValues assignment = isam.assignment(); + + DiscreteValues expected_discrete; + for (size_t k = 0; k < K - 1; k++) { + expected_discrete[M(k)] = discrete_seq[k]; + } + + std::cout << "\n\n\nNonlinear Version!!\n\n" << std::endl; + GTSAM_PRINT(expected_discrete); + GTSAM_PRINT(assignment); + EXPECT(assert_equal(expected_discrete, assignment)); + + Values expected_continuous; + for (size_t k = 0; k < K; k++) { + expected_continuous.insert(X(k), measurements[k]); + } + EXPECT(assert_equal(expected_continuous, result)); +} + +/****************************************************************************/ +// Test approximate inference with an additional pruning step. +TEST(HybridPruning, GaussianISAM) { + size_t K = 16; + std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, + 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; + // Ground truth discrete seq + std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, + 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; + // Switching example of robot moving in 1D + // with given measurements and equal mode priors. + Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); + HybridGaussianISAM isam; + HybridGaussianFactorGraph graph; + Values initial; + + // Add the X(0) prior + graph.push_back(switching.linearizedFactorGraph.at(0)); + initial.insert(X(0), switching.linearizationPoint.at(X(0))); + + HybridGaussianFactorGraph linearized; + HybridGaussianFactorGraph bayesNet; + + for (size_t k = 1; k < K; k++) { + // Motion Model + graph.push_back(switching.linearizedFactorGraph.at(k)); + // Measurement + graph.push_back(switching.linearizedFactorGraph.at(k + K - 1)); + + // initial.insert(X(k), switching.linearizationPoint.at(X(k))); + + isam.update(graph, 3); + graph.resize(0); + // initial.clear(); + } + + HybridValues values = isam.optimize(); + + DiscreteValues expected_discrete; + for (size_t k = 0; k < K - 1; k++) { + expected_discrete[M(k)] = discrete_seq[k]; + } + + EXPECT(assert_equal(expected_discrete, values.discrete())); + + // Values expected_continuous; + // for (size_t k = 0; k < K; k++) { + // expected_continuous.insert(X(k), measurements[k]); + // } + // EXPECT(assert_equal(expected_continuous, result)); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 941a1cdb3a..94f614bc75 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -150,8 +150,7 @@ TEST(HybridSerialization, GaussianMixture) { // Test HybridBayesNet serialization. 
TEST(HybridSerialization, HybridBayesNet) { Switching s(2); - Ordering ordering = s.linearizedFactorGraph.getHybridOrdering(); - HybridBayesNet hbn = *(s.linearizedFactorGraph.eliminateSequential(ordering)); + HybridBayesNet hbn = *(s.linearizedFactorGraph.eliminateSequential()); EXPECT(equalsObj(hbn)); EXPECT(equalsXML(hbn)); @@ -162,9 +161,7 @@ TEST(HybridSerialization, HybridBayesNet) { // Test HybridBayesTree serialization. TEST(HybridSerialization, HybridBayesTree) { Switching s(2); - Ordering ordering = s.linearizedFactorGraph.getHybridOrdering(); - HybridBayesTree hbt = - *(s.linearizedFactorGraph.eliminateMultifrontal(ordering)); + HybridBayesTree hbt = *(s.linearizedFactorGraph.eliminateMultifrontal()); EXPECT(equalsObj(hbt)); EXPECT(equalsXML(hbt)); diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 956a6f5720..e40d5bb9f6 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -25,7 +25,6 @@ class TestHybridGaussianFactorGraph(GtsamTestCase): """Unit tests for HybridGaussianFactorGraph.""" - def test_create(self): """Test construction of hybrid factor graph.""" model = noiseModel.Unit.Create(3) @@ -42,9 +41,7 @@ def test_create(self): hfg.push_back(jf2) hfg.push_back(gmf) - hbn = hfg.eliminateSequential( - Ordering.ColamdConstrainedLastHybridGaussianFactorGraph( - hfg, [C(0)])) + hbn = hfg.eliminateSequential() self.assertEqual(hbn.size(), 2) @@ -74,15 +71,14 @@ def test_optimize(self): dtf = gtsam.DecisionTreeFactor([(C(0), 2)], "0 1") hfg.push_back(dtf) - hbn = hfg.eliminateSequential( - Ordering.ColamdConstrainedLastHybridGaussianFactorGraph( - hfg, [C(0)])) + hbn = hfg.eliminateSequential() hv = hbn.optimize() self.assertEqual(hv.atDiscrete(C(0)), 1) @staticmethod - def tiny(num_measurements: int = 1, prior_mean: float = 5.0, + def tiny(num_measurements: int = 1, + prior_mean: float = 5.0, prior_sigma: float = 0.5) -> HybridBayesNet: """ Create a tiny two variable hybrid model which represents @@ -129,20 +125,23 @@ def test_evaluate(self): bayesNet2 = self.tiny(prior_sigma=5.0, num_measurements=1) # bn1: # 1/sqrt(2*pi*0.5^2) # bn2: # 1/sqrt(2*pi*5.0^2) - expected_ratio = np.sqrt(2*np.pi*5.0**2)/np.sqrt(2*np.pi*0.5**2) + expected_ratio = np.sqrt(2 * np.pi * 5.0**2) / np.sqrt( + 2 * np.pi * 0.5**2) mean0 = HybridValues() mean0.insert(X(0), [5.0]) mean0.insert(Z(0), [5.0]) mean0.insert(M(0), 0) self.assertAlmostEqual(bayesNet1.evaluate(mean0) / - bayesNet2.evaluate(mean0), expected_ratio, + bayesNet2.evaluate(mean0), + expected_ratio, delta=1e-9) mean1 = HybridValues() mean1.insert(X(0), [5.0]) mean1.insert(Z(0), [5.0]) mean1.insert(M(0), 1) self.assertAlmostEqual(bayesNet1.evaluate(mean1) / - bayesNet2.evaluate(mean1), expected_ratio, + bayesNet2.evaluate(mean1), + expected_ratio, delta=1e-9) @staticmethod @@ -171,11 +170,13 @@ def factor_graph_from_bayes_net(cls, bayesNet: HybridBayesNet, return fg @classmethod - def estimate_marginals(cls, target, proposal_density: HybridBayesNet, + def estimate_marginals(cls, + target, + proposal_density: HybridBayesNet, N=10000): """Do importance sampling to estimate discrete marginal P(mode).""" # Allocate space for marginals on mode. - marginals = np.zeros((2,)) + marginals = np.zeros((2, )) # Do importance sampling. 
for s in range(N): @@ -210,14 +211,15 @@ def unnormalized_posterior(x): return bayesNet.evaluate(x) # Create proposal density on (x0, mode), making sure it has same mean: - posterior_information = 1/(prior_sigma**2) + 1/(0.5**2) + posterior_information = 1 / (prior_sigma**2) + 1 / (0.5**2) posterior_sigma = posterior_information**(-0.5) - proposal_density = self.tiny( - num_measurements=0, prior_mean=5.0, prior_sigma=posterior_sigma) + proposal_density = self.tiny(num_measurements=0, + prior_mean=5.0, + prior_sigma=posterior_sigma) # Estimate marginals using importance sampling. - marginals = self.estimate_marginals( - target=unnormalized_posterior, proposal_density=proposal_density) + marginals = self.estimate_marginals(target=unnormalized_posterior, + proposal_density=proposal_density) # print(f"True mode: {values.atDiscrete(M(0))}") # print(f"P(mode=0; Z) = {marginals[0]}") # print(f"P(mode=1; Z) = {marginals[1]}") @@ -230,10 +232,7 @@ def unnormalized_posterior(x): self.assertEqual(fg.size(), 3) # Test elimination. - ordering = gtsam.Ordering() - ordering.push_back(X(0)) - ordering.push_back(M(0)) - posterior = fg.eliminateSequential(ordering) + posterior = fg.eliminateSequential() def true_posterior(x): """Posterior from elimination.""" @@ -241,8 +240,8 @@ def true_posterior(x): return posterior.evaluate(x) # Estimate marginals using importance sampling. - marginals = self.estimate_marginals( - target=true_posterior, proposal_density=proposal_density) + marginals = self.estimate_marginals(target=true_posterior, + proposal_density=proposal_density) # print(f"True mode: {values.atDiscrete(M(0))}") # print(f"P(mode=0; z0) = {marginals[0]}") # print(f"P(mode=1; z0) = {marginals[1]}") @@ -253,8 +252,7 @@ def true_posterior(x): @staticmethod def calculate_ratio(bayesNet: HybridBayesNet, - fg: HybridGaussianFactorGraph, - sample: HybridValues): + fg: HybridGaussianFactorGraph, sample: HybridValues): """Calculate ratio between Bayes net and factor graph.""" return bayesNet.evaluate(sample) / fg.probPrime(sample) if \ fg.probPrime(sample) > 0 else 0 @@ -285,14 +283,15 @@ def unnormalized_posterior(x): return bayesNet.evaluate(x) # Create proposal density on (x0, mode), making sure it has same mean: - posterior_information = 1/(prior_sigma**2) + 2.0/(3.0**2) + posterior_information = 1 / (prior_sigma**2) + 2.0 / (3.0**2) posterior_sigma = posterior_information**(-0.5) - proposal_density = self.tiny( - num_measurements=0, prior_mean=5.0, prior_sigma=posterior_sigma) + proposal_density = self.tiny(num_measurements=0, + prior_mean=5.0, + prior_sigma=posterior_sigma) # Estimate marginals using importance sampling. - marginals = self.estimate_marginals( - target=unnormalized_posterior, proposal_density=proposal_density) + marginals = self.estimate_marginals(target=unnormalized_posterior, + proposal_density=proposal_density) # print(f"True mode: {values.atDiscrete(M(0))}") # print(f"P(mode=0; Z) = {marginals[0]}") # print(f"P(mode=1; Z) = {marginals[1]}") @@ -319,10 +318,7 @@ def unnormalized_posterior(x): self.assertAlmostEqual(ratio, expected_ratio) # Test elimination. 
- ordering = gtsam.Ordering() - ordering.push_back(X(0)) - ordering.push_back(M(0)) - posterior = fg.eliminateSequential(ordering) + posterior = fg.eliminateSequential() # Calculate ratio between Bayes net probability and the factor graph: expected_ratio = self.calculate_ratio(posterior, fg, values) diff --git a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py index 3ac0d5c6f9..171fae60f2 100644 --- a/python/gtsam/tests/test_HybridNonlinearFactorGraph.py +++ b/python/gtsam/tests/test_HybridNonlinearFactorGraph.py @@ -14,22 +14,27 @@ import unittest -import gtsam import numpy as np from gtsam.symbol_shorthand import C, X from gtsam.utils.test_case import GtsamTestCase +import gtsam + class TestHybridGaussianFactorGraph(GtsamTestCase): """Unit tests for HybridGaussianFactorGraph.""" - def test_nonlinear_hybrid(self): nlfg = gtsam.HybridNonlinearFactorGraph() dk = gtsam.DiscreteKeys() dk.push_back((10, 2)) - nlfg.add(gtsam.BetweenFactorPoint3(1, 2, gtsam.Point3(1, 2, 3), gtsam.noiseModel.Diagonal.Variances([1, 1, 1]))) nlfg.add( - gtsam.PriorFactorPoint3(2, gtsam.Point3(1, 2, 3), gtsam.noiseModel.Diagonal.Variances([0.5, 0.5, 0.5]))) + gtsam.BetweenFactorPoint3( + 1, 2, gtsam.Point3(1, 2, 3), + gtsam.noiseModel.Diagonal.Variances([1, 1, 1]))) + nlfg.add( + gtsam.PriorFactorPoint3( + 2, gtsam.Point3(1, 2, 3), + gtsam.noiseModel.Diagonal.Variances([0.5, 0.5, 0.5]))) nlfg.push_back( gtsam.MixtureFactor([1], dk, [ gtsam.PriorFactorPoint3(1, gtsam.Point3(0, 0, 0), @@ -42,11 +47,7 @@ def test_nonlinear_hybrid(self): values.insert_point3(1, gtsam.Point3(0, 0, 0)) values.insert_point3(2, gtsam.Point3(2, 3, 1)) hfg = nlfg.linearize(values) - o = gtsam.Ordering() - o.push_back(1) - o.push_back(2) - o.push_back(10) - hbn = hfg.eliminateSequential(o) + hbn = hfg.eliminateSequential() hbv = hbn.optimize() self.assertEqual(hbv.atDiscrete(10), 0) From 7133236c65893801d0bf2908c36ed62d8b80840b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Jan 2023 10:19:05 -0500 Subject: [PATCH 4/7] common return statements --- gtsam/inference/EliminateableFactorGraph-inst.h | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/gtsam/inference/EliminateableFactorGraph-inst.h b/gtsam/inference/EliminateableFactorGraph-inst.h index bebce14cd2..9e01d07651 100644 --- a/gtsam/inference/EliminateableFactorGraph-inst.h +++ b/gtsam/inference/EliminateableFactorGraph-inst.h @@ -43,18 +43,15 @@ namespace gtsam { // VariableIndex already here because we computed one if needed in the previous 'if' block. if (orderingType == Ordering::METIS) { Ordering computedOrdering = Ordering::Metis(asDerived()); - return eliminateSequential(computedOrdering, function, variableIndex); } else if (orderingType == Ordering::COLAMD) { Ordering computedOrdering = Ordering::Colamd(*variableIndex); - return eliminateSequential(computedOrdering, function, variableIndex); } else if (orderingType == Ordering::NATURAL) { Ordering computedOrdering = Ordering::Natural(asDerived()); - return eliminateSequential(computedOrdering, function, variableIndex); } else { Ordering computedOrdering = EliminationTraitsType::DefaultOrderingFunc( asDerived(), variableIndex); - return eliminateSequential(computedOrdering, function, variableIndex); } + return eliminateSequential(computedOrdering, function, variableIndex); } } @@ -106,18 +103,15 @@ namespace gtsam { // the previous 'if' block. 
if (orderingType == Ordering::METIS) { Ordering computedOrdering = Ordering::Metis(asDerived()); - return eliminateMultifrontal(computedOrdering, function, variableIndex); } else if (orderingType == Ordering::COLAMD) { Ordering computedOrdering = Ordering::Colamd(*variableIndex); - return eliminateMultifrontal(computedOrdering, function, variableIndex); } else if (orderingType == Ordering::NATURAL) { Ordering computedOrdering = Ordering::Natural(asDerived()); - return eliminateMultifrontal(computedOrdering, function, variableIndex); } else { Ordering computedOrdering = EliminationTraitsType::DefaultOrderingFunc( asDerived(), variableIndex); - return eliminateMultifrontal(computedOrdering, function, variableIndex); } + return eliminateMultifrontal(computedOrdering, function, variableIndex); } } From 2f6d541656dc60bb2c04ed54775dc33700df67bb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Jan 2023 10:33:42 -0500 Subject: [PATCH 5/7] Revert "common return statements" This reverts commit 7133236c65893801d0bf2908c36ed62d8b80840b. --- gtsam/inference/EliminateableFactorGraph-inst.h | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/gtsam/inference/EliminateableFactorGraph-inst.h b/gtsam/inference/EliminateableFactorGraph-inst.h index 9e01d07651..bebce14cd2 100644 --- a/gtsam/inference/EliminateableFactorGraph-inst.h +++ b/gtsam/inference/EliminateableFactorGraph-inst.h @@ -43,15 +43,18 @@ namespace gtsam { // VariableIndex already here because we computed one if needed in the previous 'if' block. if (orderingType == Ordering::METIS) { Ordering computedOrdering = Ordering::Metis(asDerived()); + return eliminateSequential(computedOrdering, function, variableIndex); } else if (orderingType == Ordering::COLAMD) { Ordering computedOrdering = Ordering::Colamd(*variableIndex); + return eliminateSequential(computedOrdering, function, variableIndex); } else if (orderingType == Ordering::NATURAL) { Ordering computedOrdering = Ordering::Natural(asDerived()); + return eliminateSequential(computedOrdering, function, variableIndex); } else { Ordering computedOrdering = EliminationTraitsType::DefaultOrderingFunc( asDerived(), variableIndex); + return eliminateSequential(computedOrdering, function, variableIndex); } - return eliminateSequential(computedOrdering, function, variableIndex); } } @@ -103,15 +106,18 @@ namespace gtsam { // the previous 'if' block. 
if (orderingType == Ordering::METIS) { Ordering computedOrdering = Ordering::Metis(asDerived()); + return eliminateMultifrontal(computedOrdering, function, variableIndex); } else if (orderingType == Ordering::COLAMD) { Ordering computedOrdering = Ordering::Colamd(*variableIndex); + return eliminateMultifrontal(computedOrdering, function, variableIndex); } else if (orderingType == Ordering::NATURAL) { Ordering computedOrdering = Ordering::Natural(asDerived()); + return eliminateMultifrontal(computedOrdering, function, variableIndex); } else { Ordering computedOrdering = EliminationTraitsType::DefaultOrderingFunc( asDerived(), variableIndex); + return eliminateMultifrontal(computedOrdering, function, variableIndex); } - return eliminateMultifrontal(computedOrdering, function, variableIndex); } } From 3f201f3f4b9010c7741c574bc8cf81a68f74c0b0 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 6 Jan 2023 12:18:45 -0500 Subject: [PATCH 6/7] specify ordering to match that of HybridGaussianISAM --- gtsam/hybrid/tests/testHybridBayesTree.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp index 44a9688952..08f94d88e0 100644 --- a/gtsam/hybrid/tests/testHybridBayesTree.cpp +++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp @@ -202,7 +202,9 @@ TEST(HybridBayesTree, Choose) { GaussianBayesTree gbt = isam.choose(assignment); - auto bayesTree = s.linearizedFactorGraph.eliminateMultifrontal(); + // Specify ordering so it matches that of HybridGaussianISAM. + Ordering ordering(KeyVector{X(0), X(1), X(2), X(3), M(0), M(1), M(2)}); + auto bayesTree = s.linearizedFactorGraph.eliminateMultifrontal(ordering); auto expected_gbt = bayesTree->choose(assignment); From ec5149265a778554325cf4dfa5bda3285de4d76d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 7 Jan 2023 06:33:30 -0500 Subject: [PATCH 7/7] remove extraneous file --- gtsam/hybrid/tests/testHybridPruning.cpp | 158 ----------------------- 1 file changed, 158 deletions(-) delete mode 100644 gtsam/hybrid/tests/testHybridPruning.cpp diff --git a/gtsam/hybrid/tests/testHybridPruning.cpp b/gtsam/hybrid/tests/testHybridPruning.cpp deleted file mode 100644 index 2e564013dd..0000000000 --- a/gtsam/hybrid/tests/testHybridPruning.cpp +++ /dev/null @@ -1,158 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testHybridPruning.cpp - * @brief Unit tests for end-to-end Hybrid Estimation - * @author Varun Agrawal - */ - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -// Include for test suite -#include - -#include "Switching.h" - -using namespace std; -using namespace gtsam; - -using symbol_shorthand::X; - -/****************************************************************************/ -// Test approximate inference with an additional pruning step. 
-TEST_DISABLED(HybridPruning, ISAM) { - size_t K = 16; - std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, - 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; - // Ground truth discrete seq - std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, - 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; - // Switching example of robot moving in 1D - // with given measurements and equal mode priors. - Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); - HybridNonlinearISAM isam; - HybridNonlinearFactorGraph graph; - Values initial; - - // Add the X(0) prior - graph.push_back(switching.nonlinearFactorGraph.at(0)); - initial.insert(X(0), switching.linearizationPoint.at(X(0))); - - HybridGaussianFactorGraph linearized; - HybridGaussianFactorGraph bayesNet; - - for (size_t k = 1; k < K; k++) { - // Motion Model - graph.push_back(switching.nonlinearFactorGraph.at(k)); - // Measurement - graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1)); - - initial.insert(X(k), switching.linearizationPoint.at(X(k))); - - isam.update(graph, initial, 3); - graph.resize(0); - initial.clear(); - } - - Values result = isam.estimate(); - DiscreteValues assignment = isam.assignment(); - - DiscreteValues expected_discrete; - for (size_t k = 0; k < K - 1; k++) { - expected_discrete[M(k)] = discrete_seq[k]; - } - - std::cout << "\n\n\nNonlinear Version!!\n\n" << std::endl; - GTSAM_PRINT(expected_discrete); - GTSAM_PRINT(assignment); - EXPECT(assert_equal(expected_discrete, assignment)); - - Values expected_continuous; - for (size_t k = 0; k < K; k++) { - expected_continuous.insert(X(k), measurements[k]); - } - EXPECT(assert_equal(expected_continuous, result)); -} - -/****************************************************************************/ -// Test approximate inference with an additional pruning step. -TEST(HybridPruning, GaussianISAM) { - size_t K = 16; - std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, - 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; - // Ground truth discrete seq - std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, - 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; - // Switching example of robot moving in 1D - // with given measurements and equal mode priors. - Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); - HybridGaussianISAM isam; - HybridGaussianFactorGraph graph; - Values initial; - - // Add the X(0) prior - graph.push_back(switching.linearizedFactorGraph.at(0)); - initial.insert(X(0), switching.linearizationPoint.at(X(0))); - - HybridGaussianFactorGraph linearized; - HybridGaussianFactorGraph bayesNet; - - for (size_t k = 1; k < K; k++) { - // Motion Model - graph.push_back(switching.linearizedFactorGraph.at(k)); - // Measurement - graph.push_back(switching.linearizedFactorGraph.at(k + K - 1)); - - // initial.insert(X(k), switching.linearizationPoint.at(X(k))); - - isam.update(graph, 3); - graph.resize(0); - // initial.clear(); - } - - HybridValues values = isam.optimize(); - - DiscreteValues expected_discrete; - for (size_t k = 0; k < K - 1; k++) { - expected_discrete[M(k)] = discrete_seq[k]; - } - - EXPECT(assert_equal(expected_discrete, values.discrete())); - - // Values expected_continuous; - // for (size_t k = 0; k < K; k++) { - // expected_continuous.insert(X(k), measurements[k]); - // } - // EXPECT(assert_equal(expected_continuous, result)); -} - -/* ************************************************************************* */ -int main() { - TestResult tr; - return TestRegistry::runAllTests(tr); -} -/* ************************************************************************* */
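
---

Usage note (not part of the patch series): the sketch below illustrates the intended effect of these changes on callers, assuming an already-constructed `HybridGaussianFactorGraph` such as the linearized `Switching` graphs used in the tests above. The wrapper function name is illustrative only; the GTSAM calls are the ones exercised by the updated tests.

```cpp
// Illustrative sketch, not part of the patch. Assumes an existing
// HybridGaussianFactorGraph `fg` built elsewhere (e.g. by linearizing a
// HybridNonlinearFactorGraph, as in the tests above).
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>

void eliminateWithDefaultOrdering(const gtsam::HybridGaussianFactorGraph& fg) {
  // Before this series: callers computed the constrained ordering themselves
  // via fg.getHybridOrdering() and passed it in explicitly.
  //
  // After: calling eliminateSequential() with no ordering-type argument falls
  // through to EliminationTraits<HybridGaussianFactorGraph>::DefaultOrderingFunc,
  // which invokes the new free function HybridOrdering(fg) (COLAMD-constrained,
  // discrete keys eliminated last).
  gtsam::HybridBayesNet::shared_ptr posterior = fg.eliminateSequential();

  // The hybrid ordering is still available explicitly when a caller wants to
  // control or inspect it, e.g. for multifrontal elimination:
  const gtsam::Ordering ordering = gtsam::HybridOrdering(fg);
  gtsam::HybridBayesTree::shared_ptr tree = fg.eliminateMultifrontal(ordering);
}
```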