From 2927d92a52ceffaa4af1f743f3b58793ced382cd Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 28 May 2022 15:43:37 -0400 Subject: [PATCH 01/74] add HybridNonlinearFactor and nonlinear HybridFactorGraph --- gtsam/hybrid/HybridFactorGraph.h | 108 +++++++++++++++++++++ gtsam/hybrid/HybridNonlinearFactor.cpp | 45 +++++++++ gtsam/hybrid/HybridNonlinearFactor.h | 60 ++++++++++++ gtsam/hybrid/tests/testHybridFactorGraph.h | 50 ++++++++++ 4 files changed, 263 insertions(+) create mode 100644 gtsam/hybrid/HybridFactorGraph.h create mode 100644 gtsam/hybrid/HybridNonlinearFactor.cpp create mode 100644 gtsam/hybrid/HybridNonlinearFactor.h create mode 100644 gtsam/hybrid/tests/testHybridFactorGraph.h diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h new file mode 100644 index 0000000000..f415386ea4 --- /dev/null +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -0,0 +1,108 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridFactorGraph.h + * @brief Nonlinear hybrid factor graph that uses type erasure + * @author Varun Agrawal + * @date May 28, 2022 + */ + +#pragma once + +#include +#include +#include +#include + +namespace gtsam { + +/** + * Hybrid Factor Graph + * ----------------------- + * This is the non-linear version of a hybrid factor graph. + * Everything inside needs to be hybrid factor or hybrid conditional. + */ +class HybridFactorGraph : public FactorGraph { + public: + using Base = FactorGraph; + using This = HybridFactorGraph; ///< this class + using shared_ptr = boost::shared_ptr; ///< shared_ptr to This + + using Values = gtsam::Values; ///< backwards compatibility + using Indices = KeyVector; ///> map from keys to values + + /// @name Constructors + /// @{ + + HybridFactorGraph() = default; + + /** + * Implicit copy/downcast constructor to override explicit template container + * constructor. In BayesTree this is used for: + * `cachedSeparatorMarginal_.reset(*separatorMarginal)` + * */ + template + HybridFactorGraph(const FactorGraph& graph) : Base(graph) {} + + /// @} + + // Allow use of selected FactorGraph methods: + using Base::empty; + using Base::reserve; + using Base::size; + using FactorGraph::add; + using Base::operator[]; + + /// Add a Jacobian factor to the factor graph. + void add(JacobianFactor&& factor); + + /// Add a Jacobian factor as a shared ptr. + void add(boost::shared_ptr factor); + + /// Add a DecisionTreeFactor to the factor graph. + void add(DecisionTreeFactor&& factor); + + /// Add a DecisionTreeFactor as a shared ptr. + void add(boost::shared_ptr factor); + + /** + * @brief Linearize all the continuous factors in the + * NonlinearHybridFactorGraph. + * + * @param continuousValues: Dictionary of continuous values. 
+ * @return GaussianHybridFactorGraph::shared_ptr + */ + GaussianHybridFactorGraph::shared_ptr linearize( + const Values& continuousValues) const { + // create an empty linear FG + GaussianHybridFactorGraph::shared_ptr linearFG = + boost::make_shared(); + + linearFG->reserve(size()); + + // linearize all factors + for (const sharedFactor& factor : factors_) { + if (factor) { + if (auto nf = boost::dynamic_pointer_cast) { + (*linearFG) += factor->linearize(linearizationPoint); + } else if (auto hf = boost::dynamic_pointer_cast) { + if (hf->isContinuous()) { + } + } + + } else + (*linearFG) += GaussianFactor::shared_ptr(); + } + } +}; + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp new file mode 100644 index 0000000000..ee6e76431e --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -0,0 +1,45 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearFactor.cpp + * @date May 28, 2022 + * @author Varun Agrawal + */ + +#include + +#include + +namespace gtsam { + +/* ************************************************************************* */ +HybridNonlinearFactor::HybridNonlinearFactor(NonlinearFactor::shared_ptr other) + : Base(other->keys()), inner_(other) {} + +/* ************************************************************************* */ +HybridNonlinearFactor::HybridNonlinearFactor(NonlinearFactor &&nf) + : Base(nf.keys()), + inner_(boost::make_shared(std::move(nf))) {} + +/* ************************************************************************* */ +bool HybridNonlinearFactor::equals(const HybridFactor &lf, double tol) const { + return Base(lf, tol); +} + +/* ************************************************************************* */ +void HybridNonlinearFactor::print(const std::string &s, + const KeyFormatter &formatter) const { + HybridFactor::print(s, formatter); + inner_->print("inner: ", formatter); +}; + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h new file mode 100644 index 0000000000..74feb05312 --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -0,0 +1,60 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearFactor.h + * @date May 28, 2022 + * @author Varun Agrawal + */ + +#pragma once + +#include +#include + +namespace gtsam { + +/** + * A HybridNonlinearFactor is a layer over NonlinearFactor so that we do not + * have a diamond inheritance. 
+ */ +class HybridNonlinearFactor : public HybridFactor { + private: + NonlinearFactor::shared_ptr inner_; + + public: + using Base = HybridFactor; + using This = HybridNonlinearFactor; + using shared_ptr = boost::shared_ptr; + + // Explicit conversion from a shared ptr of GF + explicit HybridNonlinearFactor(NonlinearFactor::shared_ptr other); + + // Forwarding constructor from concrete NonlinearFactor + explicit HybridNonlinearFactor(NonlinearFactor &&jf); + + public: + /// @name Testable + /// @{ + + /// Check equality. + virtual bool equals(const HybridFactor &lf, double tol) const override; + + /// GTSAM print utility. + void print( + const std::string &s = "HybridNonlinearFactor\n", + const KeyFormatter &formatter = DefaultKeyFormatter) const override; + + /// @} + + NonlinearFactor::shared_ptr inner() const { return inner_; } +}; +} // namespace gtsam diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.h b/gtsam/hybrid/tests/testHybridFactorGraph.h new file mode 100644 index 0000000000..284e996ce6 --- /dev/null +++ b/gtsam/hybrid/tests/testHybridFactorGraph.h @@ -0,0 +1,50 @@ +/* ---------------------------------------------------------------------------- + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + * See LICENSE for the license information + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridFactorGraph.cpp + * @brief Unit tests for HybridFactorGraph + * @author Varun Agrawal + * @date May 2021 + */ + +#include +#include +#include + +/* **************************************************************************** + * Test that any linearizedFactorGraph gaussian factors are appended to the + * existing gaussian factor graph in the hybrid factor graph. 
+ */ +TEST(HybridFactorGraph, GaussianFactorGraph) { + // Initialize the hybrid factor graph + HybridFactorGraph fg; + + // Add a simple prior factor to the nonlinear factor graph + fg.emplace_shared>(X(0), 0, Isotropic::Sigma(1, 0.1)); + + // Add a linear factor to the nonlinear factor graph + fg.add(X(0), I_1x1, Vector1(5)); + + // Linearization point + Values linearizationPoint; + linearizationPoint.insert(X(0), 0); + + GaussianHybridFactorGraph ghfg = fg.linearize(linearizationPoint); + + // ghfg.push_back(ghfg.gaussianGraph().begin(), ghfg.gaussianGraph().end()); + + // EXPECT_LONGS_EQUAL(2, dcmfg.gaussianGraph().size()); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ From 78ea90bb27ea338f445100e7eabce390675747a3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 28 May 2022 18:44:09 -0400 Subject: [PATCH 02/74] Add MixtureFactor for nonlinear factor types --- gtsam/hybrid/HybridNonlinearFactor.cpp | 7 +- gtsam/hybrid/HybridNonlinearFactor.h | 3 - gtsam/hybrid/MixtureFactor.h | 240 +++++++++++++++++++++++++ 3 files changed, 241 insertions(+), 9 deletions(-) create mode 100644 gtsam/hybrid/MixtureFactor.h diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index ee6e76431e..69a9767900 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -25,14 +25,9 @@ namespace gtsam { HybridNonlinearFactor::HybridNonlinearFactor(NonlinearFactor::shared_ptr other) : Base(other->keys()), inner_(other) {} -/* ************************************************************************* */ -HybridNonlinearFactor::HybridNonlinearFactor(NonlinearFactor &&nf) - : Base(nf.keys()), - inner_(boost::make_shared(std::move(nf))) {} - /* ************************************************************************* */ bool HybridNonlinearFactor::equals(const HybridFactor &lf, double tol) const { - return Base(lf, tol); + return Base::equals(lf, tol); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 74feb05312..53d7bfbf47 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -38,9 +38,6 @@ class HybridNonlinearFactor : public HybridFactor { // Explicit conversion from a shared ptr of GF explicit HybridNonlinearFactor(NonlinearFactor::shared_ptr other); - // Forwarding constructor from concrete NonlinearFactor - explicit HybridNonlinearFactor(NonlinearFactor &&jf); - public: /// @name Testable /// @{ diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h new file mode 100644 index 0000000000..556402058e --- /dev/null +++ b/gtsam/hybrid/MixtureFactor.h @@ -0,0 +1,240 @@ +/* ---------------------------------------------------------------------------- + * Copyright 2020 The Ambitious Folks of the MRG + * See LICENSE for the license information + * -------------------------------------------------------------------------- */ + +/** + * @file MixtureFactor.h + * @brief Nonlinear Mixture factor of continuous and discrete. 
+ * @author Kevin Doherty, kdoherty@mit.edu + * @author Varun Agrawal + * @date December 2021 + */ + +#pragma once + +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +namespace gtsam { + +/** + * @brief Implementation of a discrete conditional mixture factor. Implements a + * joint discrete-continuous factor where the discrete variable serves to + * "select" a mixture component corresponding to a NonlinearFactor type + * of measurement. + */ +template +class MixtureFactor : public HybridFactor { + public: + using Base = HybridFactor; + using This = MixtureFactor; + using shared_ptr = boost::shared_ptr; + using sharedFactor = boost::shared_ptr; + + /// typedef for DecisionTree which has Keys as node labels and + /// NonlinearFactorType as leaf nodes. + using Factors = DecisionTree; + + private: + /// Decision tree of Gaussian factors indexed by discrete keys. + Factors factors_; + bool normalized_; + + public: + MixtureFactor() = default; + + /** + * @brief Construct from Decision tree. + * + * @param keys Vector of keys for continuous factors. + * @param discreteKeys Vector of discrete keys. + * @param factors Decision tree with of shared factors. + * @param normalized Flag indicating if the factor error is already + * normalized. + */ + MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, + const Factors& factors, bool normalized = false) + : Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {} + + /** + * @brief Convenience constructor that generates the underlying factor + * decision tree for us. + * + * Here it is important that the vector of factors has the correct number of + * elements based on the number of discrete keys and the cardinality of the + * keys, so that the decision tree is constructed appropriately. + * + * @param keys Vector of keys for continuous factors. + * @param discreteKeys Vector of discrete keys. + * @param factors Vector of shared pointers to factors. + * @param normalized Flag indicating if the factor error is already + * normalized. + */ + MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, + const std::vector& factors, + bool normalized = false) + : MixtureFactor(keys, discreteKeys, Factors(discreteKeys, factors), + normalized) {} + + ~MixtureFactor() = default; + + /** + * @brief Compute error of factor given both continuous and discrete values. + * + * @param continuousVals The continuous Values. + * @param discreteVals The discrete Values. + * @return double The error of this factor. + */ + double error(const Values& continuousVals, + const DiscreteValues& discreteVals) const { + // Retrieve the factor corresponding to the assignment in discreteVals. + auto factor = factors_(discreteVals); + // Compute the error for the selected factor + const double factorError = factor->error(continuousVals); + if (normalized_) return factorError; + return factorError + + this->nonlinearFactorLogNormalizingConstant(*factor, continuousVals); + } + + size_t dim() const { + // TODO(Varun) + throw std::runtime_error("MixtureFactor::dim not implemented."); + } + + /// Testable + /// @{ + + /// print to stdout + void print( + const std::string& s = "MixtureFactor", + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override { + std::cout << (s.empty() ? 
"" : s + " "); + std::cout << "("; + auto contKeys = keys(); + auto dKeys = discreteKeys(); + for (DiscreteKey key : dKeys) { + auto it = std::find(contKeys.begin(), contKeys.end(), key.first); + contKeys.erase(it); + } + for (Key key : contKeys) { + std::cout << " " << keyFormatter(key); + } + std::cout << ";"; + for (DiscreteKey key : dKeys) { + std::cout << " " << keyFormatter(key.first); + } + std::cout << " ) \n"; + auto valueFormatter = [](const sharedFactor& v) { + if (v) { + return (boost::format("Nonlinear factor on %d keys") % v->size()).str(); + } else { + return std::string("nullptr"); + } + }; + factors_.print("", keyFormatter, valueFormatter); + } + + /// Check equality + bool equals(const HybridFactor& other, double tol = 1e-9) const override { + // We attempt a dynamic cast from HybridFactor to MixtureFactor. If it + // fails, return false. + if (!dynamic_cast(&other)) return false; + + // If the cast is successful, we'll properly construct a MixtureFactor + // object from `other` + const MixtureFactor& f(static_cast(other)); + + // Ensure that this MixtureFactor and `f` have the same `factors_`. + auto compare = [tol](const sharedFactor& a, const sharedFactor& b) { + return traits::Equals(*a, *b, tol); + }; + if (!factors_.equals(f.factors_, compare)) return false; + + // If everything above passes, and the keys_, discreteKeys_ and normalized_ + // member variables are identical, return true. + return (std::equal(keys_.begin(), keys_.end(), f.keys().begin()) && + (discreteKeys_ == f.discreteKeys_) && + (normalized_ == f.normalized_)); + } + + /// @} + + /// Linearize specific nonlinear factors based on the assignment in + /// discreteValues. + GaussianFactor::shared_ptr linearize( + const Values& continuousVals, const DiscreteValues& discreteVals) const { + auto factor = factors_(discreteVals); + return factor->linearize(continuousVals); + } + + /// Linearize all the continuous factors to get a GaussianMixtureFactor. + boost::shared_ptr linearize( + const Values& continuousVals) const { + // functional to linearize each factor in the decision tree + auto linearizeDT = [continuousVals](const sharedFactor& factor) { + return factor->linearize(continuousVals); + }; + + DecisionTree linearized_factors( + factors_, linearizeDT); + + return boost::make_shared(keys_, discreteKeys_, + linearized_factors); + } + + /** + * If the component factors are not already normalized, we want to compute + * their normalizing constants so that the resulting joint distribution is + * appropriately computed. Remember, this is the _negative_ normalizing + * constant for the measurement likelihood (since we are minimizing the + * _negative_ log-likelihood). + */ + double nonlinearFactorLogNormalizingConstant( + const NonlinearFactorType& factor, const Values& values) const { + // Information matrix (inverse covariance matrix) for the factor. + Matrix infoMat; + + // NOTE: This is sloppy (and mallocs!), is there a cleaner way? 
+ auto factorPtr = boost::make_shared(factor); + + // If this is a NoiseModelFactor, we'll use its noiseModel to + // otherwise noiseModelFactor will be nullptr + auto noiseModelFactor = + boost::dynamic_pointer_cast(factorPtr); + if (noiseModelFactor) { + // If dynamic cast to NoiseModelFactor succeeded, see if the noise model + // is Gaussian + auto noiseModel = noiseModelFactor->noiseModel(); + + auto gaussianNoiseModel = + boost::dynamic_pointer_cast(noiseModel); + if (gaussianNoiseModel) { + // If the noise model is Gaussian, retrieve the information matrix + infoMat = gaussianNoiseModel->information(); + } else { + // If the factor is not a Gaussian factor, we'll linearize it to get + // something with a normalized noise model + // TODO(kevin): does this make sense to do? I think maybe not in + // general? Should we just yell at the user? + auto gaussianFactor = factor.linearize(values); + infoMat = gaussianFactor->information(); + } + } + + // Compute the (negative) log of the normalizing constant + return -(factor.dim() * log(2.0 * M_PI) / 2.0) - + (log(infoMat.determinant()) / 2.0); + } +}; + +} // namespace gtsam From e91a35453a5fdbeb26b9a57c405de3a5a1e94898 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 28 May 2022 20:44:30 -0400 Subject: [PATCH 03/74] convert to cpp --- .../{testHybridFactorGraph.h => testHybridFactorGraph.cpp} | 7 +++++++ 1 file changed, 7 insertions(+) rename gtsam/hybrid/tests/{testHybridFactorGraph.h => testHybridFactorGraph.cpp} (91%) diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.h b/gtsam/hybrid/tests/testHybridFactorGraph.cpp similarity index 91% rename from gtsam/hybrid/tests/testHybridFactorGraph.h rename to gtsam/hybrid/tests/testHybridFactorGraph.cpp index 284e996ce6..36f5f38ae6 100644 --- a/gtsam/hybrid/tests/testHybridFactorGraph.h +++ b/gtsam/hybrid/tests/testHybridFactorGraph.cpp @@ -13,9 +13,14 @@ * @date May 2021 */ +#include +#include #include #include #include +#include + +using namespace gtsam; /* **************************************************************************** * Test that any linearizedFactorGraph gaussian factors are appended to the @@ -37,6 +42,8 @@ TEST(HybridFactorGraph, GaussianFactorGraph) { GaussianHybridFactorGraph ghfg = fg.linearize(linearizationPoint); + EXPECT_LONGS_EQUAL(ghfg); + // ghfg.push_back(ghfg.gaussianGraph().begin(), ghfg.gaussianGraph().end()); // EXPECT_LONGS_EQUAL(2, dcmfg.gaussianGraph().size()); From 9cbd2ef477f40f47ba9281cd2a09aaf1af853eb7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 29 May 2022 12:44:56 -0400 Subject: [PATCH 04/74] Base Hybrid Factor Graph --- gtsam/hybrid/HybridFactorGraph.h | 105 +++++++++++++++++++++---------- 1 file changed, 72 insertions(+), 33 deletions(-) diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h index f415386ea4..f19eb0faff 100644 --- a/gtsam/hybrid/HybridFactorGraph.h +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -20,15 +20,19 @@ #include #include +#include +#include #include #include +#include +#include namespace gtsam { /** * Hybrid Factor Graph * ----------------------- - * This is the non-linear version of a hybrid factor graph. + * This is the base hybrid factor graph. * Everything inside needs to be hybrid factor or hybrid conditional. 
*/ class HybridFactorGraph : public FactorGraph { @@ -40,9 +44,22 @@ class HybridFactorGraph : public FactorGraph { using Values = gtsam::Values; ///< backwards compatibility using Indices = KeyVector; ///> map from keys to values + protected: + /// Check if FACTOR type is derived from DiscreteFactor. + template + using IsDiscrete = typename std::enable_if< + std::is_base_of::value>::type; + + /// Check if FACTOR type is derived from HybridFactor. + template + using IsHybrid = typename std::enable_if< + std::is_base_of::value>::type; + + public: /// @name Constructors /// @{ + /// Default constructor HybridFactorGraph() = default; /** @@ -61,46 +78,68 @@ class HybridFactorGraph : public FactorGraph { using Base::size; using FactorGraph::add; using Base::operator[]; + using Base::resize; + + /** + * Add a discrete factor *pointer* to the internal discrete graph + * @param discreteFactor - boost::shared_ptr to the factor to add + */ + template + IsDiscrete push_discrete( + const boost::shared_ptr& discreteFactor) { + Base::push_back(boost::make_shared(discreteFactor)); + } - /// Add a Jacobian factor to the factor graph. - void add(JacobianFactor&& factor); + /** + * Add a discrete-continuous (Hybrid) factor *pointer* to the graph + * @param hybridFactor - boost::shared_ptr to the factor to add + */ + template + IsHybrid push_hybrid(const boost::shared_ptr& hybridFactor) { + Base::push_back(boost::make_shared(factor)); + } - /// Add a Jacobian factor as a shared ptr. - void add(boost::shared_ptr factor); + /// delete emplace_shared. + template + void emplace_shared(Args&&... args) = delete; - /// Add a DecisionTreeFactor to the factor graph. - void add(DecisionTreeFactor&& factor); + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsDiscrete emplace_discrete(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_discrete(factor); + } - /// Add a DecisionTreeFactor as a shared ptr. - void add(boost::shared_ptr factor); + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsHybrid emplace_hybrid(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_hybrid(factor); + } /** - * @brief Linearize all the continuous factors in the - * NonlinearHybridFactorGraph. + * @brief Add a single factor shared pointer to the hybrid factor graph. + * Dynamically handles the factor type and assigns it to the correct + * underlying container. * - * @param continuousValues: Dictionary of continuous values. - * @return GaussianHybridFactorGraph::shared_ptr + * @param sharedFactor The factor to add to this factor graph. 
*/ - GaussianHybridFactorGraph::shared_ptr linearize( - const Values& continuousValues) const { - // create an empty linear FG - GaussianHybridFactorGraph::shared_ptr linearFG = - boost::make_shared(); - - linearFG->reserve(size()); - - // linearize all factors - for (const sharedFactor& factor : factors_) { - if (factor) { - if (auto nf = boost::dynamic_pointer_cast) { - (*linearFG) += factor->linearize(linearizationPoint); - } else if (auto hf = boost::dynamic_pointer_cast) { - if (hf->isContinuous()) { - } - } - - } else - (*linearFG) += GaussianFactor::shared_ptr(); + void push_back(const SharedFactor& sharedFactor) { + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_discrete(p); + } + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_hybrid(p); + } + } + + /** Constructor from iterator over factors (shared_ptr or plain objects) */ + template + void push_back(ITERATOR firstFactor, ITERATOR lastFactor) { + for (auto&& it = firstFactor; it != lastFactor; it++) { + push_back(*it); } } }; From 01b9a65e1e522d75db2a3dd69e31a5115024183a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 29 May 2022 12:47:45 -0400 Subject: [PATCH 05/74] make GaussianMixtureFactor a subclass of HybridGaussianFactor --- gtsam/hybrid/GaussianMixtureFactor.h | 8 ++++---- gtsam/hybrid/HybridGaussianFactor.h | 10 ++++++++-- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index b2fbe4aefd..bd2e079cba 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -22,7 +22,7 @@ #include #include -#include +#include #include namespace gtsam { @@ -40,9 +40,9 @@ using GaussianFactorVector = std::vector; * of discrete variables indexes to the continuous gaussian distribution. * */ -class GaussianMixtureFactor : public HybridFactor { +class GaussianMixtureFactor : public HybridGaussianFactor { public: - using Base = HybridFactor; + using Base = HybridGaussianFactor; using This = GaussianMixtureFactor; using shared_ptr = boost::shared_ptr; @@ -93,7 +93,7 @@ class GaussianMixtureFactor : public HybridFactor { bool equals(const HybridFactor &lf, double tol = 1e-9) const override; void print( - const std::string &s = "HybridFactor\n", + const std::string &s = "GaussianMixtureFactor\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; /// @} diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 8d457e7783..4c8ede12cc 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -27,7 +27,7 @@ namespace gtsam { * A HybridGaussianFactor is a layer over GaussianFactor so that we do not have * a diamond inheritance. */ -class HybridGaussianFactor : public HybridFactor { +class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { private: GaussianFactor::shared_ptr inner_; @@ -36,6 +36,12 @@ class HybridGaussianFactor : public HybridFactor { using This = HybridGaussianFactor; using shared_ptr = boost::shared_ptr; + HybridGaussianFactor() = default; + + HybridGaussianFactor(const KeyVector &continuousKeys, + const DiscreteKeys &discreteKeys) + : Base(continuousKeys, discreteKeys) {} + // Explicit conversion from a shared ptr of GF explicit HybridGaussianFactor(GaussianFactor::shared_ptr other); @@ -51,7 +57,7 @@ class HybridGaussianFactor : public HybridFactor { /// GTSAM print utility. 
void print( - const std::string &s = "HybridFactor\n", + const std::string &s = "HybridGaussianFactor\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; /// @} From fe0d666b3a56d62daf44689eb01a1ceb4a8be8a1 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 29 May 2022 13:15:04 -0400 Subject: [PATCH 06/74] HybridFactorGraph fixes --- gtsam/hybrid/HybridFactorGraph.h | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h index f19eb0faff..ef27caec8a 100644 --- a/gtsam/hybrid/HybridFactorGraph.h +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -18,17 +18,17 @@ #pragma once -#include +#include +#include #include -#include -#include #include #include -#include #include namespace gtsam { +using SharedFactor = boost::shared_ptr; + /** * Hybrid Factor Graph * ----------------------- @@ -96,7 +96,7 @@ class HybridFactorGraph : public FactorGraph { */ template IsHybrid push_hybrid(const boost::shared_ptr& hybridFactor) { - Base::push_back(boost::make_shared(factor)); + Base::push_back(hybridFactor); } /// delete emplace_shared. From cdd030b88b71f5e46132c25fb232c84028132ea7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 29 May 2022 13:32:21 -0400 Subject: [PATCH 07/74] Make MixtureFactor only work with NonlinearFactors and make some improvements --- gtsam/hybrid/MixtureFactor.h | 42 +++++++++++++++++++----------------- 1 file changed, 22 insertions(+), 20 deletions(-) diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h index 556402058e..b2423d20eb 100644 --- a/gtsam/hybrid/MixtureFactor.h +++ b/gtsam/hybrid/MixtureFactor.h @@ -15,7 +15,7 @@ #include #include -#include +#include #include #include @@ -28,21 +28,27 @@ namespace gtsam { /** - * @brief Implementation of a discrete conditional mixture factor. Implements a - * joint discrete-continuous factor where the discrete variable serves to - * "select" a mixture component corresponding to a NonlinearFactor type - * of measurement. + * @brief Implementation of a discrete conditional mixture factor. + * + * Implements a joint discrete-continuous factor where the discrete variable + * serves to "select" a mixture component corresponding to a NonlinearFactor + * type of measurement. + * + * This class stores all factors as HybridFactors which can then be typecast to + * one of (NonlinearFactor, GaussianFactor) which can then be checked to perform + * the correct operation. */ -template -class MixtureFactor : public HybridFactor { +class MixtureFactor : public HybridNonlinearFactor { public: using Base = HybridFactor; using This = MixtureFactor; using shared_ptr = boost::shared_ptr; - using sharedFactor = boost::shared_ptr; + using sharedFactor = boost::shared_ptr; - /// typedef for DecisionTree which has Keys as node labels and - /// NonlinearFactorType as leaf nodes. + /** + * @brief typedef for DecisionTree which has Keys as node labels and + * NonlinearFactor as leaf nodes. + */ using Factors = DecisionTree; private: @@ -103,7 +109,7 @@ class MixtureFactor : public HybridFactor { const double factorError = factor->error(continuousVals); if (normalized_) return factorError; return factorError + - this->nonlinearFactorLogNormalizingConstant(*factor, continuousVals); + this->nonlinearFactorLogNormalizingConstant(factor, continuousVals); } size_t dim() const { @@ -156,7 +162,7 @@ class MixtureFactor : public HybridFactor { // Ensure that this MixtureFactor and `f` have the same `factors_`. 
auto compare = [tol](const sharedFactor& a, const sharedFactor& b) { - return traits::Equals(*a, *b, tol); + return traits::Equals(*a, *b, tol); }; if (!factors_.equals(f.factors_, compare)) return false; @@ -199,19 +205,15 @@ class MixtureFactor : public HybridFactor { * constant for the measurement likelihood (since we are minimizing the * _negative_ log-likelihood). */ - double nonlinearFactorLogNormalizingConstant( - const NonlinearFactorType& factor, const Values& values) const { + double nonlinearFactorLogNormalizingConstant(const sharedFactor& factor, + const Values& values) const { // Information matrix (inverse covariance matrix) for the factor. Matrix infoMat; - // NOTE: This is sloppy (and mallocs!), is there a cleaner way? - auto factorPtr = boost::make_shared(factor); - // If this is a NoiseModelFactor, we'll use its noiseModel to // otherwise noiseModelFactor will be nullptr - auto noiseModelFactor = - boost::dynamic_pointer_cast(factorPtr); - if (noiseModelFactor) { + if (auto noiseModelFactor = + boost::dynamic_pointer_cast(factor);) { // If dynamic cast to NoiseModelFactor succeeded, see if the noise model // is Gaussian auto noiseModel = noiseModelFactor->noiseModel(); From 08fab8a9362d4cc12365afb76233eae85fa2db6c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 29 May 2022 13:33:04 -0400 Subject: [PATCH 08/74] HybridNonlinearFactor linearize method --- gtsam/hybrid/HybridNonlinearFactor.h | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 53d7bfbf47..504992e22d 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -18,6 +18,7 @@ #pragma once #include +#include #include namespace gtsam { @@ -53,5 +54,10 @@ class HybridNonlinearFactor : public HybridFactor { /// @} NonlinearFactor::shared_ptr inner() const { return inner_; } + + /// Linearize to a HybridGaussianFactor at the linearization point `c`. + boost::shared_ptr linearize(const Values &c) const { + return boost::make_shared(inner_->linearize(c)); + } }; } // namespace gtsam From 9279bd607648098043e8c1f60f533e0a31b4412c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 29 May 2022 13:33:19 -0400 Subject: [PATCH 09/74] push_back for GaussianHybridFactor --- gtsam/hybrid/GaussianHybridFactorGraph.h | 1 + 1 file changed, 1 insertion(+) diff --git a/gtsam/hybrid/GaussianHybridFactorGraph.h b/gtsam/hybrid/GaussianHybridFactorGraph.h index c8e0718fc4..23d5918d46 100644 --- a/gtsam/hybrid/GaussianHybridFactorGraph.h +++ b/gtsam/hybrid/GaussianHybridFactorGraph.h @@ -100,6 +100,7 @@ class GaussianHybridFactorGraph /// @} using FactorGraph::add; + using FactorGraph::push_back; /// Add a Jacobian factor to the factor graph. 
void add(JacobianFactor&& factor); From 3274cb12a411260e2d3ab1640362636eb651ed0b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 29 May 2022 13:33:56 -0400 Subject: [PATCH 10/74] clean up testHybridFactorGraph, need to add more tests --- gtsam/hybrid/tests/testHybridFactorGraph.cpp | 26 ++++++-------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.cpp b/gtsam/hybrid/tests/testHybridFactorGraph.cpp index 36f5f38ae6..f5b4ec0b19 100644 --- a/gtsam/hybrid/tests/testHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridFactorGraph.cpp @@ -18,35 +18,23 @@ #include #include #include +#include #include +using namespace std; using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::L; +using symbol_shorthand::M; +using symbol_shorthand::X; /* **************************************************************************** * Test that any linearizedFactorGraph gaussian factors are appended to the * existing gaussian factor graph in the hybrid factor graph. */ -TEST(HybridFactorGraph, GaussianFactorGraph) { +TEST(HybridFactorGraph, Constructor) { // Initialize the hybrid factor graph HybridFactorGraph fg; - - // Add a simple prior factor to the nonlinear factor graph - fg.emplace_shared>(X(0), 0, Isotropic::Sigma(1, 0.1)); - - // Add a linear factor to the nonlinear factor graph - fg.add(X(0), I_1x1, Vector1(5)); - - // Linearization point - Values linearizationPoint; - linearizationPoint.insert(X(0), 0); - - GaussianHybridFactorGraph ghfg = fg.linearize(linearizationPoint); - - EXPECT_LONGS_EQUAL(ghfg); - - // ghfg.push_back(ghfg.gaussianGraph().begin(), ghfg.gaussianGraph().end()); - - // EXPECT_LONGS_EQUAL(2, dcmfg.gaussianGraph().size()); } /* ************************************************************************* */ From 6c36b2c35598790d3e181b8fa5a87064388507a7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 29 May 2022 14:51:30 -0400 Subject: [PATCH 11/74] GaussianHybridFactorGraph inherits from HybridFactorGraph --- gtsam/hybrid/GaussianHybridFactorGraph.h | 14 ++++++++++---- gtsam/hybrid/HybridFactorGraph.h | 3 ++- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/GaussianHybridFactorGraph.h b/gtsam/hybrid/GaussianHybridFactorGraph.h index 23d5918d46..341a0838e5 100644 --- a/gtsam/hybrid/GaussianHybridFactorGraph.h +++ b/gtsam/hybrid/GaussianHybridFactorGraph.h @@ -19,6 +19,7 @@ #pragma once #include +#include #include #include #include @@ -71,10 +72,10 @@ struct EliminationTraits { * Everything inside needs to be hybrid factor or hybrid conditional. */ class GaussianHybridFactorGraph - : public FactorGraph, + : public HybridFactorGraph, public EliminateableFactorGraph { public: - using Base = FactorGraph; + using Base = HybridFactorGraph; using This = GaussianHybridFactorGraph; ///< this class using BaseEliminateable = EliminateableFactorGraph; ///< for elimination @@ -99,8 +100,13 @@ class GaussianHybridFactorGraph /// @} - using FactorGraph::add; - using FactorGraph::push_back; + using Base::empty; + using Base::reserve; + using Base::size; + using Base::operator[]; + using Base::add; + using Base::push_back; + using Base::resize; /// Add a Jacobian factor to the factor graph. 
void add(JacobianFactor&& factor); diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h index ef27caec8a..892136b86c 100644 --- a/gtsam/hybrid/HybridFactorGraph.h +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -76,8 +76,9 @@ class HybridFactorGraph : public FactorGraph { using Base::empty; using Base::reserve; using Base::size; - using FactorGraph::add; using Base::operator[]; + using Base::add; + using Base::push_back; using Base::resize; /** From 85f4b4892586ad166dffa83d8e0b914c87868097 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 30 May 2022 08:21:46 -0400 Subject: [PATCH 12/74] Improvements to GaussianHybridFactorGraph, make MixtureFactor a subclass of HybridFactor --- gtsam/hybrid/GaussianHybridFactorGraph.h | 51 ++++++++++++++++++++++++ gtsam/hybrid/HybridNonlinearFactor.cpp | 3 +- gtsam/hybrid/HybridNonlinearFactor.h | 4 +- gtsam/hybrid/MixtureFactor.h | 8 ++-- 4 files changed, 59 insertions(+), 7 deletions(-) diff --git a/gtsam/hybrid/GaussianHybridFactorGraph.h b/gtsam/hybrid/GaussianHybridFactorGraph.h index 341a0838e5..b38a1ebd8d 100644 --- a/gtsam/hybrid/GaussianHybridFactorGraph.h +++ b/gtsam/hybrid/GaussianHybridFactorGraph.h @@ -20,9 +20,11 @@ #include #include +#include #include #include #include +#include namespace gtsam { @@ -74,6 +76,12 @@ struct EliminationTraits { class GaussianHybridFactorGraph : public HybridFactorGraph, public EliminateableFactorGraph { + protected: + /// Check if FACTOR type is derived from GaussianFactor. + template + using IsGaussian = typename std::enable_if< + std::is_base_of::value>::type; + public: using Base = HybridFactorGraph; using This = GaussianHybridFactorGraph; ///< this class @@ -119,6 +127,49 @@ class GaussianHybridFactorGraph /// Add a DecisionTreeFactor as a shared ptr. void add(boost::shared_ptr factor); + + /** + * Add a gaussian factor *pointer* to the internal gaussian factor graph + * @param gaussianFactor - boost::shared_ptr to the factor to add + */ + template + IsGaussian push_gaussian( + const boost::shared_ptr& gaussianFactor) { + Base::Base::push_back( + boost::make_shared(gaussianFactor)); + } + + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsGaussian emplace_gaussian(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_gaussian(factor); + } + + /** + * @brief Add a single factor shared pointer to the hybrid factor graph. + * Dynamically handles the factor type and assigns it to the correct + * underlying container. + * + * @param sharedFactor The factor to add to this factor graph. + */ + void push_back(const SharedFactor& sharedFactor) { + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_gaussian(p); + } else { + Base::push_back(sharedFactor); + } + } + + /** + * @brief Push back for Gaussian Factor specifically. + * + * @param sharedFactor Shared ptr to a gaussian factor. 
+ */ + void push_back(const GaussianFactor::shared_ptr& sharedFactor) { + push_gaussian(sharedFactor); + } }; } // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index 69a9767900..0938fd2b12 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -22,7 +22,8 @@ namespace gtsam { /* ************************************************************************* */ -HybridNonlinearFactor::HybridNonlinearFactor(NonlinearFactor::shared_ptr other) +HybridNonlinearFactor::HybridNonlinearFactor( + const NonlinearFactor::shared_ptr &other) : Base(other->keys()), inner_(other) {} /* ************************************************************************* */ diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 504992e22d..4a0c0fd9c1 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -36,8 +36,8 @@ class HybridNonlinearFactor : public HybridFactor { using This = HybridNonlinearFactor; using shared_ptr = boost::shared_ptr; - // Explicit conversion from a shared ptr of GF - explicit HybridNonlinearFactor(NonlinearFactor::shared_ptr other); + // Explicit conversion from a shared ptr of NonlinearFactor + explicit HybridNonlinearFactor(const NonlinearFactor::shared_ptr &other); public: /// @name Testable diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h index b2423d20eb..d2d1a8d74f 100644 --- a/gtsam/hybrid/MixtureFactor.h +++ b/gtsam/hybrid/MixtureFactor.h @@ -38,7 +38,7 @@ namespace gtsam { * one of (NonlinearFactor, GaussianFactor) which can then be checked to perform * the correct operation. */ -class MixtureFactor : public HybridNonlinearFactor { +class MixtureFactor : public HybridFactor { public: using Base = HybridFactor; using This = MixtureFactor; @@ -213,7 +213,7 @@ class MixtureFactor : public HybridNonlinearFactor { // If this is a NoiseModelFactor, we'll use its noiseModel to // otherwise noiseModelFactor will be nullptr if (auto noiseModelFactor = - boost::dynamic_pointer_cast(factor);) { + boost::dynamic_pointer_cast(factor)) { // If dynamic cast to NoiseModelFactor succeeded, see if the noise model // is Gaussian auto noiseModel = noiseModelFactor->noiseModel(); @@ -228,13 +228,13 @@ class MixtureFactor : public HybridNonlinearFactor { // something with a normalized noise model // TODO(kevin): does this make sense to do? I think maybe not in // general? Should we just yell at the user? 
- auto gaussianFactor = factor.linearize(values); + auto gaussianFactor = factor->linearize(values); infoMat = gaussianFactor->information(); } } // Compute the (negative) log of the normalizing constant - return -(factor.dim() * log(2.0 * M_PI) / 2.0) - + return -(factor->dim() * log(2.0 * M_PI) / 2.0) - (log(infoMat.determinant()) / 2.0); } }; From 53e8c32b40fc934f6491778c3638f5972214c8ab Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 30 May 2022 08:33:16 -0400 Subject: [PATCH 13/74] Add NonlinearHybridFactorGraph class --- gtsam/hybrid/NonlinearHybridFactorGraph.h | 209 +++++ gtsam/hybrid/tests/Switching.h | 3 + .../tests/testNonlinearHybridFactorGraph.cpp | 770 ++++++++++++++++++ 3 files changed, 982 insertions(+) create mode 100644 gtsam/hybrid/NonlinearHybridFactorGraph.h create mode 100644 gtsam/hybrid/tests/testNonlinearHybridFactorGraph.cpp diff --git a/gtsam/hybrid/NonlinearHybridFactorGraph.h b/gtsam/hybrid/NonlinearHybridFactorGraph.h new file mode 100644 index 0000000000..aeb5a4545a --- /dev/null +++ b/gtsam/hybrid/NonlinearHybridFactorGraph.h @@ -0,0 +1,209 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file NonlinearHybridFactorGraph.h + * @brief Nonlinear hybrid factor graph that uses type erasure + * @author Varun Agrawal + * @date May 28, 2022 + */ + +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +#include +namespace gtsam { + +/** + * Nonlinear Hybrid Factor Graph + * ----------------------- + * This is the non-linear version of a hybrid factor graph. + * Everything inside needs to be hybrid factor or hybrid conditional. + */ +class GTSAM_EXPORT NonlinearHybridFactorGraph : public HybridFactorGraph { + protected: + /// Check if FACTOR type is derived from NonlinearFactor. + template + using IsNonlinear = typename std::enable_if< + std::is_base_of::value>::type; + + public: + using Base = FactorGraph; + using This = NonlinearHybridFactorGraph; ///< this class + using shared_ptr = boost::shared_ptr; ///< shared_ptr to This + + using Values = gtsam::Values; ///< backwards compatibility + using Indices = KeyVector; ///> map from keys to values + + /// @name Constructors + /// @{ + + NonlinearHybridFactorGraph() = default; + + /** + * Implicit copy/downcast constructor to override explicit template container + * constructor. In BayesTree this is used for: + * `cachedSeparatorMarginal_.reset(*separatorMarginal)` + * */ + template + NonlinearHybridFactorGraph(const FactorGraph& graph) + : Base(graph) {} + + /// @} + + // Allow use of selected FactorGraph methods: + using Base::empty; + using Base::reserve; + using Base::size; + using Base::operator[]; + using Base::add; + using Base::push_back; + using Base::resize; + + /** + * Add a nonlinear factor *pointer* to the internal nonlinear factor graph + * @param nonlinearFactor - boost::shared_ptr to the factor to add + */ + template + IsNonlinear push_nonlinear( + const boost::shared_ptr& nonlinearFactor) { + Base::push_back(boost::make_shared(nonlinearFactor)); + } + + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsNonlinear emplace_nonlinear(Args&&... 
args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_nonlinear(factor); + } + + /** + * @brief Add a single factor shared pointer to the hybrid factor graph. + * Dynamically handles the factor type and assigns it to the correct + * underlying container. + * + * @tparam FACTOR The factor type template + * @param sharedFactor The factor to add to this factor graph. + */ + template + void push_back(const boost::shared_ptr& sharedFactor) { + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_nonlinear(p); + } else { + Base::push_back(sharedFactor); + } + } + + /** Constructor from iterator over factors (shared_ptr or plain objects) */ + template + void push_back(ITERATOR firstFactor, ITERATOR lastFactor) { + for (auto&& it = firstFactor; it != lastFactor; it++) { + push_back(*it); + } + } + + // /// Add a nonlinear factor to the factor graph. + // void add(NonlinearFactor&& factor) { + // Base::add(boost::make_shared(std::move(factor))); + // } + + /// Add a nonlinear factor as a shared ptr. + void add(boost::shared_ptr factor) { + FactorGraph::add(boost::make_shared(factor)); + } + + /** + * Simply prints the factor graph. + */ + void print( + const std::string& str = "NonlinearHybridFactorGraph", + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override {} + + /** + * @return true if all internal graphs of `this` are equal to those of + * `other` + */ + bool equals(const NonlinearHybridFactorGraph& other, + double tol = 1e-9) const { + return false; + } + + /** + * @brief Linearize all the continuous factors in the + * NonlinearHybridFactorGraph. + * + * @param continuousValues: Dictionary of continuous values. + * @return GaussianHybridFactorGraph::shared_ptr + */ + GaussianHybridFactorGraph::shared_ptr linearize( + const Values& continuousValues) const { + // create an empty linear FG + GaussianHybridFactorGraph::shared_ptr linearFG = + boost::make_shared(); + + linearFG->reserve(size()); + + // linearize all hybrid factors + for (auto&& factor : factors_) { + // First check if it is a valid factor + if (factor) { + // Check if the factor is a hybrid factor. + // It can be either a nonlinear MixtureFactor or a linear + // GaussianMixtureFactor. + if (factor->isHybrid()) { + // Check if it is a nonlinear mixture factor + if (auto nlmf = boost::dynamic_pointer_cast(factor)) { + linearFG->push_back(nlmf->linearize(continuousValues)); + } else { + linearFG->push_back(factor); + } + + // Now check if the factor is a continuous only factor. + } else if (factor->isContinuous()) { + // In this case, we check if factor->inner() is nonlinear since + // HybridFactors wrap over continuous factors. + auto nlhf = + boost::dynamic_pointer_cast(factor); + if (auto nlf = + boost::dynamic_pointer_cast(nlhf->inner())) { + auto hgf = boost::make_shared( + nlf->linearize(continuousValues)); + linearFG->push_back(hgf); + } else { + linearFG->push_back(factor); + } + // Finally if nothing else, we are discrete-only which doesn't need + // lineariztion. 
+ } else { + linearFG->push_back(factor); + } + + } else { + linearFG->push_back(GaussianFactor::shared_ptr()); + } + } + return linearFG; + } +}; + +template <> +struct traits + : public Testable {}; + +} // namespace gtsam diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 77d8182c8e..be58824e00 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -29,6 +29,9 @@ using gtsam::symbol_shorthand::C; using gtsam::symbol_shorthand::X; namespace gtsam { + +using MotionModel = BetweenFactor; + inline GaussianHybridFactorGraph::shared_ptr makeSwitchingChain( size_t n, std::function keyFunc = X, std::function dKeyFunc = C) { diff --git a/gtsam/hybrid/tests/testNonlinearHybridFactorGraph.cpp b/gtsam/hybrid/tests/testNonlinearHybridFactorGraph.cpp new file mode 100644 index 0000000000..8043f78df8 --- /dev/null +++ b/gtsam/hybrid/tests/testNonlinearHybridFactorGraph.cpp @@ -0,0 +1,770 @@ +/* ---------------------------------------------------------------------------- + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + * See LICENSE for the license information + * -------------------------------------------------------------------------- */ + +/** + * @file testDCFactorGraph.cpp + * @brief Unit tests for DCFactorGraph + * @author Varun Agrawal + * @author Fan Jiang + * @author Frank Dellaert + * @date December 2021 + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "Switching.h" + +// Include for test suite +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::L; +using symbol_shorthand::M; +using symbol_shorthand::X; + +/* **************************************************************************** + * Test that any linearizedFactorGraph gaussian factors are appended to the + * existing gaussian factor graph in the hybrid factor graph. + */ +TEST(HybridFactorGraph, GaussianFactorGraph) { + NonlinearHybridFactorGraph fg; + + // Add a simple prior factor to the nonlinear factor graph + fg.emplace_nonlinear>(X(0), 0, Isotropic::Sigma(1, 0.1)); + + // Linearization point + Values linearizationPoint; + linearizationPoint.insert(X(0), 0); + + GaussianHybridFactorGraph ghfg = *fg.linearize(linearizationPoint); + + // Add a factor to the GaussianFactorGraph + ghfg.add(JacobianFactor(X(0), I_1x1, Vector1(5))); + + EXPECT_LONGS_EQUAL(2, ghfg.size()); +} + +/* ************************************************************************** + */ +/// Test that the resize method works correctly for a +/// NonlinearHybridFactorGraph. +TEST(NonlinearHybridFactorGraph, Resize) { + NonlinearHybridFactorGraph fg; + // auto nonlinearFactor = boost::make_shared>(); + // fg.push_back(nonlinearFactor); + + // auto discreteFactor = boost::make_shared(); + // fg.push_back(discreteFactor); + + // auto dcFactor = boost::make_shared>(); + // fg.push_back(dcFactor); + // EXPECT_LONGS_EQUAL(fg.size(), 3); + + fg.resize(0); + EXPECT_LONGS_EQUAL(fg.size(), 0); +} + +// /* ************************************************************************** +// */ +// /// Test that the resize method works correctly for a +// /// GaussianHybridFactorGraph. 
+// TEST(GaussianHybridFactorGraph, Resize) { +// NonlinearHybridFactorGraph nhfg; +// auto nonlinearFactor = boost::make_shared>( +// X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1)); +// nhfg.push_back(nonlinearFactor); +// auto discreteFactor = boost::make_shared(); +// nhfg.push_back(discreteFactor); + +// KeyVector contKeys = {X(0), X(1)}; +// auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0); +// auto still = boost::make_shared(X(0), X(1), 0.0, noise_model), +// moving = boost::make_shared(X(0), X(1), 1.0, +// noise_model); +// std::vector components = {still, moving}; +// auto dcFactor = boost::make_shared>( +// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); +// nhfg.push_back(dcFactor); + +// Values linearizationPoint; +// linearizationPoint.insert(X(0), 0); +// linearizationPoint.insert(X(1), 1); + +// // Generate `GaussianHybridFactorGraph` by linearizing +// GaussianHybridFactorGraph fg = nhfg.linearize(linearizationPoint); + +// EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 1); +// EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 1); +// EXPECT_LONGS_EQUAL(fg.gaussianGraph().size(), 1); + +// EXPECT_LONGS_EQUAL(fg.size(), 3); + +// fg.resize(0); +// EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 0); +// EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 0); +// EXPECT_LONGS_EQUAL(fg.gaussianGraph().size(), 0); + +// EXPECT_LONGS_EQUAL(fg.size(), 0); +// } + +// /* +// **************************************************************************** +// * Test push_back on HFG makes the correct distinction. +// */ +// TEST(HybridFactorGraph, PushBack) { +// NonlinearHybridFactorGraph fg; + +// auto nonlinearFactor = boost::make_shared>(); +// fg.push_back(nonlinearFactor); + +// EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 0); +// EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 0); +// EXPECT_LONGS_EQUAL(fg.nonlinearGraph().size(), 1); + +// fg = NonlinearHybridFactorGraph(); + +// auto discreteFactor = boost::make_shared(); +// fg.push_back(discreteFactor); + +// EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 0); +// EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 1); +// EXPECT_LONGS_EQUAL(fg.nonlinearGraph().size(), 0); + +// fg = NonlinearHybridFactorGraph(); + +// auto dcFactor = boost::make_shared>(); +// fg.push_back(dcFactor); + +// EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 1); +// EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 0); +// EXPECT_LONGS_EQUAL(fg.nonlinearGraph().size(), 0); + +// // Now do the same with GaussianHybridFactorGraph +// GaussianHybridFactorGraph ghfg; + +// auto gaussianFactor = boost::make_shared(); +// ghfg.push_back(gaussianFactor); + +// EXPECT_LONGS_EQUAL(ghfg.dcGraph().size(), 0); +// EXPECT_LONGS_EQUAL(ghfg.discreteGraph().size(), 0); +// EXPECT_LONGS_EQUAL(ghfg.gaussianGraph().size(), 1); + +// ghfg = GaussianHybridFactorGraph(); + +// ghfg.push_back(discreteFactor); + +// EXPECT_LONGS_EQUAL(ghfg.dcGraph().size(), 0); +// EXPECT_LONGS_EQUAL(ghfg.discreteGraph().size(), 1); +// EXPECT_LONGS_EQUAL(ghfg.gaussianGraph().size(), 0); + +// ghfg = GaussianHybridFactorGraph(); + +// ghfg.push_back(dcFactor); + +// EXPECT_LONGS_EQUAL(ghfg.dcGraph().size(), 1); +// EXPECT_LONGS_EQUAL(ghfg.discreteGraph().size(), 0); +// EXPECT_LONGS_EQUAL(ghfg.gaussianGraph().size(), 0); +// } + +// /* +// ****************************************************************************/ +// // Test construction of switching-like hybrid factor graph. 
+// TEST(HybridFactorGraph, Switching) { +// Switching self(3); + +// EXPECT_LONGS_EQUAL(8, self.nonlinearFactorGraph.size()); +// EXPECT_LONGS_EQUAL(4, self.nonlinearFactorGraph.nonlinearGraph().size()); +// EXPECT_LONGS_EQUAL(2, self.nonlinearFactorGraph.discreteGraph().size()); +// EXPECT_LONGS_EQUAL(2, self.nonlinearFactorGraph.dcGraph().size()); + +// EXPECT_LONGS_EQUAL(8, self.linearizedFactorGraph.size()); +// EXPECT_LONGS_EQUAL(2, self.linearizedFactorGraph.discreteGraph().size()); +// EXPECT_LONGS_EQUAL(2, self.linearizedFactorGraph.dcGraph().size()); +// EXPECT_LONGS_EQUAL(4, self.linearizedFactorGraph.gaussianGraph().size()); +// } + +// /* +// ****************************************************************************/ +// // Test linearization on a switching-like hybrid factor graph. +// TEST(HybridFactorGraph, Linearization) { +// Switching self(3); + +// // Linearize here: +// GaussianHybridFactorGraph actualLinearized = +// self.nonlinearFactorGraph.linearize(self.linearizationPoint); + +// EXPECT_LONGS_EQUAL(8, actualLinearized.size()); +// EXPECT_LONGS_EQUAL(2, actualLinearized.discreteGraph().size()); +// EXPECT_LONGS_EQUAL(2, actualLinearized.dcGraph().size()); +// EXPECT_LONGS_EQUAL(4, actualLinearized.gaussianGraph().size()); +// } + +// /* +// ****************************************************************************/ +// // Test elimination tree construction +// TEST(HybridFactorGraph, EliminationTree) { +// Switching self(3); + +// // Create ordering. +// Ordering ordering; +// for (size_t k = 1; k <= self.K; k++) ordering += X(k); + +// // Create elimination tree. +// HybridEliminationTree etree(self.linearizedFactorGraph, ordering); +// EXPECT_LONGS_EQUAL(1, etree.roots().size()) +// } + +// /* +// ****************************************************************************/ +// // Test elimination function by eliminating x1 in *-x1-*-x2 graph. +// TEST(DCGaussianElimination, Eliminate_x1) { +// Switching self(3); + +// // Gather factors on x1, has a simple Gaussian and a mixture factor. +// GaussianHybridFactorGraph factors; +// factors.push_gaussian(self.linearizedFactorGraph.gaussianGraph()[0]); +// factors.push_dc(self.linearizedFactorGraph.dcGraph()[0]); + +// // Check that sum works: +// auto sum = factors.sum(); +// Assignment mode; +// mode[M(1)] = 1; +// auto actual = sum(mode); // Selects one of 2 modes. +// EXPECT_LONGS_EQUAL(2, actual.size()); // Prior and motion model. + +// // Eliminate x1 +// Ordering ordering; +// ordering += X(1); + +// auto result = EliminateHybrid(factors, ordering); +// CHECK(result.first); +// EXPECT_LONGS_EQUAL(1, result.first->nrFrontals()); +// CHECK(result.second); +// // Has two keys, x2 and m1 +// EXPECT_LONGS_EQUAL(2, result.second->size()); +// } + +// /* +// ****************************************************************************/ +// // Test elimination function by eliminating x2 in x1-*-x2-*-x3 chain. +// // m1/ \m2 +// TEST(DCGaussianElimination, Eliminate_x2) { +// Switching self(3); + +// // Gather factors on x2, will be two mixture factors (with x1 and x3, +// resp.). GaussianHybridFactorGraph factors; +// factors.push_dc(self.linearizedFactorGraph.dcGraph()[0]); // involves m1 +// factors.push_dc(self.linearizedFactorGraph.dcGraph()[1]); // involves m2 + +// // Check that sum works: +// auto sum = factors.sum(); +// Assignment mode; +// mode[M(1)] = 0; +// mode[M(2)] = 1; +// auto actual = sum(mode); // Selects one of 4 mode +// combinations. EXPECT_LONGS_EQUAL(2, actual.size()); // 2 motion models. 
+
+// // Eliminate x2
+// Ordering ordering;
+// ordering += X(2);
+
+// std::pair>
+// result =
+// EliminateHybrid(factors, ordering);
+// CHECK(result.first);
+// EXPECT_LONGS_EQUAL(1, result.first->nrFrontals());
+// CHECK(result.second);
+// // Note: separator keys should include m1, m2.
+// EXPECT_LONGS_EQUAL(4, result.second->size());
+// }
+
+// /*
+// ****************************************************************************/
+// // Helper method to generate gaussian factor graphs with a specific mode.
+// GaussianFactorGraph::shared_ptr batchGFG(double between,
+// Values linearizationPoint) {
+// NonlinearFactorGraph graph;
+// graph.addPrior(X(1), 0, Isotropic::Sigma(1, 0.1));
+
+// auto between_x1_x2 = boost::make_shared(
+// X(1), X(2), between, Isotropic::Sigma(1, 1.0));
+
+// graph.push_back(between_x1_x2);
+
+// return graph.linearize(linearizationPoint);
+// }
+
+// /*
+// ****************************************************************************/
+// // Test elimination function by eliminating x1 and x2 in graph.
+// TEST(DCGaussianElimination, EliminateHybrid_2_Variable) {
+// Switching self(2, 1.0, 0.1);
+
+// auto factors = self.linearizedFactorGraph;
+
+// // Check that sum works:
+// auto sum = factors.sum();
+// Assignment mode;
+// mode[M(1)] = 1;
+// auto actual = sum(mode); // Selects one of 2 modes.
+// EXPECT_LONGS_EQUAL(4,
+// actual.size()); // Prior, 1 motion model, 2
+// measurements.
+
+// // Eliminate x1
+// Ordering ordering;
+// ordering += X(1);
+// ordering += X(2);
+
+// AbstractConditional::shared_ptr abstractConditionalMixture;
+// boost::shared_ptr factorOnModes;
+// std::tie(abstractConditionalMixture, factorOnModes) =
+// EliminateHybrid(factors, ordering);
+
+// auto gaussianConditionalMixture =
+// dynamic_pointer_cast(abstractConditionalMixture);
+
+// CHECK(gaussianConditionalMixture);
+// EXPECT_LONGS_EQUAL(
+// 2,
+// gaussianConditionalMixture->nrFrontals()); // Frontals = [x1, x2]
+// EXPECT_LONGS_EQUAL(
+// 1,
+// gaussianConditionalMixture->nrParents()); // 1 parent, which is the
+// mode
+
+// auto discreteFactor =
+// dynamic_pointer_cast(factorOnModes);
+// CHECK(discreteFactor);
+// EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size());
+// EXPECT(discreteFactor->root_->isLeaf() == false);
+// }
+
+// /*
+// ****************************************************************************/
+// /// Test the toDecisionTreeFactor method
+// TEST(HybridFactorGraph, ToDecisionTreeFactor) {
+// size_t K = 3;
+
+// // Provide tight sigma values so that the errors are visibly different.
+// double between_sigma = 5e-8, prior_sigma = 1e-7;
+
+// Switching self(K, between_sigma, prior_sigma);
+
+// // Clear out discrete factors since sum() cannot handle those
+// GaussianHybridFactorGraph linearizedFactorGraph(
+// self.linearizedFactorGraph.gaussianGraph(), DiscreteFactorGraph(),
+// self.linearizedFactorGraph.dcGraph());
+
+// auto decisionTreeFactor = linearizedFactorGraph.toDecisionTreeFactor();
+
+// auto allAssignments =
+// DiscreteValues::CartesianProduct(linearizedFactorGraph.discreteKeys());
+
+// // Get the error of the discrete assignment m1=0, m2=1.
+// double actual = (*decisionTreeFactor)(allAssignments[1]); + +// /********************************************/ +// // Create equivalent factor graph for m1=0, m2=1 +// GaussianFactorGraph graph = linearizedFactorGraph.gaussianGraph(); + +// for (auto &p : linearizedFactorGraph.dcGraph()) { +// if (auto mixture = +// boost::dynamic_pointer_cast(p)) { +// graph.add((*mixture)(allAssignments[1])); +// } +// } + +// VectorValues values = graph.optimize(); +// double expected = graph.probPrime(values); +// /********************************************/ +// EXPECT_DOUBLES_EQUAL(expected, actual, 1e-12); +// // REGRESSION: +// EXPECT_DOUBLES_EQUAL(0.6125, actual, 1e-4); +// } + +// /* +// ****************************************************************************/ +// // Test partial elimination +// TEST_UNSAFE(HybridFactorGraph, Partial_Elimination) { +// Switching self(3); + +// auto linearizedFactorGraph = self.linearizedFactorGraph; + +// // Create ordering. +// Ordering ordering; +// for (size_t k = 1; k <= self.K; k++) ordering += X(k); + +// // Eliminate partially. +// HybridBayesNet::shared_ptr hybridBayesNet; +// GaussianHybridFactorGraph::shared_ptr remainingFactorGraph; +// std::tie(hybridBayesNet, remainingFactorGraph) = +// linearizedFactorGraph.eliminatePartialSequential(ordering); + +// CHECK(hybridBayesNet); +// // GTSAM_PRINT(*hybridBayesNet); // HybridBayesNet +// EXPECT_LONGS_EQUAL(3, hybridBayesNet->size()); +// EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)}); +// EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)})); +// EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)}); +// EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(2), M(1)})); +// EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)}); +// EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(2), M(1)})); + +// CHECK(remainingFactorGraph); +// // GTSAM_PRINT(*remainingFactorGraph); // HybridFactorGraph +// EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size()); +// EXPECT(remainingFactorGraph->discreteGraph().at(0)->keys() == +// KeyVector({M(1)})); +// EXPECT(remainingFactorGraph->discreteGraph().at(1)->keys() == +// KeyVector({M(2), M(1)})); +// EXPECT(remainingFactorGraph->discreteGraph().at(2)->keys() == +// KeyVector({M(2), M(1)})); +// } + +// /* +// ****************************************************************************/ +// // Test full elimination +// TEST_UNSAFE(HybridFactorGraph, Full_Elimination) { +// Switching self(3); + +// auto linearizedFactorGraph = self.linearizedFactorGraph; + +// // We first do a partial elimination +// HybridBayesNet::shared_ptr hybridBayesNet_partial; +// GaussianHybridFactorGraph::shared_ptr remainingFactorGraph_partial; +// DiscreteBayesNet discreteBayesNet; + +// { +// // Create ordering. +// Ordering ordering; +// for (size_t k = 1; k <= self.K; k++) ordering += X(k); + +// // Eliminate partially. +// std::tie(hybridBayesNet_partial, remainingFactorGraph_partial) = +// linearizedFactorGraph.eliminatePartialSequential(ordering); + +// DiscreteFactorGraph dfg; +// dfg.push_back(remainingFactorGraph_partial->discreteGraph()); +// ordering.clear(); +// for (size_t k = 1; k < self.K; k++) ordering += M(k); +// discreteBayesNet = *dfg.eliminateSequential(ordering, EliminateForMPE); +// } + +// // Create ordering. +// Ordering ordering; +// for (size_t k = 1; k <= self.K; k++) ordering += X(k); +// for (size_t k = 1; k < self.K; k++) ordering += M(k); + +// // Eliminate partially. 
+// HybridBayesNet::shared_ptr hybridBayesNet = +// linearizedFactorGraph.eliminateSequential(ordering); + +// CHECK(hybridBayesNet); +// EXPECT_LONGS_EQUAL(5, hybridBayesNet->size()); +// // p(x1 | x2, m1) +// EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)}); +// EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)})); +// // p(x2 | x3, m1, m2) +// EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)}); +// EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(2), M(1)})); +// // p(x3 | m1, m2) +// EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)}); +// EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(2), M(1)})); +// // P(m1 | m2) +// EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(1)}); +// EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(2)})); +// EXPECT(dynamic_pointer_cast(hybridBayesNet->at(3)) +// ->equals(*discreteBayesNet.at(0))); +// // P(m2) +// EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)}); +// EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents()); +// EXPECT(dynamic_pointer_cast(hybridBayesNet->at(4)) +// ->equals(*discreteBayesNet.at(1))); +// } + +// /* +// ****************************************************************************/ +// // Test printing +// TEST(HybridFactorGraph, Printing) { +// Switching self(3); + +// auto linearizedFactorGraph = self.linearizedFactorGraph; + +// // Create ordering. +// Ordering ordering; +// for (size_t k = 1; k <= self.K; k++) ordering += X(k); + +// // Eliminate partially. +// HybridBayesNet::shared_ptr hybridBayesNet; +// GaussianHybridFactorGraph::shared_ptr remainingFactorGraph; +// std::tie(hybridBayesNet, remainingFactorGraph) = +// linearizedFactorGraph.eliminatePartialSequential(ordering); + +// string expected_hybridFactorGraph = R"( +// size: 8 +// DiscreteFactorGraph +// size: 2 +// factor 0: P( m1 ): +// Leaf 0.5 +// factor 1: P( m2 | m1 ): +// Choice(m2) +// 0 Choice(m1) +// 0 0 Leaf 0.33333333 +// 0 1 Leaf 0.6 +// 1 Choice(m1) +// 1 0 Leaf 0.66666667 +// 1 1 Leaf 0.4 +// DCFactorGraph +// size: 2 +// factor 0: [ x1 x2; m1 ]{ +// Choice(m1) +// 0 Leaf Jacobian factor on 2 keys: +// A[x1] = [ +// -1 +// ] +// A[x2] = [ +// 1 +// ] +// b = [ -1 ] +// No noise model +// 1 Leaf Jacobian factor on 2 keys: +// A[x1] = [ +// -1 +// ] +// A[x2] = [ +// 1 +// ] +// b = [ -0 ] +// No noise model +// } +// factor 1: [ x2 x3; m2 ]{ +// Choice(m2) +// 0 Leaf Jacobian factor on 2 keys: +// A[x2] = [ +// -1 +// ] +// A[x3] = [ +// 1 +// ] +// b = [ -1 ] +// No noise model +// 1 Leaf Jacobian factor on 2 keys: +// A[x2] = [ +// -1 +// ] +// A[x3] = [ +// 1 +// ] +// b = [ -0 ] +// No noise model +// } +// GaussianGraph +// size: 4 +// factor 0: +// A[x1] = [ +// 10 +// ] +// b = [ -10 ] +// No noise model +// factor 1: +// A[x1] = [ +// 10 +// ] +// b = [ -10 ] +// No noise model +// factor 2: +// A[x2] = [ +// 10 +// ] +// b = [ -10 ] +// No noise model +// factor 3: +// A[x3] = [ +// 10 +// ] +// b = [ -10 ] +// No noise model +// )"; +// EXPECT(assert_print_equal(expected_hybridFactorGraph, +// linearizedFactorGraph)); + +// // Expected output for hybridBayesNet. 
+// string expected_hybridBayesNet = R"( +// size: 3 +// factor 0: GaussianMixture [ x1 | x2 m1 ]{ +// Choice(m1) +// 0 Leaf Jacobian factor on 2 keys: +// p(x1 | x2) +// R = [ 14.1774 ] +// S[x2] = [ -0.0705346 ] +// d = [ -14.0364 ] +// No noise model +// 1 Leaf Jacobian factor on 2 keys: +// p(x1 | x2) +// R = [ 14.1774 ] +// S[x2] = [ -0.0705346 ] +// d = [ -14.1069 ] +// No noise model +// } +// factor 1: GaussianMixture [ x2 | x3 m2 m1 ]{ +// Choice(m2) +// 0 Choice(m1) +// 0 0 Leaf Jacobian factor on 2 keys: +// p(x2 | x3) +// R = [ 10.0993 ] +// S[x3] = [ -0.0990172 ] +// d = [ -9.99975 ] +// No noise model +// 0 1 Leaf Jacobian factor on 2 keys: +// p(x2 | x3) +// R = [ 10.0993 ] +// S[x3] = [ -0.0990172 ] +// d = [ -9.90122 ] +// No noise model +// 1 Choice(m1) +// 1 0 Leaf Jacobian factor on 2 keys: +// p(x2 | x3) +// R = [ 10.0993 ] +// S[x3] = [ -0.0990172 ] +// d = [ -10.0988 ] +// No noise model +// 1 1 Leaf Jacobian factor on 2 keys: +// p(x2 | x3) +// R = [ 10.0993 ] +// S[x3] = [ -0.0990172 ] +// d = [ -10.0002 ] +// No noise model +// } +// factor 2: GaussianMixture [ x3 | m2 m1 ]{ +// Choice(m2) +// 0 Choice(m1) +// 0 0 Leaf Jacobian factor on 1 keys: +// p(x3) +// R = [ 10.0494 ] +// d = [ -10.1489 ] +// No noise model +// 0 1 Leaf Jacobian factor on 1 keys: +// p(x3) +// R = [ 10.0494 ] +// d = [ -10.1479 ] +// No noise model +// 1 Choice(m1) +// 1 0 Leaf Jacobian factor on 1 keys: +// p(x3) +// R = [ 10.0494 ] +// d = [ -10.0504 ] +// No noise model +// 1 1 Leaf Jacobian factor on 1 keys: +// p(x3) +// R = [ 10.0494 ] +// d = [ -10.0494 ] +// No noise model +// } +// )"; +// EXPECT(assert_print_equal(expected_hybridBayesNet, *hybridBayesNet)); +// } + +// /* ************************************************************************* +// */ +// // Simple PlanarSLAM example test with 2 poses and 2 landmarks (each pose +// // connects to 1 landmark) to expose issue with default decision tree +// creation +// // in hybrid elimination. The hybrid factor is between the poses X0 and X1. +// The +// // issue arises if we eliminate a landmark variable first since it is not +// // connected to a DCFactor. +// TEST(HybridFactorGraph, DefaultDecisionTree) { +// NonlinearHybridFactorGraph fg; + +// // Add a prior on pose x1 at the origin. A prior factor consists of a mean +// and +// // a noise model (covariance matrix) +// Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin +// auto priorNoise = noiseModel::Diagonal::Sigmas( +// Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta +// fg.emplace_nonlinear>(X(0), prior, priorNoise); + +// using PlanarMotionModel = BetweenFactor; + +// // Add odometry factor +// Pose2 odometry(2.0, 0.0, 0.0); +// KeyVector contKeys = {X(0), X(1)}; +// auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); +// auto still = boost::make_shared(X(0), X(1), Pose2(0, 0, +// 0), +// noise_model), +// moving = boost::make_shared(X(0), X(1), odometry, +// noise_model); +// std::vector components = {still, moving}; +// auto dcFactor = boost::make_shared>( +// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); +// fg.push_back(dcFactor); + +// // Add Range-Bearing measurements to from X0 to L0 and X1 to L1. 
+// // create a noise model for the landmark measurements +// auto measurementNoise = noiseModel::Diagonal::Sigmas( +// Vector2(0.1, 0.2)); // 0.1 rad std on bearing, 20cm on range +// // create the measurement values - indices are (pose id, landmark id) +// Rot2 bearing11 = Rot2::fromDegrees(45), bearing22 = Rot2::fromDegrees(90); +// double range11 = std::sqrt(4.0 + 4.0), range22 = 2.0; + +// // Add Bearing-Range factors +// fg.emplace_nonlinear>( +// X(0), L(0), bearing11, range11, measurementNoise); +// fg.emplace_nonlinear>( +// X(1), L(1), bearing22, range22, measurementNoise); + +// // Create (deliberately inaccurate) initial estimate +// Values initialEstimate; +// initialEstimate.insert(X(0), Pose2(0.5, 0.0, 0.2)); +// initialEstimate.insert(X(1), Pose2(2.3, 0.1, -0.2)); +// initialEstimate.insert(L(0), Point2(1.8, 2.1)); +// initialEstimate.insert(L(1), Point2(4.1, 1.8)); + +// // We want to eliminate variables not connected to DCFactors first. +// Ordering ordering; +// ordering += L(0); +// ordering += L(1); +// ordering += X(0); +// ordering += X(1); + +// GaussianHybridFactorGraph linearized = fg.linearize(initialEstimate); +// gtsam::HybridBayesNet::shared_ptr hybridBayesNet; +// gtsam::GaussianHybridFactorGraph::shared_ptr remainingFactorGraph; + +// // This should NOT fail +// std::tie(hybridBayesNet, remainingFactorGraph) = +// linearized.eliminatePartialSequential(ordering); +// EXPECT_LONGS_EQUAL(4, hybridBayesNet->size()); +// EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size()); +// } + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ \ No newline at end of file From 7e1827784995db5c673c63671810231e32cbcb4d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 30 May 2022 16:51:15 -0400 Subject: [PATCH 14/74] fix base class --- gtsam/hybrid/NonlinearHybridFactorGraph.h | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/gtsam/hybrid/NonlinearHybridFactorGraph.h b/gtsam/hybrid/NonlinearHybridFactorGraph.h index aeb5a4545a..3f7f5ba10f 100644 --- a/gtsam/hybrid/NonlinearHybridFactorGraph.h +++ b/gtsam/hybrid/NonlinearHybridFactorGraph.h @@ -43,7 +43,7 @@ class GTSAM_EXPORT NonlinearHybridFactorGraph : public HybridFactorGraph { std::is_base_of::value>::type; public: - using Base = FactorGraph; + using Base = HybridFactorGraph; using This = NonlinearHybridFactorGraph; ///< this class using shared_ptr = boost::shared_ptr; ///< shared_ptr to This @@ -72,7 +72,6 @@ class GTSAM_EXPORT NonlinearHybridFactorGraph : public HybridFactorGraph { using Base::size; using Base::operator[]; using Base::add; - using Base::push_back; using Base::resize; /** From 119679a3668226ad6499a41c1eb705fc9b0b93a1 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 30 May 2022 17:30:25 -0400 Subject: [PATCH 15/74] linearize returns object instead of pointer --- gtsam/hybrid/NonlinearHybridFactorGraph.h | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/gtsam/hybrid/NonlinearHybridFactorGraph.h b/gtsam/hybrid/NonlinearHybridFactorGraph.h index 3f7f5ba10f..82a331531e 100644 --- a/gtsam/hybrid/NonlinearHybridFactorGraph.h +++ b/gtsam/hybrid/NonlinearHybridFactorGraph.h @@ -150,13 +150,11 @@ class GTSAM_EXPORT NonlinearHybridFactorGraph : public HybridFactorGraph { * @param continuousValues: Dictionary of continuous values. 
* @return GaussianHybridFactorGraph::shared_ptr
   */
-  GaussianHybridFactorGraph::shared_ptr linearize(
-      const Values& continuousValues) const {
+  GaussianHybridFactorGraph linearize(const Values& continuousValues) const {
     // create an empty linear FG
-    GaussianHybridFactorGraph::shared_ptr linearFG =
-        boost::make_shared();
+    GaussianHybridFactorGraph linearFG;
 
-    linearFG->reserve(size());
+    linearFG.reserve(size());
 
     // linearize all hybrid factors
     for (auto&& factor : factors_) {
@@ -168,9 +166,9 @@ class GTSAM_EXPORT NonlinearHybridFactorGraph : public HybridFactorGraph {
       if (factor->isHybrid()) {
         // Check if it is a nonlinear mixture factor
         if (auto nlmf = boost::dynamic_pointer_cast(factor)) {
-          linearFG->push_back(nlmf->linearize(continuousValues));
+          linearFG.push_back(nlmf->linearize(continuousValues));
         } else {
-          linearFG->push_back(factor);
+          linearFG.push_back(factor);
         }
 
         // Now check if the factor is a continuous only factor.
@@ -183,18 +181,18 @@ class GTSAM_EXPORT NonlinearHybridFactorGraph : public HybridFactorGraph {
             boost::dynamic_pointer_cast(nlhf->inner())) {
           auto hgf = boost::make_shared(
               nlf->linearize(continuousValues));
-          linearFG->push_back(hgf);
+          linearFG.push_back(hgf);
         } else {
-          linearFG->push_back(factor);
+          linearFG.push_back(factor);
         }
 
         // Finally if nothing else, we are discrete-only which doesn't need
         // linearization.
       } else {
-        linearFG->push_back(GaussianFactor::shared_ptr());
+        linearFG.push_back(GaussianFactor::shared_ptr());
      }
    }
    return linearFG;
From 3212dde4aa493313b8f861eedde694a980757a75 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Mon, 30 May 2022 17:50:50 -0400
Subject: [PATCH 16/74] remove unneeded method

---
 gtsam/hybrid/GaussianHybridFactorGraph.h | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/gtsam/hybrid/GaussianHybridFactorGraph.h b/gtsam/hybrid/GaussianHybridFactorGraph.h
index b38a1ebd8d..6998ff899a 100644
--- a/gtsam/hybrid/GaussianHybridFactorGraph.h
+++ b/gtsam/hybrid/GaussianHybridFactorGraph.h
@@ -161,15 +161,6 @@ class GaussianHybridFactorGraph
       Base::push_back(sharedFactor);
     }
   }
-
-  /**
-   * @brief Push back for Gaussian Factor specifically.
-   *
-   * @param sharedFactor Shared ptr to a gaussian factor.
-   */
-  void push_back(const GaussianFactor::shared_ptr& sharedFactor) {
-    push_gaussian(sharedFactor);
-  }
 };
 
 }  // namespace gtsam
From 0c16799ef664fe0d4493e0b5d3bd7eaedf6f6719 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Mon, 30 May 2022 17:51:07 -0400
Subject: [PATCH 17/74] GaussianMixtureFactor inherits from HybridFactor

---
 gtsam/hybrid/GaussianMixtureFactor.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index bd2e079cba..494a44ccf9 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -40,9 +40,9 @@ using GaussianFactorVector = std::vector;
 * of discrete variables indexes to the continuous gaussian distribution.
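 *
 * A construction sketch (hypothetical keys; it mirrors the unit tests later
 * in this patch series, and the JacobianFactor template argument is an
 * inferred assumption):
 *
 *   hfg.add(GaussianMixtureFactor::FromFactors(
 *       {X(1)}, {{C(1), 2}},
 *       {boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
 *        boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));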
* */ -class GaussianMixtureFactor : public HybridGaussianFactor { +class GaussianMixtureFactor : public HybridFactor { public: - using Base = HybridGaussianFactor; + using Base = HybridFactor; using This = GaussianMixtureFactor; using shared_ptr = boost::shared_ptr; From e711a62e2d1f2ee3011024bfdc7b5081d65d5212 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 30 May 2022 17:51:25 -0400 Subject: [PATCH 18/74] More tests working --- gtsam/hybrid/tests/Switching.h | 1 + .../tests/testNonlinearHybridFactorGraph.cpp | 159 ++++++++---------- 2 files changed, 71 insertions(+), 89 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index be58824e00..42c61767e0 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -22,6 +22,7 @@ #include #include #include +#include #pragma once diff --git a/gtsam/hybrid/tests/testNonlinearHybridFactorGraph.cpp b/gtsam/hybrid/tests/testNonlinearHybridFactorGraph.cpp index 8043f78df8..6e40199921 100644 --- a/gtsam/hybrid/tests/testNonlinearHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testNonlinearHybridFactorGraph.cpp @@ -58,7 +58,7 @@ TEST(HybridFactorGraph, GaussianFactorGraph) { Values linearizationPoint; linearizationPoint.insert(X(0), 0); - GaussianHybridFactorGraph ghfg = *fg.linearize(linearizationPoint); + GaussianHybridFactorGraph ghfg = fg.linearize(linearizationPoint); // Add a factor to the GaussianFactorGraph ghfg.add(JacobianFactor(X(0), I_1x1, Vector1(5))); @@ -72,121 +72,102 @@ TEST(HybridFactorGraph, GaussianFactorGraph) { /// NonlinearHybridFactorGraph. TEST(NonlinearHybridFactorGraph, Resize) { NonlinearHybridFactorGraph fg; - // auto nonlinearFactor = boost::make_shared>(); - // fg.push_back(nonlinearFactor); + auto nonlinearFactor = boost::make_shared>(); + fg.push_back(nonlinearFactor); - // auto discreteFactor = boost::make_shared(); - // fg.push_back(discreteFactor); + auto discreteFactor = boost::make_shared(); + fg.push_back(discreteFactor); - // auto dcFactor = boost::make_shared>(); - // fg.push_back(dcFactor); - // EXPECT_LONGS_EQUAL(fg.size(), 3); + auto dcFactor = boost::make_shared(); + fg.push_back(dcFactor); + + EXPECT_LONGS_EQUAL(fg.size(), 3); fg.resize(0); EXPECT_LONGS_EQUAL(fg.size(), 0); } -// /* ************************************************************************** -// */ -// /// Test that the resize method works correctly for a -// /// GaussianHybridFactorGraph. 
-// TEST(GaussianHybridFactorGraph, Resize) { -// NonlinearHybridFactorGraph nhfg; -// auto nonlinearFactor = boost::make_shared>( -// X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1)); -// nhfg.push_back(nonlinearFactor); -// auto discreteFactor = boost::make_shared(); -// nhfg.push_back(discreteFactor); - -// KeyVector contKeys = {X(0), X(1)}; -// auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0); -// auto still = boost::make_shared(X(0), X(1), 0.0, noise_model), -// moving = boost::make_shared(X(0), X(1), 1.0, -// noise_model); -// std::vector components = {still, moving}; -// auto dcFactor = boost::make_shared>( -// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); -// nhfg.push_back(dcFactor); - -// Values linearizationPoint; -// linearizationPoint.insert(X(0), 0); -// linearizationPoint.insert(X(1), 1); - -// // Generate `GaussianHybridFactorGraph` by linearizing -// GaussianHybridFactorGraph fg = nhfg.linearize(linearizationPoint); - -// EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 1); -// EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 1); -// EXPECT_LONGS_EQUAL(fg.gaussianGraph().size(), 1); - -// EXPECT_LONGS_EQUAL(fg.size(), 3); +/* ************************************************************************** + */ +/// Test that the resize method works correctly for a +/// GaussianHybridFactorGraph. +TEST(GaussianHybridFactorGraph, Resize) { + NonlinearHybridFactorGraph nhfg; + auto nonlinearFactor = boost::make_shared>( + X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1)); + nhfg.push_back(nonlinearFactor); + auto discreteFactor = boost::make_shared(); + nhfg.push_back(discreteFactor); + + KeyVector contKeys = {X(0), X(1)}; + auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0); + auto still = boost::make_shared(X(0), X(1), 0.0, noise_model), + moving = boost::make_shared(X(0), X(1), 1.0, noise_model); + + // TODO(Varun) This is declared as NonlinearFactor instead of MotionModel, aka + // not clear!! + std::vector components = {still, moving}; + auto dcFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + nhfg.push_back(dcFactor); -// fg.resize(0); -// EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 0); -// EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 0); -// EXPECT_LONGS_EQUAL(fg.gaussianGraph().size(), 0); + Values linearizationPoint; + linearizationPoint.insert(X(0), 0); + linearizationPoint.insert(X(1), 1); -// EXPECT_LONGS_EQUAL(fg.size(), 0); -// } + // Generate `GaussianHybridFactorGraph` by linearizing + GaussianHybridFactorGraph gfg = nhfg.linearize(linearizationPoint); -// /* -// **************************************************************************** -// * Test push_back on HFG makes the correct distinction. -// */ -// TEST(HybridFactorGraph, PushBack) { -// NonlinearHybridFactorGraph fg; + EXPECT_LONGS_EQUAL(gfg.size(), 3); -// auto nonlinearFactor = boost::make_shared>(); -// fg.push_back(nonlinearFactor); + gfg.resize(0); + EXPECT_LONGS_EQUAL(gfg.size(), 0); +} -// EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 0); -// EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 0); -// EXPECT_LONGS_EQUAL(fg.nonlinearGraph().size(), 1); +/* +**************************************************************************** +* Test push_back on HFG makes the correct distinction. 
+*/ +TEST(HybridFactorGraph, PushBack) { + NonlinearHybridFactorGraph fg; -// fg = NonlinearHybridFactorGraph(); + auto nonlinearFactor = boost::make_shared>(); + fg.push_back(nonlinearFactor); -// auto discreteFactor = boost::make_shared(); -// fg.push_back(discreteFactor); + EXPECT_LONGS_EQUAL(fg.size(), 1); -// EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 0); -// EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 1); -// EXPECT_LONGS_EQUAL(fg.nonlinearGraph().size(), 0); + fg = NonlinearHybridFactorGraph(); -// fg = NonlinearHybridFactorGraph(); + auto discreteFactor = boost::make_shared(); + fg.push_back(discreteFactor); -// auto dcFactor = boost::make_shared>(); -// fg.push_back(dcFactor); + EXPECT_LONGS_EQUAL(fg.size(), 1); -// EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 1); -// EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 0); -// EXPECT_LONGS_EQUAL(fg.nonlinearGraph().size(), 0); + fg = NonlinearHybridFactorGraph(); -// // Now do the same with GaussianHybridFactorGraph -// GaussianHybridFactorGraph ghfg; + auto dcFactor = boost::make_shared(); + fg.push_back(dcFactor); -// auto gaussianFactor = boost::make_shared(); -// ghfg.push_back(gaussianFactor); + EXPECT_LONGS_EQUAL(fg.size(), 1); -// EXPECT_LONGS_EQUAL(ghfg.dcGraph().size(), 0); -// EXPECT_LONGS_EQUAL(ghfg.discreteGraph().size(), 0); -// EXPECT_LONGS_EQUAL(ghfg.gaussianGraph().size(), 1); + // Now do the same with GaussianHybridFactorGraph + GaussianHybridFactorGraph ghfg; -// ghfg = GaussianHybridFactorGraph(); + auto gaussianFactor = boost::make_shared(); + ghfg.push_back(gaussianFactor); -// ghfg.push_back(discreteFactor); + EXPECT_LONGS_EQUAL(ghfg.size(), 1); -// EXPECT_LONGS_EQUAL(ghfg.dcGraph().size(), 0); -// EXPECT_LONGS_EQUAL(ghfg.discreteGraph().size(), 1); -// EXPECT_LONGS_EQUAL(ghfg.gaussianGraph().size(), 0); + ghfg = GaussianHybridFactorGraph(); + ghfg.push_back(discreteFactor); -// ghfg = GaussianHybridFactorGraph(); + EXPECT_LONGS_EQUAL(ghfg.size(), 1); -// ghfg.push_back(dcFactor); + ghfg = GaussianHybridFactorGraph(); + ghfg.push_back(dcFactor); -// EXPECT_LONGS_EQUAL(ghfg.dcGraph().size(), 1); -// EXPECT_LONGS_EQUAL(ghfg.discreteGraph().size(), 0); -// EXPECT_LONGS_EQUAL(ghfg.gaussianGraph().size(), 0); -// } + EXPECT_LONGS_EQUAL(ghfg.size(), 1); +} // /* // ****************************************************************************/ From 9e737dbbd8efe75b1a183fc34983b8fab75e6250 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 3 Jun 2022 12:47:21 -0400 Subject: [PATCH 19/74] initial pruning method --- gtsam/hybrid/HybridBayesNet.cpp | 95 +++++++++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 1292711d89..7d18347230 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -14,3 +14,98 @@ */ #include + +namespace gtsam { + +/* ************************************************************************* */ +/// Return the DiscreteKey vector as a set. +static std::set DiscreteKeysAsSet(const DiscreteKeys &dkeys) { + std::set s; + s.insert(dkeys.begin(), dkeys.end()); + return s; +} + +/* ************************************************************************* */ +HybridBayesNet HybridBayesNet::prune( + const DecisionTreeFactor::shared_ptr &discreteFactor) const { + /* To Prune, we visitWith every leaf in the GaussianMixture. + * For each leaf, using the assignment we can check the discrete decision tree + * for 0.0 probability, then just set the leaf to a nullptr. 
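+   *
+   * A usage sketch (hypothetical `graph`, `ordering` and `discreteFactor`;
+   * assumes the discrete factor already has impossible assignments at 0.0):
+   *
+   *   HybridBayesNet::shared_ptr bayesNet =
+   *       graph.eliminateSequential(ordering);
+   *   HybridBayesNet pruned = bayesNet->prune(discreteFactor);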
+ * + * We can later check the GaussianMixture for just nullptrs. + */ + + HybridBayesNet prunedBayesNetFragment; + + // Functional which loops over all assignments and create a set of + // GaussianConditionals + auto pruner = + [&](const Assignment &choices, + const GaussianFactor::shared_ptr &gf) -> GaussianFactor::shared_ptr { + // typecast so we can use this to get probability value + DiscreteValues values(choices); + + if ((*discreteFactor)(values) == 0.0) { + // empty aka null pointer + boost::shared_ptr null; + return null; + } else { + return gf; + } + }; + + // Go through all the conditionals in the + // Bayes Net and prune them as per discreteFactor. + for (size_t i = 0; i < this->size(); i++) { + HybridConditional::shared_ptr conditional = this->at(i); + + GaussianMixture::shared_ptr gaussianMixture = + boost::dynamic_pointer_cast(conditional->inner()); + + if (gaussianMixture) { + // We may have mixtures with less discrete keys than discreteFactor so we + // skip those since the label assignment does not exist. + auto gmKeySet = DiscreteKeysAsSet(gaussianMixture->discreteKeys()); + auto dfKeySet = DiscreteKeysAsSet(discreteFactor->discreteKeys()); + if (gmKeySet != dfKeySet) { + // Add the gaussianMixture which doesn't have to be pruned. + prunedBayesNetFragment.push_back( + boost::make_shared(gaussianMixture)); + continue; + } + + // The GaussianMixture stores all the conditionals and uneliminated + // factors in the factors tree. + auto gaussianMixtureTree = gaussianMixture->factors(); + + // Run the pruning to get a new, pruned tree + auto prunedFactors = gaussianMixtureTree.apply(pruner); + + DiscreteKeys discreteKeys = gaussianMixture->discreteKeys(); + // reverse keys to get a natural ordering + std::reverse(discreteKeys.begin(), discreteKeys.end()); + + // Convert to GaussianConditionals + auto prunedTree = GaussianMixture::Conditionals( + prunedFactors, [](const GaussianFactor::shared_ptr &factor) { + return boost::dynamic_pointer_cast(factor); + }); + + // Create the new gaussian mixture and add it to the bayes net. 
+ auto prunedGaussianMixture = boost::make_shared( + gaussianMixture->frontals(), gaussianMixture->parents(), discreteKeys, + prunedTree); + + prunedBayesNetFragment.push_back( + boost::make_shared(prunedGaussianMixture)); + + } else { + // Add the non-GaussianMixture conditional + prunedBayesNetFragment.push_back(conditional); + } + } + + return prunedBayesNetFragment; +} + +} // namespace gtsam From 89768cf692144a8ccc24d66f993bcd249059f32d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 3 Jun 2022 13:07:46 -0400 Subject: [PATCH 20/74] record continuous keys separately --- gtsam/hybrid/HybridFactor.cpp | 9 +++++---- gtsam/hybrid/HybridFactor.h | 12 +++++++++--- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/gtsam/hybrid/HybridFactor.cpp b/gtsam/hybrid/HybridFactor.cpp index 127c9761c0..cdae5faa67 100644 --- a/gtsam/hybrid/HybridFactor.cpp +++ b/gtsam/hybrid/HybridFactor.cpp @@ -50,7 +50,7 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1, /* ************************************************************************ */ HybridFactor::HybridFactor(const KeyVector &keys) - : Base(keys), isContinuous_(true), nrContinuous_(keys.size()) {} + : Base(keys), isContinuous_(true), continuousKeys_(keys) {} /* ************************************************************************ */ HybridFactor::HybridFactor(const KeyVector &continuousKeys, @@ -59,8 +59,8 @@ HybridFactor::HybridFactor(const KeyVector &continuousKeys, isDiscrete_((continuousKeys.size() == 0) && (discreteKeys.size() != 0)), isContinuous_((continuousKeys.size() != 0) && (discreteKeys.size() == 0)), isHybrid_((continuousKeys.size() != 0) && (discreteKeys.size() != 0)), - nrContinuous_(continuousKeys.size()), - discreteKeys_(discreteKeys) {} + discreteKeys_(discreteKeys), + continuousKeys_(continuousKeys) {} /* ************************************************************************ */ HybridFactor::HybridFactor(const DiscreteKeys &discreteKeys) @@ -73,7 +73,8 @@ bool HybridFactor::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); return e != nullptr && Base::equals(*e, tol) && isDiscrete_ == e->isDiscrete_ && isContinuous_ == e->isContinuous_ && - isHybrid_ == e->isHybrid_ && nrContinuous_ == e->nrContinuous_; + isHybrid_ == e->isHybrid_ && continuousKeys_ == e->continuousKeys_ && + discreteKeys_ == e->discreteKeys_; } /* ************************************************************************ */ diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 244fba4ccb..ffa28d3f71 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -50,7 +50,10 @@ class GTSAM_EXPORT HybridFactor : public Factor { size_t nrContinuous_ = 0; protected: + // Set of DiscreteKeys for this factor. DiscreteKeys discreteKeys_; + // For bookkeeping + KeyVector continuousKeys_; public: // typedefs needed to play nice with gtsam @@ -117,10 +120,13 @@ class GTSAM_EXPORT HybridFactor : public Factor { bool isHybrid() const { return isHybrid_; } /// Return the number of continuous variables in this factor. - size_t nrContinuous() const { return nrContinuous_; } + size_t nrContinuous() const { return continuousKeys_.size(); } - /// Return vector of discrete keys. - DiscreteKeys discreteKeys() const { return discreteKeys_; } + /// Return the discrete keys for this factor. + const DiscreteKeys &discreteKeys() const { return discreteKeys_; } + + /// Return only the continuous keys for this factor. 
+ const KeyVector &continuousKeys() const { return continuousKeys_; } /// @} }; From ad77a45a0d9ded056d01a6e9b7af4331e9ac3425 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 3 Jun 2022 13:08:32 -0400 Subject: [PATCH 21/74] formatting and docs update --- gtsam/hybrid/GaussianMixture.cpp | 15 ++++++--------- gtsam/hybrid/GaussianMixture.h | 11 +++++++---- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 0000575182..8970a3993a 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -36,8 +36,7 @@ GaussianMixture::GaussianMixture( conditionals_(conditionals) {} /* *******************************************************************************/ -const GaussianMixture::Conditionals & -GaussianMixture::conditionals() { +const GaussianMixture::Conditionals &GaussianMixture::conditionals() { return conditionals_; } @@ -48,8 +47,8 @@ GaussianMixture GaussianMixture::FromConditionals( const std::vector &conditionalsList) { Conditionals dt(discreteParents, conditionalsList); - return GaussianMixture(continuousFrontals, continuousParents, - discreteParents, dt); + return GaussianMixture(continuousFrontals, continuousParents, discreteParents, + dt); } /* *******************************************************************************/ @@ -66,8 +65,7 @@ GaussianMixture::Sum GaussianMixture::add( } /* *******************************************************************************/ -GaussianMixture::Sum -GaussianMixture::asGaussianFactorGraphTree() const { +GaussianMixture::Sum GaussianMixture::asGaussianFactorGraphTree() const { auto lambda = [](const GaussianFactor::shared_ptr &factor) { GaussianFactorGraph result; result.push_back(factor); @@ -77,15 +75,14 @@ GaussianMixture::asGaussianFactorGraphTree() const { } /* *******************************************************************************/ -bool GaussianMixture::equals(const HybridFactor &lf, - double tol) const { +bool GaussianMixture::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); return e != nullptr && BaseFactor::equals(*e, tol); } /* *******************************************************************************/ void GaussianMixture::print(const std::string &s, - const KeyFormatter &formatter) const { + const KeyFormatter &formatter) const { std::cout << s; if (isContinuous()) std::cout << "Continuous "; if (isDiscrete()) std::cout << "Discrete "; diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index e855067150..6100b49d19 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -28,11 +28,14 @@ namespace gtsam { /** * @brief A conditional of gaussian mixtures indexed by discrete variables, as - * part of a Bayes Network. + * part of a Bayes Network. This is the result of the elimination of a + * continuous variable in a hybrid scheme, such that the remaining variables are + * discrete+continuous. * - * Represents the conditional density P(X | M, Z) where X is a continuous random - * variable, M is the selection of discrete variables corresponding to a subset - * of the Gaussian variables and Z is parent of this node + * Represents the conditional density P(X | M, Z) where X is the set of + * continuous random variables, M is the selection of discrete variables + * corresponding to a subset of the Gaussian variables and Z is parent of this + * node . * * The probability P(x|y,z,...) 
is proportional to * \f$ \sum_i k_i \exp - \frac{1}{2} |R_i x - (d_i - S_i y - T_i z - ...)|^2 \f$ From c2e5061b711ecdf4f7308a01f0e3f6d5e7419391 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 3 Jun 2022 13:25:19 -0400 Subject: [PATCH 22/74] add pruning to HybridBayesNet --- gtsam/hybrid/HybridBayesNet.cpp | 34 ++++++++++++++++----------------- gtsam/hybrid/HybridBayesNet.h | 6 +++++- 2 files changed, 22 insertions(+), 18 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 7d18347230..d3c77d83e3 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -39,18 +39,18 @@ HybridBayesNet HybridBayesNet::prune( // Functional which loops over all assignments and create a set of // GaussianConditionals - auto pruner = - [&](const Assignment &choices, - const GaussianFactor::shared_ptr &gf) -> GaussianFactor::shared_ptr { + auto pruner = [&](const Assignment &choices, + const GaussianConditional::shared_ptr &conditional) + -> GaussianConditional::shared_ptr { // typecast so we can use this to get probability value DiscreteValues values(choices); if ((*discreteFactor)(values) == 0.0) { // empty aka null pointer - boost::shared_ptr null; + boost::shared_ptr null; return null; } else { - return gf; + return conditional; } }; @@ -74,28 +74,28 @@ HybridBayesNet HybridBayesNet::prune( continue; } - // The GaussianMixture stores all the conditionals and uneliminated - // factors in the factors tree. - auto gaussianMixtureTree = gaussianMixture->factors(); - // Run the pruning to get a new, pruned tree - auto prunedFactors = gaussianMixtureTree.apply(pruner); + GaussianMixture::Conditionals prunedTree = + gaussianMixture->conditionals().apply(pruner); DiscreteKeys discreteKeys = gaussianMixture->discreteKeys(); // reverse keys to get a natural ordering std::reverse(discreteKeys.begin(), discreteKeys.end()); - // Convert to GaussianConditionals - auto prunedTree = GaussianMixture::Conditionals( - prunedFactors, [](const GaussianFactor::shared_ptr &factor) { - return boost::dynamic_pointer_cast(factor); - }); + // Convert from boost::iterator_range to std::vector. + std::vector frontals, parents; + for (Key key : gaussianMixture->frontals()) { + frontals.push_back(key); + } + for (Key key : gaussianMixture->parents()) { + parents.push_back(key); + } // Create the new gaussian mixture and add it to the bayes net. auto prunedGaussianMixture = boost::make_shared( - gaussianMixture->frontals(), gaussianMixture->parents(), discreteKeys, - prunedTree); + frontals, parents, discreteKeys, prunedTree); + // Type-erase and add to the pruned Bayes Net fragment. 
prunedBayesNetFragment.push_back( boost::make_shared(prunedGaussianMixture)); diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 43eead2801..834c4c3efd 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -17,6 +17,7 @@ #pragma once +#include #include #include @@ -35,7 +36,10 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { using sharedConditional = boost::shared_ptr; /** Construct empty bayes net */ - HybridBayesNet() = default; + HybridBayesNet() : Base() {} + + HybridBayesNet prune( + const DecisionTreeFactor::shared_ptr &discreteFactor) const; }; } // namespace gtsam From 28db8b20ff15e3b66e5e61c72e385e46212af702 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 4 Jun 2022 11:45:42 -0400 Subject: [PATCH 23/74] use KeyVector and iterator constructor --- gtsam/hybrid/HybridBayesNet.cpp | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index d3c77d83e3..0403ca0d33 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -82,14 +82,12 @@ HybridBayesNet HybridBayesNet::prune( // reverse keys to get a natural ordering std::reverse(discreteKeys.begin(), discreteKeys.end()); - // Convert from boost::iterator_range to std::vector. - std::vector frontals, parents; - for (Key key : gaussianMixture->frontals()) { - frontals.push_back(key); - } - for (Key key : gaussianMixture->parents()) { - parents.push_back(key); - } + // Convert from boost::iterator_range to KeyVector + // so we can pass it to constructor. + KeyVector frontals(gaussianMixture->frontals().begin(), + gaussianMixture->frontals().end()), + parents(gaussianMixture->parents().begin(), + gaussianMixture->parents().end()); // Create the new gaussian mixture and add it to the bayes net. auto prunedGaussianMixture = boost::make_shared( From 5c5c05370a14141a9ec60628bdb1327cab98f1de Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 7 Jun 2022 18:38:21 -0400 Subject: [PATCH 24/74] Add HybridFactorGraph base class and more methods for adding gaussian factors --- gtsam/hybrid/HybridFactorGraph.h | 148 +++++++++++++++++++++++ gtsam/hybrid/HybridGaussianFactorGraph.h | 61 ++++++++-- 2 files changed, 202 insertions(+), 7 deletions(-) create mode 100644 gtsam/hybrid/HybridFactorGraph.h diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h new file mode 100644 index 0000000000..ce0241d255 --- /dev/null +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -0,0 +1,148 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridFactorGraph.h + * @brief Hybrid factor graph base class that uses type erasure + * @author Varun Agrawal + * @date May 28, 2022 + */ + +#pragma once + +#include +#include +#include +#include +#include + +#include +namespace gtsam { + +using SharedFactor = boost::shared_ptr; + +/** + * Hybrid Factor Graph + * ----------------------- + * This is the base hybrid factor graph. + * Everything inside needs to be hybrid factor or hybrid conditional. 
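+ *
+ * A construction sketch (hypothetical factors, for illustration only):
+ *
+ *   DiscreteKey m(M(1), 2);
+ *   HybridFactorGraph graph;
+ *   graph.push_discrete(boost::make_shared<DecisionTreeFactor>(m, "2 8"));
+ *   graph.push_hybrid(
+ *       boost::make_shared<HybridDiscreteFactor>(DecisionTreeFactor(m, "1 1")));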
+ */ +class HybridFactorGraph : public FactorGraph { + public: + using Base = FactorGraph; + using This = HybridFactorGraph; ///< this class + using shared_ptr = boost::shared_ptr; ///< shared_ptr to This + + using Values = gtsam::Values; ///< backwards compatibility + using Indices = KeyVector; ///> map from keys to values + + protected: + /// Check if FACTOR type is derived from DiscreteFactor. + template + using IsDiscrete = typename std::enable_if< + std::is_base_of::value>::type; + + /// Check if FACTOR type is derived from HybridFactor. + template + using IsHybrid = typename std::enable_if< + std::is_base_of::value>::type; + + public: + /// @name Constructors + /// @{ + + /// Default constructor + HybridFactorGraph() = default; + + /** + * Implicit copy/downcast constructor to override explicit template container + * constructor. In BayesTree this is used for: + * `cachedSeparatorMarginal_.reset(*separatorMarginal)` + * */ + template + HybridFactorGraph(const FactorGraph& graph) : Base(graph) {} + + /// @} + + // Allow use of selected FactorGraph methods: + using Base::empty; + using Base::reserve; + using Base::size; + using Base::operator[]; + using Base::add; + using Base::push_back; + using Base::resize; + + /** + * Add a discrete factor *pointer* to the internal discrete graph + * @param discreteFactor - boost::shared_ptr to the factor to add + */ + template + IsDiscrete push_discrete( + const boost::shared_ptr& discreteFactor) { + Base::push_back(boost::make_shared(discreteFactor)); + } + + /** + * Add a discrete-continuous (Hybrid) factor *pointer* to the graph + * @param hybridFactor - boost::shared_ptr to the factor to add + */ + template + IsHybrid push_hybrid(const boost::shared_ptr& hybridFactor) { + Base::push_back(hybridFactor); + } + + /// delete emplace_shared. + template + void emplace_shared(Args&&... args) = delete; + + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsDiscrete emplace_discrete(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_discrete(factor); + } + + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsHybrid emplace_hybrid(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_hybrid(factor); + } + + /** + * @brief Add a single factor shared pointer to the hybrid factor graph. + * Dynamically handles the factor type and assigns it to the correct + * underlying container. + * + * @param sharedFactor The factor to add to this factor graph. 
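+   *
+   * Dispatch sketch (hypothetical `m` and `dtf`): a DiscreteFactor is routed
+   * through push_discrete, a HybridFactor through push_hybrid:
+   *
+   *   graph.push_back(boost::make_shared<DecisionTreeFactor>(m, "2 8"));
+   *   graph.push_back(boost::make_shared<HybridDiscreteFactor>(dtf));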
+ */ + void push_back(const SharedFactor& sharedFactor) { + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_discrete(p); + } + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_hybrid(p); + } + } + + /** Constructor from iterator over factors (shared_ptr or plain objects) */ + template + void push_back(ITERATOR firstFactor, ITERATOR lastFactor) { + for (auto&& it = firstFactor; it != lastFactor; it++) { + push_back(*it); + } + } +}; + +} // namespace gtsam \ No newline at end of file diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 0188aa652c..1d49904fed 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -19,6 +19,8 @@ #pragma once #include +#include +#include #include #include #include @@ -53,10 +55,9 @@ struct EliminationTraits { typedef HybridBayesNet BayesNetType; ///< Type of Bayes net from sequential elimination typedef HybridEliminationTree - EliminationTreeType; ///< Type of elimination tree - typedef HybridBayesTree BayesTreeType; ///< Type of Bayes tree - typedef HybridJunctionTree - JunctionTreeType; ///< Type of Junction tree + EliminationTreeType; ///< Type of elimination tree + typedef HybridBayesTree BayesTreeType; ///< Type of Bayes tree + typedef HybridJunctionTree JunctionTreeType; ///< Type of Junction tree /// The default dense elimination function static std::pair, boost::shared_ptr > @@ -72,10 +73,16 @@ struct EliminationTraits { * Everything inside needs to be hybrid factor or hybrid conditional. */ class GTSAM_EXPORT HybridGaussianFactorGraph - : public FactorGraph, + : public HybridFactorGraph, public EliminateableFactorGraph { + protected: + /// Check if FACTOR type is derived from GaussianFactor. + template + using IsGaussian = typename std::enable_if< + std::is_base_of::value>::type; + public: - using Base = FactorGraph; + using Base = HybridFactorGraph; using This = HybridGaussianFactorGraph; ///< this class using BaseEliminateable = EliminateableFactorGraph; ///< for elimination @@ -100,7 +107,13 @@ class GTSAM_EXPORT HybridGaussianFactorGraph /// @} - using FactorGraph::add; + using Base::empty; + using Base::reserve; + using Base::size; + using Base::operator[]; + using Base::add; + using Base::push_back; + using Base::resize; /// Add a Jacobian factor to the factor graph. void add(JacobianFactor&& factor); @@ -113,6 +126,40 @@ class GTSAM_EXPORT HybridGaussianFactorGraph /// Add a DecisionTreeFactor as a shared ptr. void add(boost::shared_ptr factor); + + /** + * Add a gaussian factor *pointer* to the internal gaussian factor graph + * @param gaussianFactor - boost::shared_ptr to the factor to add + */ + template + IsGaussian push_gaussian( + const boost::shared_ptr& gaussianFactor) { + Base::Base::push_back( + boost::make_shared(gaussianFactor)); + } + + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsGaussian emplace_gaussian(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_gaussian(factor); + } + + /** + * @brief Add a single factor shared pointer to the hybrid factor graph. + * Dynamically handles the factor type and assigns it to the correct + * underlying container. + * + * @param sharedFactor The factor to add to this factor graph. 
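+   *
+   * Sketch (hypothetical `ghfg`): a plain GaussianFactor is wrapped via
+   * push_gaussian, while discrete and hybrid factors fall through to the
+   * base class dispatch:
+   *
+   *   ghfg.push_back(boost::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1));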
+ */ + void push_back(const SharedFactor& sharedFactor) { + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_gaussian(p); + } else { + Base::push_back(sharedFactor); + } + } }; } // namespace gtsam From 44079d13b4357ae958e754b6ed3769c133f0f565 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 7 Jun 2022 18:38:46 -0400 Subject: [PATCH 25/74] refactor testGaussianHybridFactorGraph to include comments and valid tests --- .../tests/testGaussianHybridFactorGraph.cpp | 225 ++++++------------ 1 file changed, 70 insertions(+), 155 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp index 552bb18f59..9e1a2efdd4 100644 --- a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp @@ -58,26 +58,30 @@ using gtsam::symbol_shorthand::X; using gtsam::symbol_shorthand::Y; /* ************************************************************************* */ -TEST(HybridGaussianFactorGraph, creation) { - HybridConditional test; +TEST(HybridGaussianFactorGraph, Creation) { + HybridConditional conditional; HybridGaussianFactorGraph hfg; - hfg.add(HybridGaussianFactor(JacobianFactor(0, I_3x3, Z_3x1))); + hfg.add(HybridGaussianFactor(JacobianFactor(X(0), I_3x3, Z_3x1))); + + // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor + // graph + GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{C(0), 2}), + GaussianMixture::Conditionals( + C(0), + boost::make_shared( + X(0), Z_3x1, I_3x3, X(1), I_3x3), + boost::make_shared( + X(0), Vector3::Ones(), I_3x3, X(1), I_3x3))); + hfg.add(gm); - GaussianMixture clgc( - {X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{C(0), 2}), - GaussianMixture::Conditionals( - C(0), - boost::make_shared(X(0), Z_3x1, I_3x3, X(1), - I_3x3), - boost::make_shared(X(0), Vector3::Ones(), I_3x3, - X(1), I_3x3))); - GTSAM_PRINT(clgc); + EXPECT_LONGS_EQUAL(2, hfg.size()); } /* ************************************************************************* */ -TEST(HybridGaussianFactorGraph, eliminate) { +TEST(HybridGaussianFactorGraph, EliminateSequential) { + // Test elimination of a single variable. 
HybridGaussianFactorGraph hfg;
 
   hfg.add(HybridGaussianFactor(JacobianFactor(0, I_3x3, Z_3x1)));
@@ -88,11 +92,13 @@ TEST(HybridGaussianFactorGraph, eliminate) {
 }
 
 /* ************************************************************************* */
-TEST(HybridGaussianFactorGraph, eliminateMultifrontal) {
+TEST(HybridGaussianFactorGraph, EliminateMultifrontal) {
+  // Test multifrontal elimination
   HybridGaussianFactorGraph hfg;
 
   DiscreteKey c(C(1), 2);
 
+  // Add priors on x0 and c1
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
   hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8})));
 
@@ -110,9 +116,12 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
 
   DiscreteKey c1(C(1), 2);
 
+  // Add prior on x0
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
+  // Add factor between x0 and x1
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
+  // Add a gaussian mixture factor ϕ(x1, c1)
   DecisionTree dt(
       C(1), boost::make_shared(X(1), I_3x3, Z_3x1),
       boost::make_shared(X(1), I_3x3, Vector3::Ones()));
@@ -123,9 +132,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
       hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {C(1)}));
 
   auto dc = result->at(2)->asDiscreteConditional();
-  DiscreteValues dv;
-  dv[C(1)] = 0;
-  EXPECT_DOUBLES_EQUAL(0.6225, dc->operator()(dv), 1e-3);
+  DiscreteValues mode;
+  mode[C(1)] = 0;
+  EXPECT_DOUBLES_EQUAL(0.6225, (*dc)(mode), 1e-3);
 }
 
 /* ************************************************************************* */
@@ -134,26 +143,26 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
 
   DiscreteKey c1(C(1), 2);
 
+  // Add prior on x0
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
+  // Add factor between x0 and x1
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
   DecisionTree dt(
       C(1), boost::make_shared(X(1), I_3x3, Z_3x1),
       boost::make_shared(X(1), I_3x3, Vector3::Ones()));
-  hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));
-  // hfg.add(GaussianMixtureFactor({X(0)}, {c1}, dt));
+
+  // Discrete probability table for c1
   hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c1, {2, 8})));
+  // Joint discrete probability table for c1, c2
   hfg.add(HybridDiscreteFactor(
       DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4")));
-  // hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(2), 2}, {C(3), 2}}, "1
-  // 2 3 4"))); hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(3), 2},
-  // {C(1), 2}}, "1 2 2 1")));
 
   auto result = hfg.eliminateSequential(
       Ordering::ColamdConstrainedLast(hfg, {C(1), C(2)}));
 
-  GTSAM_PRINT(*result);
+  EXPECT_LONGS_EQUAL(4, result->size());
 }
 
 /* ************************************************************************* */
@@ -165,28 +174,19 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
-  // DecisionTree dt(
-  //     C(1), boost::make_shared(X(1), I_3x3, Z_3x1),
-  //     boost::make_shared(X(1), I_3x3, Vector3::Ones()));
-
-  hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));
   hfg.add(GaussianMixtureFactor::FromFactors(
       {X(1)}, {{C(1), 2}},
       {boost::make_shared(X(1), I_3x3, Z_3x1),
        boost::make_shared(X(1), I_3x3, Vector3::Ones())}));
 
-  hfg.add(GaussianMixtureFactor({X(0)}, {c1}, dt));
   hfg.add(DecisionTreeFactor(c1, {2, 8}));
   hfg.add(DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4"));
-  // hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(2), 2}, {C(3), 2}}, "1
-  // 2 3 4"))); hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(3), 2},
-  // {C(1), 2}}, "1 2 2 1")));
 
   auto result
= hfg.eliminateMultifrontal( Ordering::ColamdConstrainedLast(hfg, {C(1), C(2)})); - GTSAM_PRINT(*result); - GTSAM_PRINT(*result->marginalFactor(C(2))); + // GTSAM_PRINT(*result); + // GTSAM_PRINT(*result->marginalFactor(C(2))); } /* ************************************************************************* */ @@ -195,30 +195,28 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { DiscreteKey c(C(1), 2); + // Prior on x0 hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); + // Factor between x0-x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); + // Decision tree with different modes on x1 DecisionTree dt( C(1), boost::make_shared(X(1), I_3x3, Z_3x1), boost::make_shared(X(1), I_3x3, Vector3::Ones())); + // Hybrid factor P(x1|c1) hfg.add(GaussianMixtureFactor({X(1)}, {c}, dt)); + // Prior factor on c1 hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8}))); - // hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 - // 2 3 4"))); + // Get a constrained ordering keeping c1 last auto ordering_full = Ordering::ColamdConstrainedLast(hfg, {C(1)}); + // Returns a Hybrid Bayes Tree with distribution P(x0|x1)P(x1|c1)P(c1) HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full); - GTSAM_PRINT(*hbt); - /* - Explanation: the Junction tree will need to reeliminate to get to the marginal - on X(1), which is not possible because it involves eliminating discrete before - continuous. The solution to this, however, is in Murphy02. TLDR is that this - is 1. expensive and 2. inexact. neverless it is doable. And I believe that we - should do this. - */ + EXPECT_LONGS_EQUAL(3, hbt->size()); } /* ************************************************************************* */ @@ -233,10 +231,6 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1)); { - // DecisionTree dt( - // C(0), boost::make_shared(X(0), I_3x3, Z_3x1), - // boost::make_shared(X(0), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor::FromFactors( {X(0)}, {{C(0), 2}}, {boost::make_shared(X(0), I_3x3, Z_3x1), @@ -249,7 +243,6 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(GaussianMixtureFactor({X(2)}, {{C(1), 2}}, dt1)); } - // hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8}))); hfg.add(HybridDiscreteFactor( DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4"))); @@ -273,26 +266,36 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { auto ordering_full = Ordering::ColamdConstrainedLast(hfg, {C(0), C(1), C(2), C(3)}); - GTSAM_PRINT(ordering_full); - HybridBayesTree::shared_ptr hbt; HybridGaussianFactorGraph::shared_ptr remaining; std::tie(hbt, remaining) = hfg.eliminatePartialMultifrontal(ordering_full); - GTSAM_PRINT(*hbt); - - GTSAM_PRINT(*remaining); + // GTSAM_PRINT(*hbt); + // GTSAM_PRINT(*remaining); - hbt->dot(std::cout); /* - Explanation: the Junction tree will need to reeliminate to get to the marginal - on X(1), which is not possible because it involves eliminating discrete before - continuous. The solution to this, however, is in Murphy02. TLDR is that this - is 1. expensive and 2. inexact. neverless it is doable. And I believe that we - should do this. + (Fan) Explanation: the Junction tree will need to reeliminate to get to the + marginal on X(1), which is not possible because it involves eliminating + discrete before continuous. The solution to this, however, is in Murphy02. + TLDR is that this is 1. expensive and 2. inexact. 
nevertheless it is doable. + And I believe that we should do this. */ } +void dotPrint(const HybridGaussianFactorGraph::shared_ptr &hfg, + const HybridBayesTree::shared_ptr &hbt, + const Ordering &ordering) { + DotWriter dw; + dw.positionHints['c'] = 2; + dw.positionHints['x'] = 1; + std::cout << hfg->dot(DefaultKeyFormatter, dw); + std::cout << "\n"; + hbt->dot(std::cout); + + std::cout << "\n"; + std::cout << hfg->eliminateSequential(ordering)->dot(DefaultKeyFormatter, dw); +} + /* ************************************************************************* */ // TODO(fan): make a graph like Varun's paper one TEST(HybridGaussianFactorGraph, Switching) { @@ -326,9 +329,6 @@ TEST(HybridGaussianFactorGraph, Switching) { for (auto &l : lvls) { l = -l; } - std::copy(lvls.begin(), lvls.end(), - std::ostream_iterator(std::cout, ",")); - std::cout << "\n"; } { std::vector naturalC(N - 1); @@ -342,63 +342,19 @@ TEST(HybridGaussianFactorGraph, Switching) { // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); std::tie(ndC, lvls) = makeBinaryOrdering(ordC); std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering)); - std::copy(lvls.begin(), lvls.end(), - std::ostream_iterator(std::cout, ",")); } auto ordering_full = Ordering(ordering); - // auto ordering_full = - // Ordering(); - - // for (int i = 1; i <= 9; i++) { - // ordering_full.push_back(X(i)); - // } - - // for (int i = 1; i < 9; i++) { - // ordering_full.push_back(C(i)); - // } - - // auto ordering_full = - // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), - // X(5), - // C(1), C(2), C(3), C(4), C(5), C(6), C(7), C(8)}); - // GTSAM_PRINT(*hfg); - GTSAM_PRINT(ordering_full); + // GTSAM_PRINT(ordering_full); HybridBayesTree::shared_ptr hbt; HybridGaussianFactorGraph::shared_ptr remaining; std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full); // GTSAM_PRINT(*hbt); - // GTSAM_PRINT(*remaining); - - { - DotWriter dw; - dw.positionHints['c'] = 2; - dw.positionHints['x'] = 1; - std::cout << hfg->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; - hbt->dot(std::cout); - } - - { - DotWriter dw; - // dw.positionHints['c'] = 2; - // dw.positionHints['x'] = 1; - std::cout << "\n"; - std::cout << hfg->eliminateSequential(ordering_full) - ->dot(DefaultKeyFormatter, dw); - } - /* - Explanation: the Junction tree will need to reeliminate to get to the marginal - on X(1), which is not possible because it involves eliminating discrete before - continuous. The solution to this, however, is in Murphy02. TLDR is that this - is 1. expensive and 2. inexact. neverless it is doable. And I believe that we - should do this. 
- */ - hbt->marginalFactor(C(11))->print("HBT: "); + // hbt->marginalFactor(C(11))->print("HBT: "); } /* ************************************************************************* */ @@ -434,9 +390,6 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { for (auto &l : lvls) { l = -l; } - std::copy(lvls.begin(), lvls.end(), - std::ostream_iterator(std::cout, ",")); - std::cout << "\n"; } { std::vector naturalC(N - 1); @@ -450,40 +403,16 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); std::tie(ndC, lvls) = makeBinaryOrdering(ordC); std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering)); - std::copy(lvls.begin(), lvls.end(), - std::ostream_iterator(std::cout, ",")); } auto ordering_full = Ordering(ordering); // GTSAM_PRINT(*hfg); - GTSAM_PRINT(ordering_full); + // GTSAM_PRINT(ordering_full); HybridBayesTree::shared_ptr hbt; HybridGaussianFactorGraph::shared_ptr remaining; std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full); - // GTSAM_PRINT(*hbt); - - // GTSAM_PRINT(*remaining); - - { - DotWriter dw; - dw.positionHints['c'] = 2; - dw.positionHints['x'] = 1; - std::cout << hfg->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; - hbt->dot(std::cout); - } - - { - DotWriter dw; - // dw.positionHints['c'] = 2; - // dw.positionHints['x'] = 1; - std::cout << "\n"; - std::cout << hfg->eliminateSequential(ordering_full) - ->dot(DefaultKeyFormatter, dw); - } - auto new_fg = makeSwitchingChain(12); auto isam = HybridGaussianISAM(*hbt); @@ -492,13 +421,14 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { factorGraph.push_back(new_fg->at(new_fg->size() - 2)); factorGraph.push_back(new_fg->at(new_fg->size() - 1)); isam.update(factorGraph); - std::cout << isam.dot(); - isam.marginalFactor(C(11))->print(); + // std::cout << isam.dot(); + // isam.marginalFactor(C(11))->print(); } } /* ************************************************************************* */ -TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { +// TODO(Varun) Actually test something! 
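+// (A hedged sketch of what such a test could assert, reusing only calls that
+// already appear in this file; expectedNrCliques is hypothetical and would
+// have to be derived for this graph:
+//   HybridBayesTree::shared_ptr hbt;
+//   HybridGaussianFactorGraph::shared_ptr remaining;
+//   std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(Ordering(ordX));
+//   EXPECT_LONGS_EQUAL(expectedNrCliques, hbt->size());
+// )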
+TEST_DISABLED(HybridGaussianFactorGraph, SwitchingTwoVar) { const int N = 7; auto hfg = makeSwitchingChain(N, X); hfg->push_back(*makeSwitchingChain(N, Y, D)); @@ -517,21 +447,6 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { ordX.emplace_back(Y(i)); } - // { - // KeyVector ndX; - // std::vector lvls; - // std::tie(ndX, lvls) = makeBinaryOrdering(naturalX); - // std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering)); - // std::copy(lvls.begin(), lvls.end(), - // std::ostream_iterator(std::cout, ",")); - // std::cout << "\n"; - - // for (size_t i = 0; i < N; i++) { - // ordX.emplace_back(X(ndX[i])); - // ordX.emplace_back(Y(ndX[i])); - // } - // } - for (size_t i = 1; i <= N - 1; i++) { ordX.emplace_back(C(i)); } From 374e3cbc7a882c8e8a2f53aa84535ec1fb5fcbd3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 7 Jun 2022 18:39:10 -0400 Subject: [PATCH 26/74] Improved hybrid bayes net and tests --- gtsam/hybrid/HybridBayesNet.cpp | 27 +++++++ gtsam/hybrid/HybridBayesNet.h | 22 ++++++ gtsam/hybrid/tests/testHybridBayesNet.cpp | 92 +++++++++++++++++++++++ 3 files changed, 141 insertions(+) create mode 100644 gtsam/hybrid/tests/testHybridBayesNet.cpp diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 1292711d89..b3df73bf20 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -10,7 +10,34 @@ * @file HybridBayesNet.cpp * @brief A bayes net of Gaussian Conditionals indexed by discrete keys. * @author Fan Jiang + * @author Varun Agrawal * @date January 2022 */ #include + +namespace gtsam { + +/* ************************************************************************* */ +GaussianMixture::shared_ptr HybridBayesNet::atGaussian(size_t i) const { + return boost::dynamic_pointer_cast(factors_.at(i)->inner()); +} + +/* ************************************************************************* */ +DiscreteConditional::shared_ptr HybridBayesNet::atDiscrete(size_t i) const { + return boost::dynamic_pointer_cast( + factors_.at(i)->inner()); +} + +/* ************************************************************************* */ +GaussianBayesNet HybridBayesNet::choose( + const DiscreteValues &assignment) const { + GaussianBayesNet gbn; + for (size_t idx = 0; idx < size(); idx++) { + GaussianMixture gm = *this->atGaussian(idx); + gbn.push_back(gm(assignment)); + } + return gbn; +} + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 43eead2801..412b208b93 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -19,6 +19,7 @@ #include #include +#include namespace gtsam { @@ -36,6 +37,27 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { /** Construct empty bayes net */ HybridBayesNet() = default; + + /// Add a discrete conditional to the Bayes Net. + void add(const DiscreteKey &key, const std::string &table) { + push_back( + HybridConditional(boost::make_shared(key, table))); + } + + /// Get a specific Gaussian mixture by index `i`. + GaussianMixture::shared_ptr atGaussian(size_t i) const; + + /// Get a specific discrete conditional by index `i`. + DiscreteConditional::shared_ptr atDiscrete(size_t i) const; + + /** + * @brief Get the Gaussian Bayes Net which corresponds to a specific discrete + * value assignment. + * + * @param assignment The discrete value assignment for the discrete keys. 
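+ * Hypothetical usage sketch: with binary modes m1 and m2 in the net, the
+ * assignment {m1: 0, m2: 1} picks that branch of every Gaussian mixture,
+ * so the result is a purely Gaussian Bayes net for those mode values.
+ *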
+ * @return GaussianBayesNet + */ + GaussianBayesNet choose(const DiscreteValues &assignment) const; }; } // namespace gtsam diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp new file mode 100644 index 0000000000..34133ab0b3 --- /dev/null +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -0,0 +1,92 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridBayesNet.cpp + * @brief Unit tests for HybridBayesNet + * @author Varun Agrawal + * @author Fan Jiang + * @author Frank Dellaert + * @date December 2021 + */ + +#include +#include + +// Include for test suite +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::M; +using symbol_shorthand::X; + +static const DiscreteKey Asia(0, 2); + +/* ****************************************************************************/ +// Test creation +TEST(HybridBayesNet, Creation) { + HybridBayesNet bayesNet; + + bayesNet.add(Asia, "99/1"); + + DiscreteConditional expected(Asia, "99/1"); + + CHECK(bayesNet.atDiscrete(0)); + auto& df = *bayesNet.atDiscrete(0); + EXPECT(df.equals(expected)); +} + +/* ****************************************************************************/ +// Test choosing an assignment of conditionals +TEST(HybridBayesNet, Choose) { + Switching s(4); + + Ordering ordering; + for (auto&& kvp : s.linearizationPoint) { + ordering += kvp.key; + } + + HybridBayesNet::shared_ptr hybridBayesNet; + HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; + std::tie(hybridBayesNet, remainingFactorGraph) = + s.linearizedFactorGraph.eliminatePartialSequential(ordering); + + DiscreteValues assignment; + assignment[M(1)] = 1; + assignment[M(2)] = 1; + assignment[M(3)] = 0; + + GaussianBayesNet gbn = hybridBayesNet->choose(assignment); + + EXPECT_LONGS_EQUAL(4, gbn.size()); + + EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( + hybridBayesNet->atGaussian(0)))(assignment), + *gbn.at(0))); + EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( + hybridBayesNet->atGaussian(1)))(assignment), + *gbn.at(1))); + EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( + hybridBayesNet->atGaussian(2)))(assignment), + *gbn.at(2))); + EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( + hybridBayesNet->atGaussian(3)))(assignment), + *gbn.at(3))); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ \ No newline at end of file From 8d6a225851b5a2751452cedf598c660446b99161 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 7 Jun 2022 18:39:20 -0400 Subject: [PATCH 27/74] fix printing --- gtsam/hybrid/GaussianMixtureFactor.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 6c90ee6a71..7b7c1899fe 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -108,7 +108,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { bool equals(const 
HybridFactor &lf, double tol = 1e-9) const override; void print( - const std::string &s = "HybridFactor\n", + const std::string &s = "GaussianMixtureFactor\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; /// @} From 00be610e18ced3a1226a5271a6f1ad275d5c3539 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 7 Jun 2022 18:39:33 -0400 Subject: [PATCH 28/74] Add Switching class and make it linear --- gtsam/hybrid/tests/Switching.h | 139 +++++++++++++++++++++++++++++++-- 1 file changed, 132 insertions(+), 7 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index c081b8e87e..85c86f64e0 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -18,18 +18,136 @@ #include #include +#include #include #include #include #include +#include #pragma once -using gtsam::symbol_shorthand::C; -using gtsam::symbol_shorthand::X; - namespace gtsam { -inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( + +using symbol_shorthand::C; +using symbol_shorthand::M; +using symbol_shorthand::X; + +/* ****************************************************************************/ +// Test fixture with switching network. +// TODO(Varun) Currently this is only linear. We need to add nonlinear support +// and then update to +// https://github.com/borglab/gtsam/pull/973/files#diff-58c02b3b197ebf731694946e87762d252e9eaa2f5c6c4ba22d618085b321ca23 +struct Switching { + size_t K; + DiscreteKeys modes; + HybridGaussianFactorGraph linearizedFactorGraph; + Values linearizationPoint; + + using MotionModel = BetweenFactor; + // using MotionMixture = MixtureFactor; + + /// Create with given number of time steps. + Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1) + : K(K) { + // Create DiscreteKeys for binary K modes, modes[0] will not be used. + modes = addDiscreteModes(K); + + // Create hybrid factor graph. + // Add a prior on X(1). + auto prior = boost::make_shared( + X(1), Matrix11::Ones() / prior_sigma, Vector1::Zero()); + linearizedFactorGraph.push_back(prior); + + // Add "motion models". + linearizedFactorGraph = addMotionModels(K); + + // Add measurement factors + for (size_t k = 1; k <= K; k++) { + linearizedFactorGraph.emplace_gaussian( + X(k), Matrix11::Ones() / 0.1, Vector1::Zero()); + } + + // Add "mode chain" + linearizedFactorGraph = addModeChain(linearizedFactorGraph); + + // Create the linearization point. + for (size_t k = 1; k <= K; k++) { + linearizationPoint.insert(X(k), static_cast(k)); + } + } + + /// Create DiscreteKeys for K binary modes. + DiscreteKeys addDiscreteModes(size_t K) { + DiscreteKeys m; + for (size_t k = 0; k <= K; k++) { + m.emplace_back(M(k), 2); + } + return m; + } + + /// Helper function to add motion models for each [k, k+1] interval. 
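+ /// Each interval gets a two-component mixture selected by mode m(k): a
+ /// "still" model with zero offset between x(k) and x(k+1), and a "moving"
+ /// model with a one-unit offset (see motionModels below).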
+ HybridGaussianFactorGraph addMotionModels(size_t K) { + HybridGaussianFactorGraph hgfg; + for (size_t k = 1; k < K; k++) { + auto keys = {X(k), X(k + 1)}; + auto components = motionModels(k); + hgfg.emplace_hybrid(keys, DiscreteKeys{modes[k]}, + components); + } + return hgfg; + } + + // Create motion models for a given time step + static std::vector motionModels( + size_t k, double sigma = 1.0) { + auto noise_model = noiseModel::Isotropic::Sigma(1, sigma); + auto still = boost::make_shared( + X(k), -Matrix11::Ones() / sigma, X(k + 1), + Matrix11::Ones() / sigma, Vector1::Zero()), + moving = boost::make_shared( + X(k), -Matrix11::Ones() / sigma, X(k + 1), + Matrix11::Ones() / sigma, -Vector1::Ones() / sigma); + return {boost::dynamic_pointer_cast(still), + boost::dynamic_pointer_cast(moving)}; + } + + // // Add "mode chain" to NonlinearHybridFactorGraph + // void addModeChain(HybridNonlinearFactorGraph& fg) { + // auto prior = boost::make_shared(modes[1], "1/1"); + // fg.push_discrete(prior); + // for (size_t k = 1; k < K - 1; k++) { + // auto parents = {modes[k]}; + // auto conditional = boost::make_shared( + // modes[k + 1], parents, "1/2 3/2"); + // fg.push_discrete(conditional); + // } + // } + + // Add "mode chain" to GaussianHybridFactorGraph + HybridGaussianFactorGraph addModeChain(HybridGaussianFactorGraph& fg) { + auto prior = boost::make_shared(modes[1], "1/1"); + fg.push_discrete(prior); + for (size_t k = 1; k < K - 1; k++) { + auto parents = {modes[k]}; + auto conditional = boost::make_shared( + modes[k + 1], parents, "1/2 3/2"); + fg.push_discrete(conditional); + } + return fg; + } +}; + +/** + * @brief Create a switching system chain. A switching system is a continuous + * system which depends on a discrete mode at each time step of the chain. + * + * @param n The number of chain elements. + * @param keyFunc The functional to help specify the continuous key. + * @param dKeyFunc The functional to help specify the discrete key. + * @return HybridGaussianFactorGraph::shared_ptr + */ +HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( size_t n, std::function keyFunc = X, std::function dKeyFunc = C) { HybridGaussianFactorGraph hfg; @@ -54,9 +172,16 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( return boost::make_shared(std::move(hfg)); } -inline std::pair> makeBinaryOrdering( - std::vector &input) { +/** + * @brief + * + * @param input The original ordering. 
+ * @return std::pair> + */ +std::pair> makeBinaryOrdering( + std::vector& input) { KeyVector new_order; + std::vector levels(input.size()); std::function::iterator, std::vector::iterator, int)> @@ -79,7 +204,7 @@ inline std::pair> makeBinaryOrdering( bsg(input.begin(), input.end(), 0); std::reverse(new_order.begin(), new_order.end()); - // std::reverse(levels.begin(), levels.end()); + return {new_order, levels}; } From 26e0cef48d04f01ff76d5ec09653a6d41b4f25c8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 7 Jun 2022 19:56:19 -0400 Subject: [PATCH 29/74] fixes --- gtsam/hybrid/HybridBayesNet.h | 3 +++ gtsam/hybrid/HybridFactorGraph.h | 8 -------- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 412b208b93..7fe19c0ea7 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -38,6 +38,9 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { /** Construct empty bayes net */ HybridBayesNet() = default; + /// Add HybridConditional to Bayes Net + using Base::add; + /// Add a discrete conditional to the Bayes Net. void add(const DiscreteKey &key, const std::string &table) { push_back( diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h index ce0241d255..cbbc45590f 100644 --- a/gtsam/hybrid/HybridFactorGraph.h +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -135,14 +135,6 @@ class HybridFactorGraph : public FactorGraph { push_hybrid(p); } } - - /** Constructor from iterator over factors (shared_ptr or plain objects) */ - template - void push_back(ITERATOR firstFactor, ITERATOR lastFactor) { - for (auto&& it = firstFactor; it != lastFactor; it++) { - push_back(*it); - } - } }; } // namespace gtsam \ No newline at end of file From 5bf5d0300697afddac74b898502b5226ff30c085 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 7 Jun 2022 20:13:42 -0400 Subject: [PATCH 30/74] local include --- gtsam/hybrid/tests/testHybridBayesNet.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 34133ab0b3..ec6b67d660 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -19,7 +19,8 @@ */ #include -#include + +#include "Switching.h" // Include for test suite #include From 8d359257797d502b76eb6500347515a08602eb15 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 7 Jun 2022 21:04:21 -0400 Subject: [PATCH 31/74] make makeBinaryOrdering inline to prevent multiple definition error --- gtsam/hybrid/tests/Switching.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 85c86f64e0..76c3da4989 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -178,7 +178,7 @@ HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( * @param input The original ordering. 
* @return std::pair> */ -std::pair> makeBinaryOrdering( +inline std::pair> makeBinaryOrdering( std::vector& input) { KeyVector new_order; From 622ebdd13ba3841317d533dbbd87771fa9bbd910 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 7 Jun 2022 22:21:38 -0400 Subject: [PATCH 32/74] makeSwitchingChain is also inline --- gtsam/hybrid/tests/Switching.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 76c3da4989..545259e481 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -147,7 +147,7 @@ struct Switching { * @param dKeyFunc The functional to help specify the discrete key. * @return HybridGaussianFactorGraph::shared_ptr */ -HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( +inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( size_t n, std::function keyFunc = X, std::function dKeyFunc = C) { HybridGaussianFactorGraph hfg; From 09d512ae4878b767ac1ac5e674795c4ec83f14e6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 9 Jun 2022 20:11:30 -0400 Subject: [PATCH 33/74] add docstring for makeBinaryOrdering --- gtsam/hybrid/tests/Switching.h | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 545259e481..38a327a580 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -173,7 +173,10 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( } /** - * @brief + * @brief Return the ordering as a binary tree such that all parent nodes are + * above their children. + * + * This will result in a nested dissection Bayes tree after elimination. * * @param input The original ordering. * @return std::pair> From 7496f2f43afa11d5431cc09d85694a461db0e1cb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 10 Jun 2022 04:24:13 -0400 Subject: [PATCH 34/74] address review comments --- gtsam/hybrid/HybridGaussianFactorGraph.h | 3 +-- gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp | 1 + 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 1d49904fed..e1cd2dc5f9 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -134,8 +134,7 @@ class GTSAM_EXPORT HybridGaussianFactorGraph template IsGaussian push_gaussian( const boost::shared_ptr& gaussianFactor) { - Base::Base::push_back( - boost::make_shared(gaussianFactor)); + Base::push_back(boost::make_shared(gaussianFactor)); } /// Construct a factor and add (shared pointer to it) to factor graph. 
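The two inline commits above address a one-definition-rule problem: a non-template free function defined in a header is compiled into every translation unit that includes it, and the linker then reports duplicate symbols; marking the function inline permits the repeated identical definitions. A minimal sketch of the failure mode, independent of GTSAM (names hypothetical):

// helpers.h, included from two different .cpp files:
int chainLength(int n) { return 2 * n - 1; }          // duplicate symbol at link time
inline int chainLengthOk(int n) { return 2 * n - 1; } // OK: inline allows this

Keeping makeSwitchingChain and makeBinaryOrdering inline, rather than moving them into a .cpp, matches their role as header-only test helpers.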
diff --git a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp index 9e1a2efdd4..a96aab6b9a 100644 --- a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp @@ -273,6 +273,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { // GTSAM_PRINT(*hbt); // GTSAM_PRINT(*remaining); + // hbt->marginalFactor(X(1))->print("HBT: "); /* (Fan) Explanation: the Junction tree will need to reeliminate to get to the marginal on X(1), which is not possible because it involves eliminating From 91da5209fe5801cc845a05828e676cc045774e2e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 20 Jun 2022 09:12:42 -0400 Subject: [PATCH 35/74] add new lines --- gtsam/hybrid/HybridFactorGraph.h | 2 +- gtsam/hybrid/tests/testHybridBayesNet.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h index cbbc45590f..fc730f0c97 100644 --- a/gtsam/hybrid/HybridFactorGraph.h +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -137,4 +137,4 @@ class HybridFactorGraph : public FactorGraph { } }; -} // namespace gtsam \ No newline at end of file +} // namespace gtsam diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index ec6b67d660..f3db839557 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -90,4 +90,4 @@ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ \ No newline at end of file +/* ************************************************************************* */ From 8ddc2ea989d17abb4b8fc551176a3bc411cdae71 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 26 Jul 2022 07:15:48 -0400 Subject: [PATCH 36/74] rename to HybridNonlinearFactorGraph --- ...orGraph.h => HybridNonlinearFactorGraph.h} | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) rename gtsam/hybrid/{NonlinearHybridFactorGraph.h => HybridNonlinearFactorGraph.h} (92%) diff --git a/gtsam/hybrid/NonlinearHybridFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h similarity index 92% rename from gtsam/hybrid/NonlinearHybridFactorGraph.h rename to gtsam/hybrid/HybridNonlinearFactorGraph.h index 82a331531e..c54a1057e2 100644 --- a/gtsam/hybrid/NonlinearHybridFactorGraph.h +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file NonlinearHybridFactorGraph.h + * @file HybridNonlinearFactorGraph.h * @brief Nonlinear hybrid factor graph that uses type erasure * @author Varun Agrawal * @date May 28, 2022 @@ -35,7 +35,7 @@ namespace gtsam { * This is the non-linear version of a hybrid factor graph. * Everything inside needs to be hybrid factor or hybrid conditional. */ -class GTSAM_EXPORT NonlinearHybridFactorGraph : public HybridFactorGraph { +class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { protected: /// Check if FACTOR type is derived from NonlinearFactor. 
template @@ -44,7 +44,7 @@ class GTSAM_EXPORT NonlinearHybridFactorGraph : public HybridFactorGraph { public: using Base = HybridFactorGraph; - using This = NonlinearHybridFactorGraph; ///< this class + using This = HybridNonlinearFactorGraph; ///< this class using shared_ptr = boost::shared_ptr; ///< shared_ptr to This using Values = gtsam::Values; ///< backwards compatibility @@ -53,7 +53,7 @@ class GTSAM_EXPORT NonlinearHybridFactorGraph : public HybridFactorGraph { /// @name Constructors /// @{ - NonlinearHybridFactorGraph() = default; + HybridNonlinearFactorGraph() = default; /** * Implicit copy/downcast constructor to override explicit template container @@ -61,7 +61,7 @@ class GTSAM_EXPORT NonlinearHybridFactorGraph : public HybridFactorGraph { * `cachedSeparatorMarginal_.reset(*separatorMarginal)` * */ template - NonlinearHybridFactorGraph(const FactorGraph& graph) + HybridNonlinearFactorGraph(const FactorGraph& graph) : Base(graph) {} /// @} @@ -131,21 +131,21 @@ class GTSAM_EXPORT NonlinearHybridFactorGraph : public HybridFactorGraph { * Simply prints the factor graph. */ void print( - const std::string& str = "NonlinearHybridFactorGraph", + const std::string& str = "HybridNonlinearFactorGraph", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override {} /** * @return true if all internal graphs of `this` are equal to those of * `other` */ - bool equals(const NonlinearHybridFactorGraph& other, + bool equals(const HybridNonlinearFactorGraph& other, double tol = 1e-9) const { return false; } /** * @brief Linearize all the continuous factors in the - * NonlinearHybridFactorGraph. + * HybridNonlinearFactorGraph. * * @param continuousValues: Dictionary of continuous values. * @return GaussianHybridFactorGraph::shared_ptr @@ -200,7 +200,7 @@ class GTSAM_EXPORT NonlinearHybridFactorGraph : public HybridFactorGraph { }; template <> -struct traits - : public Testable {}; +struct traits + : public Testable {}; } // namespace gtsam From 7a55341e3202ad2aac6a1f8521e7b2794c639f7d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 26 Jul 2022 07:21:05 -0400 Subject: [PATCH 37/74] add IsGaussian template check --- gtsam/hybrid/HybridGaussianFactorGraph.h | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index b98654b92b..ca9d8049ea 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -75,8 +75,14 @@ struct EliminationTraits { * Everything inside needs to be hybrid factor or hybrid conditional. */ class GTSAM_EXPORT HybridGaussianFactorGraph - : public HybridFactorGraph,, + : public HybridFactorGraph, public EliminateableFactorGraph { + protected: + /// Check if FACTOR type is derived from GaussianFactor. 
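+ /// (If FACTOR is not derived from GaussianFactor, the enable_if alias
+ /// below is ill-formed, so the guarded overload quietly drops out of
+ /// overload resolution instead of producing a hard error.)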
+ template + using IsGaussian = typename std::enable_if< + std::is_base_of::value>::type; + public: using Base = HybridFactorGraph; using This = HybridGaussianFactorGraph; ///< this class From 43c28e7870661214f9024c7ed67d4e63c3fb823a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 26 Jul 2022 07:40:24 -0400 Subject: [PATCH 38/74] renaming fixes --- gtsam/hybrid/HybridNonlinearFactorGraph.h | 8 +-- ...cpp => testHybridNonlinearFactorGraph.cpp} | 61 +++++++++---------- 2 files changed, 34 insertions(+), 35 deletions(-) rename gtsam/hybrid/tests/{testNonlinearHybridFactorGraph.cpp => testHybridNonlinearFactorGraph.cpp} (94%) diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h index c54a1057e2..f1396bcc0c 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.h +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -18,7 +18,7 @@ #pragma once -#include +#include #include #include #include @@ -148,11 +148,11 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { * HybridNonlinearFactorGraph. * * @param continuousValues: Dictionary of continuous values. - * @return GaussianHybridFactorGraph::shared_ptr + * @return HybridGaussianFactorGraph::shared_ptr */ - GaussianHybridFactorGraph linearize(const Values& continuousValues) const { + HybridGaussianFactorGraph linearize(const Values& continuousValues) const { // create an empty linear FG - GaussianHybridFactorGraph linearFG; + HybridGaussianFactorGraph linearFG; linearFG.reserve(size()); diff --git a/gtsam/hybrid/tests/testNonlinearHybridFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp similarity index 94% rename from gtsam/hybrid/tests/testNonlinearHybridFactorGraph.cpp rename to gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 6e40199921..6c5e949212 100644 --- a/gtsam/hybrid/tests/testNonlinearHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -7,8 +7,8 @@ * -------------------------------------------------------------------------- */ /** - * @file testDCFactorGraph.cpp - * @brief Unit tests for DCFactorGraph + * @file testHybridNonlinearFactorGraph.cpp + * @brief Unit tests for HybridNonlinearFactorGraph * @author Varun Agrawal * @author Fan Jiang * @author Frank Dellaert @@ -22,9 +22,8 @@ #include #include #include -#include +#include #include -#include #include #include #include @@ -49,7 +48,7 @@ using symbol_shorthand::X; * existing gaussian factor graph in the hybrid factor graph. */ TEST(HybridFactorGraph, GaussianFactorGraph) { - NonlinearHybridFactorGraph fg; + HybridNonlinearFactorGraph fg; // Add a simple prior factor to the nonlinear factor graph fg.emplace_nonlinear>(X(0), 0, Isotropic::Sigma(1, 0.1)); @@ -58,7 +57,7 @@ TEST(HybridFactorGraph, GaussianFactorGraph) { Values linearizationPoint; linearizationPoint.insert(X(0), 0); - GaussianHybridFactorGraph ghfg = fg.linearize(linearizationPoint); + HybridGaussianFactorGraph ghfg = fg.linearize(linearizationPoint); // Add a factor to the GaussianFactorGraph ghfg.add(JacobianFactor(X(0), I_1x1, Vector1(5))); @@ -69,9 +68,9 @@ TEST(HybridFactorGraph, GaussianFactorGraph) { /* ************************************************************************** */ /// Test that the resize method works correctly for a -/// NonlinearHybridFactorGraph. -TEST(NonlinearHybridFactorGraph, Resize) { - NonlinearHybridFactorGraph fg; +/// HybridNonlinearFactorGraph. 
+TEST(HybridNonlinearFactorGraph, Resize) { + HybridNonlinearFactorGraph fg; auto nonlinearFactor = boost::make_shared>(); fg.push_back(nonlinearFactor); @@ -90,9 +89,9 @@ TEST(NonlinearHybridFactorGraph, Resize) { /* ************************************************************************** */ /// Test that the resize method works correctly for a -/// GaussianHybridFactorGraph. -TEST(GaussianHybridFactorGraph, Resize) { - NonlinearHybridFactorGraph nhfg; +/// HybridGaussianFactorGraph. +TEST(HybridGaussianFactorGraph, Resize) { + HybridNonlinearFactorGraph nhfg; auto nonlinearFactor = boost::make_shared>( X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1)); nhfg.push_back(nonlinearFactor); @@ -115,8 +114,8 @@ TEST(GaussianHybridFactorGraph, Resize) { linearizationPoint.insert(X(0), 0); linearizationPoint.insert(X(1), 1); - // Generate `GaussianHybridFactorGraph` by linearizing - GaussianHybridFactorGraph gfg = nhfg.linearize(linearizationPoint); + // Generate `HybridGaussianFactorGraph` by linearizing + HybridGaussianFactorGraph gfg = nhfg.linearize(linearizationPoint); EXPECT_LONGS_EQUAL(gfg.size(), 3); @@ -129,41 +128,41 @@ TEST(GaussianHybridFactorGraph, Resize) { * Test push_back on HFG makes the correct distinction. */ TEST(HybridFactorGraph, PushBack) { - NonlinearHybridFactorGraph fg; + HybridNonlinearFactorGraph fg; auto nonlinearFactor = boost::make_shared>(); fg.push_back(nonlinearFactor); EXPECT_LONGS_EQUAL(fg.size(), 1); - fg = NonlinearHybridFactorGraph(); + fg = HybridNonlinearFactorGraph(); auto discreteFactor = boost::make_shared(); fg.push_back(discreteFactor); EXPECT_LONGS_EQUAL(fg.size(), 1); - fg = NonlinearHybridFactorGraph(); + fg = HybridNonlinearFactorGraph(); auto dcFactor = boost::make_shared(); fg.push_back(dcFactor); EXPECT_LONGS_EQUAL(fg.size(), 1); - // Now do the same with GaussianHybridFactorGraph - GaussianHybridFactorGraph ghfg; + // Now do the same with HybridGaussianFactorGraph + HybridGaussianFactorGraph ghfg; auto gaussianFactor = boost::make_shared(); ghfg.push_back(gaussianFactor); EXPECT_LONGS_EQUAL(ghfg.size(), 1); - ghfg = GaussianHybridFactorGraph(); + ghfg = HybridGaussianFactorGraph(); ghfg.push_back(discreteFactor); EXPECT_LONGS_EQUAL(ghfg.size(), 1); - ghfg = GaussianHybridFactorGraph(); + ghfg = HybridGaussianFactorGraph(); ghfg.push_back(dcFactor); EXPECT_LONGS_EQUAL(ghfg.size(), 1); @@ -193,7 +192,7 @@ TEST(HybridFactorGraph, PushBack) { // Switching self(3); // // Linearize here: -// GaussianHybridFactorGraph actualLinearized = +// HybridGaussianFactorGraph actualLinearized = // self.nonlinearFactorGraph.linearize(self.linearizationPoint); // EXPECT_LONGS_EQUAL(8, actualLinearized.size()); @@ -224,7 +223,7 @@ TEST(HybridFactorGraph, PushBack) { // Switching self(3); // // Gather factors on x1, has a simple Gaussian and a mixture factor. -// GaussianHybridFactorGraph factors; +// HybridGaussianFactorGraph factors; // factors.push_gaussian(self.linearizedFactorGraph.gaussianGraph()[0]); // factors.push_dc(self.linearizedFactorGraph.dcGraph()[0]); @@ -255,7 +254,7 @@ TEST(HybridFactorGraph, PushBack) { // Switching self(3); // // Gather factors on x2, will be two mixture factors (with x1 and x3, -// resp.). GaussianHybridFactorGraph factors; +// resp.). 
HybridGaussianFactorGraph factors; // factors.push_dc(self.linearizedFactorGraph.dcGraph()[0]); // involves m1 // factors.push_dc(self.linearizedFactorGraph.dcGraph()[1]); // involves m2 @@ -355,7 +354,7 @@ TEST(HybridFactorGraph, PushBack) { // Switching self(K, between_sigma, prior_sigma); // // Clear out discrete factors since sum() cannot hanldle those -// GaussianHybridFactorGraph linearizedFactorGraph( +// HybridGaussianFactorGraph linearizedFactorGraph( // self.linearizedFactorGraph.gaussianGraph(), DiscreteFactorGraph(), // self.linearizedFactorGraph.dcGraph()); @@ -400,7 +399,7 @@ TEST(HybridFactorGraph, PushBack) { // // Eliminate partially. // HybridBayesNet::shared_ptr hybridBayesNet; -// GaussianHybridFactorGraph::shared_ptr remainingFactorGraph; +// HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; // std::tie(hybridBayesNet, remainingFactorGraph) = // linearizedFactorGraph.eliminatePartialSequential(ordering); @@ -435,7 +434,7 @@ TEST(HybridFactorGraph, PushBack) { // // We first do a partial elimination // HybridBayesNet::shared_ptr hybridBayesNet_partial; -// GaussianHybridFactorGraph::shared_ptr remainingFactorGraph_partial; +// HybridGaussianFactorGraph::shared_ptr remainingFactorGraph_partial; // DiscreteBayesNet discreteBayesNet; // { @@ -500,7 +499,7 @@ TEST(HybridFactorGraph, PushBack) { // // Eliminate partially. // HybridBayesNet::shared_ptr hybridBayesNet; -// GaussianHybridFactorGraph::shared_ptr remainingFactorGraph; +// HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; // std::tie(hybridBayesNet, remainingFactorGraph) = // linearizedFactorGraph.eliminatePartialSequential(ordering); @@ -678,7 +677,7 @@ TEST(HybridFactorGraph, PushBack) { // // issue arises if we eliminate a landmark variable first since it is not // // connected to a DCFactor. // TEST(HybridFactorGraph, DefaultDecisionTree) { -// NonlinearHybridFactorGraph fg; +// HybridNonlinearFactorGraph fg; // // Add a prior on pose x1 at the origin. 
A prior factor consists of a mean // and @@ -732,9 +731,9 @@ TEST(HybridFactorGraph, PushBack) { // ordering += X(0); // ordering += X(1); -// GaussianHybridFactorGraph linearized = fg.linearize(initialEstimate); +// HybridGaussianFactorGraph linearized = fg.linearize(initialEstimate); // gtsam::HybridBayesNet::shared_ptr hybridBayesNet; -// gtsam::GaussianHybridFactorGraph::shared_ptr remainingFactorGraph; +// gtsam::HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; // // This should NOT fail // std::tie(hybridBayesNet, remainingFactorGraph) = From 987448fa7784f90ba291d5bfb5764fb0b076d7ab Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 26 Jul 2022 13:09:42 -0400 Subject: [PATCH 39/74] remove derived push_back in HybridNonlinearFactorGraph and HybridFactorGraph --- gtsam/hybrid/HybridFactorGraph.h | 8 -------- gtsam/hybrid/HybridNonlinearFactorGraph.h | 8 -------- 2 files changed, 16 deletions(-) diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h index 892136b86c..2d96072b49 100644 --- a/gtsam/hybrid/HybridFactorGraph.h +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -135,14 +135,6 @@ class HybridFactorGraph : public FactorGraph { push_hybrid(p); } } - - /** Constructor from iterator over factors (shared_ptr or plain objects) */ - template - void push_back(ITERATOR firstFactor, ITERATOR lastFactor) { - for (auto&& it = firstFactor; it != lastFactor; it++) { - push_back(*it); - } - } }; } // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h index f1396bcc0c..6c5dd515fa 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.h +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -109,14 +109,6 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { } } - /** Constructor from iterator over factors (shared_ptr or plain objects) */ - template - void push_back(ITERATOR firstFactor, ITERATOR lastFactor) { - for (auto&& it = firstFactor; it != lastFactor; it++) { - push_back(*it); - } - } - // /// Add a nonlinear factor to the factor graph. // void add(NonlinearFactor&& factor) { // Base::add(boost::make_shared(std::move(factor))); From 8471c97b9f5f2bb5eca3d95e2a17afbe3d230f52 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 29 Jul 2022 20:10:29 -0400 Subject: [PATCH 40/74] add nonlinear switching system tests --- gtsam/hybrid/MixtureFactor.h | 8 +- gtsam/hybrid/tests/Switching.h | 108 +++++++++++++++++- .../tests/testHybridNonlinearFactorGraph.cpp | 87 ++++++-------- 3 files changed, 151 insertions(+), 52 deletions(-) diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h index d2d1a8d74f..ba449976a8 100644 --- a/gtsam/hybrid/MixtureFactor.h +++ b/gtsam/hybrid/MixtureFactor.h @@ -1,6 +1,12 @@ /* ---------------------------------------------------------------------------- - * Copyright 2020 The Ambitious Folks of the MRG + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + * See LICENSE for the license information + * -------------------------------------------------------------------------- */ /** diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 4b1c276377..2707ae382c 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -18,12 +18,19 @@ #include #include +#include #include #include +#include +#include #include #include +#include +#include #include +#include + #pragma once using gtsam::symbol_shorthand::C; @@ -31,8 +38,6 @@ using gtsam::symbol_shorthand::X; namespace gtsam { -using MotionModel = BetweenFactor; - inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( size_t n, std::function keyFunc = X, std::function dKeyFunc = C) { @@ -87,4 +92,103 @@ inline std::pair> makeBinaryOrdering( return {new_order, levels}; } +/* *************************************************************************** + */ +using MotionModel = BetweenFactor; +// using MotionMixture = MixtureFactor; + +// Test fixture with switching network. +struct Switching { + size_t K; + DiscreteKeys modes; + HybridNonlinearFactorGraph nonlinearFactorGraph; + HybridGaussianFactorGraph linearizedFactorGraph; + Values linearizationPoint; + + /// Create with given number of time steps. + Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1) + : K(K) { + using symbol_shorthand::M; + using symbol_shorthand::X; + + // Create DiscreteKeys for binary K modes, modes[0] will not be used. + for (size_t k = 0; k <= K; k++) { + modes.emplace_back(M(k), 2); + } + + // Create hybrid factor graph. + // Add a prior on X(1). + auto prior = boost::make_shared>( + X(1), 0, noiseModel::Isotropic::Sigma(1, prior_sigma)); + nonlinearFactorGraph.push_nonlinear(prior); + + // Add "motion models". + for (size_t k = 1; k < K; k++) { + KeyVector keys = {X(k), X(k + 1)}; + auto motion_models = motionModels(k); + std::vector components; + for (auto &&f : motion_models) { + components.push_back(boost::dynamic_pointer_cast(f)); + } + nonlinearFactorGraph.emplace_hybrid( + keys, DiscreteKeys{modes[k]}, components); + } + + // Add measurement factors + auto measurement_noise = noiseModel::Isotropic::Sigma(1, 0.1); + for (size_t k = 1; k <= K; k++) { + nonlinearFactorGraph.emplace_nonlinear>( + X(k), 1.0 * (k - 1), measurement_noise); + } + + // Add "mode chain" + addModeChain(&nonlinearFactorGraph); + + // Create the linearization point. 
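+ // (The values x(k) = k are one unit apart, so at this linearization
+ // point the one-unit "moving" motion model fits consecutive states
+ // exactly, while the "still" model does not.)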
+ for (size_t k = 1; k <= K; k++) { + linearizationPoint.insert(X(k), static_cast(k)); + } + + linearizedFactorGraph = nonlinearFactorGraph.linearize(linearizationPoint); + } + + // Create motion models for a given time step + static std::vector motionModels(size_t k, + double sigma = 1.0) { + using symbol_shorthand::M; + using symbol_shorthand::X; + + auto noise_model = noiseModel::Isotropic::Sigma(1, sigma); + auto still = + boost::make_shared(X(k), X(k + 1), 0.0, noise_model), + moving = + boost::make_shared(X(k), X(k + 1), 1.0, noise_model); + return {still, moving}; + } + + // Add "mode chain" to HybridNonlinearFactorGraph + void addModeChain(HybridNonlinearFactorGraph *fg) { + auto prior = boost::make_shared(modes[1], "1/1"); + fg->push_discrete(prior); + for (size_t k = 1; k < K - 1; k++) { + auto parents = {modes[k]}; + auto conditional = boost::make_shared( + modes[k + 1], parents, "1/2 3/2"); + fg->push_discrete(conditional); + } + } + + // Add "mode chain" to HybridGaussianFactorGraph + void addModeChain(HybridGaussianFactorGraph *fg) { + auto prior = boost::make_shared(modes[1], "1/1"); + fg->push_discrete(prior); + for (size_t k = 1; k < K - 1; k++) { + auto parents = {modes[k]}; + auto conditional = boost::make_shared( + modes[k + 1], parents, "1/2 3/2"); + fg->push_discrete(conditional); + } + } +}; + } // namespace gtsam diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 6c5e949212..fb0778ed8d 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -65,10 +65,9 @@ TEST(HybridFactorGraph, GaussianFactorGraph) { EXPECT_LONGS_EQUAL(2, ghfg.size()); } -/* ************************************************************************** +/*************************************************************************** + * Test that the resize method works correctly for a HybridNonlinearFactorGraph. */ -/// Test that the resize method works correctly for a -/// HybridNonlinearFactorGraph. TEST(HybridNonlinearFactorGraph, Resize) { HybridNonlinearFactorGraph fg; auto nonlinearFactor = boost::make_shared>(); @@ -86,10 +85,10 @@ TEST(HybridNonlinearFactorGraph, Resize) { EXPECT_LONGS_EQUAL(fg.size(), 0); } -/* ************************************************************************** +/*************************************************************************** + * Test that the resize method works correctly for a + * HybridGaussianFactorGraph. */ -/// Test that the resize method works correctly for a -/// HybridGaussianFactorGraph. TEST(HybridGaussianFactorGraph, Resize) { HybridNonlinearFactorGraph nhfg; auto nonlinearFactor = boost::make_shared>( @@ -123,10 +122,9 @@ TEST(HybridGaussianFactorGraph, Resize) { EXPECT_LONGS_EQUAL(gfg.size(), 0); } -/* -**************************************************************************** -* Test push_back on HFG makes the correct distinction. -*/ +/***************************************************************************** + * Test push_back on HFG makes the correct distinction. + */ TEST(HybridFactorGraph, PushBack) { HybridNonlinearFactorGraph fg; @@ -168,53 +166,44 @@ TEST(HybridFactorGraph, PushBack) { EXPECT_LONGS_EQUAL(ghfg.size(), 1); } -// /* -// ****************************************************************************/ -// // Test construction of switching-like hybrid factor graph. 
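An aside on the Switching fixture above: the "1/2 3/2" signature string passed to DiscreteConditional in the addModeChain helpers encodes one block of relative weights per parent assignment, so given m(k) = 0 the odds for m(k+1) are 1:2, and given m(k) = 1 they are 3:2, normalized internally. A small hedged sketch of reading such a conditional back out, using the same calls as the fixture:

DiscreteKey m1(M(1), 2), m2(M(2), 2);
auto parents = {m1};
auto conditional =
    boost::make_shared<DiscreteConditional>(m2, parents, "1/2 3/2");
DiscreteValues assignment;
assignment[M(1)] = 0;
assignment[M(2)] = 1;
double p = (*conditional)(assignment);  // 2/3, i.e. P(m2=1 | m1=0)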
-// TEST(HybridFactorGraph, Switching) { -// Switching self(3); +/**************************************************************************** + * Test construction of switching-like hybrid factor graph. + */ +TEST(HybridFactorGraph, Switching) { + Switching self(3); -// EXPECT_LONGS_EQUAL(8, self.nonlinearFactorGraph.size()); -// EXPECT_LONGS_EQUAL(4, self.nonlinearFactorGraph.nonlinearGraph().size()); -// EXPECT_LONGS_EQUAL(2, self.nonlinearFactorGraph.discreteGraph().size()); -// EXPECT_LONGS_EQUAL(2, self.nonlinearFactorGraph.dcGraph().size()); + EXPECT_LONGS_EQUAL(8, self.nonlinearFactorGraph.size()); -// EXPECT_LONGS_EQUAL(8, self.linearizedFactorGraph.size()); -// EXPECT_LONGS_EQUAL(2, self.linearizedFactorGraph.discreteGraph().size()); -// EXPECT_LONGS_EQUAL(2, self.linearizedFactorGraph.dcGraph().size()); -// EXPECT_LONGS_EQUAL(4, self.linearizedFactorGraph.gaussianGraph().size()); -// } + EXPECT_LONGS_EQUAL(8, self.linearizedFactorGraph.size()); +} -// /* -// ****************************************************************************/ -// // Test linearization on a switching-like hybrid factor graph. -// TEST(HybridFactorGraph, Linearization) { -// Switching self(3); +/**************************************************************************** + * Test linearization on a switching-like hybrid factor graph. + */ +TEST(HybridFactorGraph, Linearization) { + Switching self(3); -// // Linearize here: -// HybridGaussianFactorGraph actualLinearized = -// self.nonlinearFactorGraph.linearize(self.linearizationPoint); + // Linearize here: + HybridGaussianFactorGraph actualLinearized = + self.nonlinearFactorGraph.linearize(self.linearizationPoint); -// EXPECT_LONGS_EQUAL(8, actualLinearized.size()); -// EXPECT_LONGS_EQUAL(2, actualLinearized.discreteGraph().size()); -// EXPECT_LONGS_EQUAL(2, actualLinearized.dcGraph().size()); -// EXPECT_LONGS_EQUAL(4, actualLinearized.gaussianGraph().size()); -// } + EXPECT_LONGS_EQUAL(8, actualLinearized.size()); +} -// /* -// ****************************************************************************/ -// // Test elimination tree construction -// TEST(HybridFactorGraph, EliminationTree) { -// Switching self(3); +/**************************************************************************** + * Test elimination tree construction + */ +TEST(HybridFactorGraph, EliminationTree) { + Switching self(3); -// // Create ordering. -// Ordering ordering; -// for (size_t k = 1; k <= self.K; k++) ordering += X(k); + // Create ordering. + Ordering ordering; + for (size_t k = 1; k <= self.K; k++) ordering += X(k); -// // Create elimination tree. -// HybridEliminationTree etree(self.linearizedFactorGraph, ordering); -// EXPECT_LONGS_EQUAL(1, etree.roots().size()) -// } + // Create elimination tree. 
+ HybridEliminationTree etree(self.linearizedFactorGraph, ordering); + EXPECT_LONGS_EQUAL(1, etree.roots().size()) +} // /* // ****************************************************************************/ From 89079229bcd02f6cc1d3df829a225c853848f259 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 1 Aug 2022 12:50:10 -0400 Subject: [PATCH 41/74] get more nonlinear tests to work and make some updates --- gtsam/hybrid/HybridBayesTree.h | 5 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 10 +- .../tests/testHybridNonlinearFactorGraph.cpp | 227 +++++++++--------- 3 files changed, 124 insertions(+), 118 deletions(-) diff --git a/gtsam/hybrid/HybridBayesTree.h b/gtsam/hybrid/HybridBayesTree.h index 0b89ca8c4b..02a4a11e5e 100644 --- a/gtsam/hybrid/HybridBayesTree.h +++ b/gtsam/hybrid/HybridBayesTree.h @@ -76,7 +76,10 @@ class GTSAM_EXPORT HybridBayesTree : public BayesTree { /** * @brief Class for Hybrid Bayes tree orphan subtrees. * - * This does special stuff for the hybrid case + * This object stores parent keys in our base type factor so that + * eliminating those parent keys will pull this subtree into the + * elimination. + * This does special stuff for the hybrid case. * * @tparam CLIQUE */ diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 88730cae95..3536f6a361 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -98,6 +98,12 @@ GaussianMixtureFactor::Sum sumFrontals( } else if (f->isContinuous()) { deferredFactors.push_back( boost::dynamic_pointer_cast(f)->inner()); + + } else if (f->isDiscrete()) { + // Don't do anything for discrete-only factors + // since we want to eliminate continuous values only. + continue; + } else { // We need to handle the case where the object is actually an // BayesTreeOrphanWrapper! @@ -106,8 +112,8 @@ GaussianMixtureFactor::Sum sumFrontals( if (!orphan) { auto &fr = *f; throw std::invalid_argument( - std::string("factor is discrete in continuous elimination") + - typeid(fr).name()); + std::string("factor is discrete in continuous elimination ") + + demangle(typeid(fr).name())); } } } diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index fb0778ed8d..1f8abc4f68 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -25,6 +25,8 @@ #include #include #include +#include +#include #include #include #include @@ -205,131 +207,126 @@ TEST(HybridFactorGraph, EliminationTree) { EXPECT_LONGS_EQUAL(1, etree.roots().size()) } -// /* -// ****************************************************************************/ -// // Test elimination function by eliminating x1 in *-x1-*-x2 graph. -// TEST(DCGaussianElimination, Eliminate_x1) { -// Switching self(3); - -// // Gather factors on x1, has a simple Gaussian and a mixture factor. -// HybridGaussianFactorGraph factors; -// factors.push_gaussian(self.linearizedFactorGraph.gaussianGraph()[0]); -// factors.push_dc(self.linearizedFactorGraph.dcGraph()[0]); - -// // Check that sum works: -// auto sum = factors.sum(); -// Assignment mode; -// mode[M(1)] = 1; -// auto actual = sum(mode); // Selects one of 2 modes. -// EXPECT_LONGS_EQUAL(2, actual.size()); // Prior and motion model. 
- -// // Eliminate x1 -// Ordering ordering; -// ordering += X(1); +/**************************************************************************** + *Test elimination function by eliminating x1 in *-x1-*-x2 graph. + */ +TEST(GaussianElimination, Eliminate_x1) { + Switching self(3); -// auto result = EliminateHybrid(factors, ordering); -// CHECK(result.first); -// EXPECT_LONGS_EQUAL(1, result.first->nrFrontals()); -// CHECK(result.second); -// // Has two keys, x2 and m1 -// EXPECT_LONGS_EQUAL(2, result.second->size()); -// } + // Gather factors on x1, has a simple Gaussian and a mixture factor. + HybridGaussianFactorGraph factors; + // Add gaussian prior + factors.push_back(self.linearizedFactorGraph[0]); + // Add first hybrid factor + factors.push_back(self.linearizedFactorGraph[1]); + + // TODO(Varun) remove this block since sum is no longer exposed. + // // Check that sum works: + // auto sum = factors.sum(); + // Assignment mode; + // mode[M(1)] = 1; + // auto actual = sum(mode); // Selects one of 2 modes. + // EXPECT_LONGS_EQUAL(2, actual.size()); // Prior and motion model. + + // Eliminate x1 + Ordering ordering; + ordering += X(1); + + auto result = EliminateHybrid(factors, ordering); + CHECK(result.first); + EXPECT_LONGS_EQUAL(1, result.first->nrFrontals()); + CHECK(result.second); + // Has two keys, x2 and m1 + EXPECT_LONGS_EQUAL(2, result.second->size()); +} -// /* -// ****************************************************************************/ -// // Test elimination function by eliminating x2 in x1-*-x2-*-x3 chain. -// // m1/ \m2 -// TEST(DCGaussianElimination, Eliminate_x2) { -// Switching self(3); +/**************************************************************************** + * Test elimination function by eliminating x2 in x1-*-x2-*-x3 chain. + * m1/ \m2 + */ +TEST(HybridsGaussianElimination, Eliminate_x2) { + Switching self(3); -// // Gather factors on x2, will be two mixture factors (with x1 and x3, -// resp.). HybridGaussianFactorGraph factors; -// factors.push_dc(self.linearizedFactorGraph.dcGraph()[0]); // involves m1 -// factors.push_dc(self.linearizedFactorGraph.dcGraph()[1]); // involves m2 + // Gather factors on x2, will be two mixture factors (with x1 and x3, resp.). + HybridGaussianFactorGraph factors; + factors.push_back(self.linearizedFactorGraph[1]); // involves m1 + factors.push_back(self.linearizedFactorGraph[2]); // involves m2 + + // TODO(Varun) remove this block since sum is no longer exposed. + // // Check that sum works: + // auto sum = factors.sum(); + // Assignment mode; + // mode[M(1)] = 0; + // mode[M(2)] = 1; + // auto actual = sum(mode); // Selects one of 4 mode + // combinations. EXPECT_LONGS_EQUAL(2, actual.size()); // 2 motion models. + + // Eliminate x2 + Ordering ordering; + ordering += X(2); + + std::pair result = + EliminateHybrid(factors, ordering); + CHECK(result.first); + EXPECT_LONGS_EQUAL(1, result.first->nrFrontals()); + CHECK(result.second); + // Note: separator keys should include m1, m2. + EXPECT_LONGS_EQUAL(4, result.second->size()); +} -// // Check that sum works: -// auto sum = factors.sum(); -// Assignment mode; -// mode[M(1)] = 0; -// mode[M(2)] = 1; -// auto actual = sum(mode); // Selects one of 4 mode -// combinations. EXPECT_LONGS_EQUAL(2, actual.size()); // 2 motion models. +/* +****************************************************************************/ +// Helper method to generate gaussian factor graphs with a specific mode. 
+GaussianFactorGraph::shared_ptr batchGFG(double between, + Values linearizationPoint) { + NonlinearFactorGraph graph; + graph.addPrior(X(1), 0, Isotropic::Sigma(1, 0.1)); -// // Eliminate x2 -// Ordering ordering; -// ordering += X(2); - -// std::pair> -// result = -// EliminateHybrid(factors, ordering); -// CHECK(result.first); -// EXPECT_LONGS_EQUAL(1, result.first->nrFrontals()); -// CHECK(result.second); -// // Note: separator keys should include m1, m2. -// EXPECT_LONGS_EQUAL(4, result.second->size()); -// } + auto between_x1_x2 = boost::make_shared( + X(1), X(2), between, Isotropic::Sigma(1, 1.0)); -// /* -// ****************************************************************************/ -// // Helper method to generate gaussian factor graphs with a specific mode. -// GaussianFactorGraph::shared_ptr batchGFG(double between, -// Values linearizationPoint) { -// NonlinearFactorGraph graph; -// graph.addPrior(X(1), 0, Isotropic::Sigma(1, 0.1)); + graph.push_back(between_x1_x2); -// auto between_x1_x2 = boost::make_shared( -// X(1), X(2), between, Isotropic::Sigma(1, 1.0)); + return graph.linearize(linearizationPoint); +} -// graph.push_back(between_x1_x2); +/*****************************************************************************/ +// Test elimination function by eliminating x1 and x2 in graph. +TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { + Switching self(2, 1.0, 0.1); -// return graph.linearize(linearizationPoint); -// } + auto factors = self.linearizedFactorGraph; -// /* -// ****************************************************************************/ -// // Test elimination function by eliminating x1 and x2 in graph. -// TEST(DCGaussianElimination, EliminateHybrid_2_Variable) { -// Switching self(2, 1.0, 0.1); - -// auto factors = self.linearizedFactorGraph; - -// // Check that sum works: -// auto sum = factors.sum(); -// Assignment mode; -// mode[M(1)] = 1; -// auto actual = sum(mode); // Selects one of 2 modes. -// EXPECT_LONGS_EQUAL(4, -// actual.size()); // Prior, 1 motion models, 2 -// measurements. 
- -// // Eliminate x1 -// Ordering ordering; -// ordering += X(1); -// ordering += X(2); - -// AbstractConditional::shared_ptr abstractConditionalMixture; -// boost::shared_ptr factorOnModes; -// std::tie(abstractConditionalMixture, factorOnModes) = -// EliminateHybrid(factors, ordering); - -// auto gaussianConditionalMixture = -// dynamic_pointer_cast(abstractConditionalMixture); - -// CHECK(gaussianConditionalMixture); -// EXPECT_LONGS_EQUAL( -// 2, -// gaussianConditionalMixture->nrFrontals()); // Frontals = [x1, x2] -// EXPECT_LONGS_EQUAL( -// 1, -// gaussianConditionalMixture->nrParents()); // 1 parent, which is the -// mode - -// auto discreteFactor = -// dynamic_pointer_cast(factorOnModes); -// CHECK(discreteFactor); -// EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size()); -// EXPECT(discreteFactor->root_->isLeaf() == false); -// } + // Eliminate x1 + Ordering ordering; + ordering += X(1); + ordering += X(2); + + HybridConditional::shared_ptr hybridConditionalMixture; + HybridFactor::shared_ptr factorOnModes; + + std::tie(hybridConditionalMixture, factorOnModes) = + EliminateHybrid(factors, ordering); + + auto gaussianConditionalMixture = + dynamic_pointer_cast(hybridConditionalMixture->inner()); + + CHECK(gaussianConditionalMixture); + // Frontals = [x1, x2] + EXPECT_LONGS_EQUAL(2, gaussianConditionalMixture->nrFrontals()); + // 1 parent, which is the mode + EXPECT_LONGS_EQUAL(1, gaussianConditionalMixture->nrParents()); + + // This is now a HybridDiscreteFactor + auto hybridDiscreteFactor = + dynamic_pointer_cast(factorOnModes); + // Access the type-erased inner object and convert to DecisionTreeFactor + auto discreteFactor = + dynamic_pointer_cast(hybridDiscreteFactor->inner()); + CHECK(discreteFactor); + EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size()); + EXPECT(discreteFactor->root_->isLeaf() == false); +} // /* // ****************************************************************************/ From 16124f36172d1c0e91d5e81d97cf830e786f738d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 2 Aug 2022 19:01:49 -0400 Subject: [PATCH 42/74] get all but 2 tests passing --- .../tests/testHybridNonlinearFactorGraph.cpp | 567 +++++++++--------- 1 file changed, 290 insertions(+), 277 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 1f8abc4f68..947fafacb9 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -19,6 +19,7 @@ #include #include #include +#include #include #include #include @@ -274,9 +275,9 @@ TEST(HybridsGaussianElimination, Eliminate_x2) { EXPECT_LONGS_EQUAL(4, result.second->size()); } -/* -****************************************************************************/ -// Helper method to generate gaussian factor graphs with a specific mode. +/**************************************************************************** + * Helper method to generate gaussian factor graphs with a specific mode. + */ GaussianFactorGraph::shared_ptr batchGFG(double between, Values linearizationPoint) { NonlinearFactorGraph graph; @@ -290,8 +291,9 @@ GaussianFactorGraph::shared_ptr batchGFG(double between, return graph.linearize(linearizationPoint); } -/*****************************************************************************/ -// Test elimination function by eliminating x1 and x2 in graph. 
+/**************************************************************************** + * Test elimination function by eliminating x1 and x2 in graph. + */ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { Switching self(2, 1.0, 0.1); @@ -371,287 +373,298 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { // EXPECT_DOUBLES_EQUAL(0.6125, actual, 1e-4); // } -// /* -// ****************************************************************************/ -// // Test partial elimination -// TEST_UNSAFE(HybridFactorGraph, Partial_Elimination) { -// Switching self(3); +/**************************************************************************** + * Test partial elimination + */ +TEST(HybridFactorGraph, Partial_Elimination) { + Switching self(3); -// auto linearizedFactorGraph = self.linearizedFactorGraph; + auto linearizedFactorGraph = self.linearizedFactorGraph; -// // Create ordering. -// Ordering ordering; -// for (size_t k = 1; k <= self.K; k++) ordering += X(k); + // Create ordering. + Ordering ordering; + for (size_t k = 1; k <= self.K; k++) ordering += X(k); -// // Eliminate partially. -// HybridBayesNet::shared_ptr hybridBayesNet; -// HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; -// std::tie(hybridBayesNet, remainingFactorGraph) = -// linearizedFactorGraph.eliminatePartialSequential(ordering); - -// CHECK(hybridBayesNet); -// // GTSAM_PRINT(*hybridBayesNet); // HybridBayesNet -// EXPECT_LONGS_EQUAL(3, hybridBayesNet->size()); -// EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)}); -// EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)})); -// EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)}); -// EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(2), M(1)})); -// EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)}); -// EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(2), M(1)})); - -// CHECK(remainingFactorGraph); -// // GTSAM_PRINT(*remainingFactorGraph); // HybridFactorGraph -// EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size()); -// EXPECT(remainingFactorGraph->discreteGraph().at(0)->keys() == -// KeyVector({M(1)})); -// EXPECT(remainingFactorGraph->discreteGraph().at(1)->keys() == -// KeyVector({M(2), M(1)})); -// EXPECT(remainingFactorGraph->discreteGraph().at(2)->keys() == -// KeyVector({M(2), M(1)})); -// } + // Eliminate partially. 
+ HybridBayesNet::shared_ptr hybridBayesNet; + HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; + std::tie(hybridBayesNet, remainingFactorGraph) = + linearizedFactorGraph.eliminatePartialSequential(ordering); + + CHECK(hybridBayesNet); + // GTSAM_PRINT(*hybridBayesNet); // HybridBayesNet + EXPECT_LONGS_EQUAL(3, hybridBayesNet->size()); + EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)}); + EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)})); + EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)}); + EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)})); + EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)}); + EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)})); + + CHECK(remainingFactorGraph); + // GTSAM_PRINT(*remainingFactorGraph); // HybridFactorGraph + EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size()); + EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(1)})); + EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(2), M(1)})); + EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(1), M(2)})); +} -// /* -// ****************************************************************************/ -// // Test full elimination -// TEST_UNSAFE(HybridFactorGraph, Full_Elimination) { -// Switching self(3); - -// auto linearizedFactorGraph = self.linearizedFactorGraph; - -// // We first do a partial elimination -// HybridBayesNet::shared_ptr hybridBayesNet_partial; -// HybridGaussianFactorGraph::shared_ptr remainingFactorGraph_partial; -// DiscreteBayesNet discreteBayesNet; - -// { -// // Create ordering. -// Ordering ordering; -// for (size_t k = 1; k <= self.K; k++) ordering += X(k); - -// // Eliminate partially. -// std::tie(hybridBayesNet_partial, remainingFactorGraph_partial) = -// linearizedFactorGraph.eliminatePartialSequential(ordering); - -// DiscreteFactorGraph dfg; -// dfg.push_back(remainingFactorGraph_partial->discreteGraph()); -// ordering.clear(); -// for (size_t k = 1; k < self.K; k++) ordering += M(k); -// discreteBayesNet = *dfg.eliminateSequential(ordering, EliminateForMPE); -// } +/**************************************************************************** + * Test full elimination + */ +TEST(HybridFactorGraph, Full_Elimination) { + Switching self(3); -// // Create ordering. -// Ordering ordering; -// for (size_t k = 1; k <= self.K; k++) ordering += X(k); -// for (size_t k = 1; k < self.K; k++) ordering += M(k); - -// // Eliminate partially. 
-// HybridBayesNet::shared_ptr hybridBayesNet = -// linearizedFactorGraph.eliminateSequential(ordering); - -// CHECK(hybridBayesNet); -// EXPECT_LONGS_EQUAL(5, hybridBayesNet->size()); -// // p(x1 | x2, m1) -// EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)}); -// EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)})); -// // p(x2 | x3, m1, m2) -// EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)}); -// EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(2), M(1)})); -// // p(x3 | m1, m2) -// EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)}); -// EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(2), M(1)})); -// // P(m1 | m2) -// EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(1)}); -// EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(2)})); -// EXPECT(dynamic_pointer_cast(hybridBayesNet->at(3)) -// ->equals(*discreteBayesNet.at(0))); -// // P(m2) -// EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)}); -// EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents()); -// EXPECT(dynamic_pointer_cast(hybridBayesNet->at(4)) -// ->equals(*discreteBayesNet.at(1))); -// } + auto linearizedFactorGraph = self.linearizedFactorGraph; + + // We first do a partial elimination + HybridBayesNet::shared_ptr hybridBayesNet_partial; + HybridGaussianFactorGraph::shared_ptr remainingFactorGraph_partial; + DiscreteBayesNet discreteBayesNet; + + { + // Create ordering. + Ordering ordering; + for (size_t k = 1; k <= self.K; k++) ordering += X(k); + + // Eliminate partially. + std::tie(hybridBayesNet_partial, remainingFactorGraph_partial) = + linearizedFactorGraph.eliminatePartialSequential(ordering); + + DiscreteFactorGraph discrete_fg; + //TODO(Varun) Make this a function of HybridGaussianFactorGraph? + for(HybridFactor::shared_ptr& factor: (*remainingFactorGraph_partial)) { + auto df = dynamic_pointer_cast(factor); + discrete_fg.push_back(df->inner()); + } + ordering.clear(); + for (size_t k = 1; k < self.K; k++) ordering += M(k); + discreteBayesNet = *discrete_fg.eliminateSequential(ordering, EliminateForMPE); + } -// /* -// ****************************************************************************/ -// // Test printing -// TEST(HybridFactorGraph, Printing) { -// Switching self(3); + // Create ordering. + Ordering ordering; + for (size_t k = 1; k <= self.K; k++) ordering += X(k); + for (size_t k = 1; k < self.K; k++) ordering += M(k); + + // Eliminate partially. + HybridBayesNet::shared_ptr hybridBayesNet = + linearizedFactorGraph.eliminateSequential(ordering); + + CHECK(hybridBayesNet); + EXPECT_LONGS_EQUAL(5, hybridBayesNet->size()); + // p(x1 | x2, m1) + EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)}); + EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)})); + // p(x2 | x3, m1, m2) + EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)}); + EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)})); + // p(x3 | m1, m2) + EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)}); + EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)})); + // P(m1 | m2) + EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(1)}); + EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(2)})); + GTSAM_PRINT(*(hybridBayesNet->at(3))); + GTSAM_PRINT(*(discreteBayesNet.at(0))); + //TODO(Varun) FIX!! 
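// The mismatch flagged by this TODO looks like an elimination-method
// mismatch rather than a wrong result: the batch discrete run above uses
// EliminateForMPE (max-product, producing lookup-table style conditionals),
// while hybrid elimination still routes discrete-only factors through
// sum-product EliminateDiscrete. PATCH 43 below resolves it by making
// discreteElimination() use the same routine:
//
//   // inside discreteElimination(), after the fix:
//   auto result = EliminateForMPE(dfg, frontalKeys);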
+ // EXPECT( + // dynamic_pointer_cast(hybridBayesNet->at(3)->inner()) + // ->equals(*discreteBayesNet.at(0))); + // // P(m2) + // EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)}); + // EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents()); + // EXPECT( + // dynamic_pointer_cast(hybridBayesNet->at(4)->inner()) + // ->equals(*discreteBayesNet.at(1))); +} -// auto linearizedFactorGraph = self.linearizedFactorGraph; +/* +****************************************************************************/ +// Test printing +TEST(HybridFactorGraph, Printing) { + Switching self(3); -// // Create ordering. -// Ordering ordering; -// for (size_t k = 1; k <= self.K; k++) ordering += X(k); + auto linearizedFactorGraph = self.linearizedFactorGraph; -// // Eliminate partially. -// HybridBayesNet::shared_ptr hybridBayesNet; -// HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; -// std::tie(hybridBayesNet, remainingFactorGraph) = -// linearizedFactorGraph.eliminatePartialSequential(ordering); - -// string expected_hybridFactorGraph = R"( -// size: 8 -// DiscreteFactorGraph -// size: 2 -// factor 0: P( m1 ): -// Leaf 0.5 -// factor 1: P( m2 | m1 ): -// Choice(m2) -// 0 Choice(m1) -// 0 0 Leaf 0.33333333 -// 0 1 Leaf 0.6 -// 1 Choice(m1) -// 1 0 Leaf 0.66666667 -// 1 1 Leaf 0.4 -// DCFactorGraph -// size: 2 -// factor 0: [ x1 x2; m1 ]{ -// Choice(m1) -// 0 Leaf Jacobian factor on 2 keys: -// A[x1] = [ -// -1 -// ] -// A[x2] = [ -// 1 -// ] -// b = [ -1 ] -// No noise model -// 1 Leaf Jacobian factor on 2 keys: -// A[x1] = [ -// -1 -// ] -// A[x2] = [ -// 1 -// ] -// b = [ -0 ] -// No noise model -// } -// factor 1: [ x2 x3; m2 ]{ -// Choice(m2) -// 0 Leaf Jacobian factor on 2 keys: -// A[x2] = [ -// -1 -// ] -// A[x3] = [ -// 1 -// ] -// b = [ -1 ] -// No noise model -// 1 Leaf Jacobian factor on 2 keys: -// A[x2] = [ -// -1 -// ] -// A[x3] = [ -// 1 -// ] -// b = [ -0 ] -// No noise model -// } -// GaussianGraph -// size: 4 -// factor 0: -// A[x1] = [ -// 10 -// ] -// b = [ -10 ] -// No noise model -// factor 1: -// A[x1] = [ -// 10 -// ] -// b = [ -10 ] -// No noise model -// factor 2: -// A[x2] = [ -// 10 -// ] -// b = [ -10 ] -// No noise model -// factor 3: -// A[x3] = [ -// 10 -// ] -// b = [ -10 ] -// No noise model -// )"; -// EXPECT(assert_print_equal(expected_hybridFactorGraph, -// linearizedFactorGraph)); - -// // Expected output for hybridBayesNet. 
-// string expected_hybridBayesNet = R"( -// size: 3 -// factor 0: GaussianMixture [ x1 | x2 m1 ]{ -// Choice(m1) -// 0 Leaf Jacobian factor on 2 keys: -// p(x1 | x2) -// R = [ 14.1774 ] -// S[x2] = [ -0.0705346 ] -// d = [ -14.0364 ] -// No noise model -// 1 Leaf Jacobian factor on 2 keys: -// p(x1 | x2) -// R = [ 14.1774 ] -// S[x2] = [ -0.0705346 ] -// d = [ -14.1069 ] -// No noise model -// } -// factor 1: GaussianMixture [ x2 | x3 m2 m1 ]{ -// Choice(m2) -// 0 Choice(m1) -// 0 0 Leaf Jacobian factor on 2 keys: -// p(x2 | x3) -// R = [ 10.0993 ] -// S[x3] = [ -0.0990172 ] -// d = [ -9.99975 ] -// No noise model -// 0 1 Leaf Jacobian factor on 2 keys: -// p(x2 | x3) -// R = [ 10.0993 ] -// S[x3] = [ -0.0990172 ] -// d = [ -9.90122 ] -// No noise model -// 1 Choice(m1) -// 1 0 Leaf Jacobian factor on 2 keys: -// p(x2 | x3) -// R = [ 10.0993 ] -// S[x3] = [ -0.0990172 ] -// d = [ -10.0988 ] -// No noise model -// 1 1 Leaf Jacobian factor on 2 keys: -// p(x2 | x3) -// R = [ 10.0993 ] -// S[x3] = [ -0.0990172 ] -// d = [ -10.0002 ] -// No noise model -// } -// factor 2: GaussianMixture [ x3 | m2 m1 ]{ -// Choice(m2) -// 0 Choice(m1) -// 0 0 Leaf Jacobian factor on 1 keys: -// p(x3) -// R = [ 10.0494 ] -// d = [ -10.1489 ] -// No noise model -// 0 1 Leaf Jacobian factor on 1 keys: -// p(x3) -// R = [ 10.0494 ] -// d = [ -10.1479 ] -// No noise model -// 1 Choice(m1) -// 1 0 Leaf Jacobian factor on 1 keys: -// p(x3) -// R = [ 10.0494 ] -// d = [ -10.0504 ] -// No noise model -// 1 1 Leaf Jacobian factor on 1 keys: -// p(x3) -// R = [ 10.0494 ] -// d = [ -10.0494 ] -// No noise model -// } -// )"; -// EXPECT(assert_print_equal(expected_hybridBayesNet, *hybridBayesNet)); -// } + // Create ordering. + Ordering ordering; + for (size_t k = 1; k <= self.K; k++) ordering += X(k); + + // Eliminate partially. + HybridBayesNet::shared_ptr hybridBayesNet; + HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; + std::tie(hybridBayesNet, remainingFactorGraph) = + linearizedFactorGraph.eliminatePartialSequential(ordering); + + string expected_hybridFactorGraph = R"( +size: 8 +factor 0: Continuous x1; + + A[x1] = [ + 10 +] + b = [ -10 ] + No noise model +factor 1: Hybrid x1 x2 m1; m1 ]{ + Choice(m1) + 0 Leaf : + A[x1] = [ + -1 +] + A[x2] = [ + 1 +] + b = [ -1 ] + No noise model + + 1 Leaf : + A[x1] = [ + -1 +] + A[x2] = [ + 1 +] + b = [ -0 ] + No noise model + +} +factor 2: Hybrid x2 x3 m2; m2 ]{ + Choice(m2) + 0 Leaf : + A[x2] = [ + -1 +] + A[x3] = [ + 1 +] + b = [ -1 ] + No noise model + + 1 Leaf : + A[x2] = [ + -1 +] + A[x3] = [ + 1 +] + b = [ -0 ] + No noise model + +} +factor 3: Continuous x1; + + A[x1] = [ + 10 +] + b = [ -10 ] + No noise model +factor 4: Continuous x2; + + A[x2] = [ + 10 +] + b = [ -10 ] + No noise model +factor 5: Continuous x3; + + A[x3] = [ + 10 +] + b = [ -10 ] + No noise model +factor 6: Discrete m1 + P( m1 ): + Leaf 0.5 + +factor 7: Discrete m2 m1 + P( m2 | m1 ): + Choice(m2) + 0 Choice(m1) + 0 0 Leaf 0.33333333 + 0 1 Leaf 0.6 + 1 Choice(m1) + 1 0 Leaf 0.66666667 + 1 1 Leaf 0.4 + +)"; + EXPECT(assert_print_equal(expected_hybridFactorGraph, linearizedFactorGraph)); + + // Expected output for hybridBayesNet. 
+ string expected_hybridBayesNet = R"( +size: 3 +factor 0: Hybrid P( x1 | x2 m1) + Discrete Keys = (m1, 2), + Choice(m1) + 0 Leaf p(x1 | x2) + R = [ 14.1774 ] + S[x2] = [ -0.0705346 ] + d = [ -14.0364 ] + No noise model + + 1 Leaf p(x1 | x2) + R = [ 14.1774 ] + S[x2] = [ -0.0705346 ] + d = [ -14.1069 ] + No noise model + +factor 1: Hybrid P( x2 | x3 m1 m2) + Discrete Keys = (m1, 2), (m2, 2), + Choice(m2) + 0 Choice(m1) + 0 0 Leaf p(x2 | x3) + R = [ 10.0993 ] + S[x3] = [ -0.0990172 ] + d = [ -9.99975 ] + No noise model + + 0 1 Leaf p(x2 | x3) + R = [ 10.0993 ] + S[x3] = [ -0.0990172 ] + d = [ -9.90122 ] + No noise model + + 1 Choice(m1) + 1 0 Leaf p(x2 | x3) + R = [ 10.0993 ] + S[x3] = [ -0.0990172 ] + d = [ -10.0988 ] + No noise model + + 1 1 Leaf p(x2 | x3) + R = [ 10.0993 ] + S[x3] = [ -0.0990172 ] + d = [ -10.0002 ] + No noise model + +factor 2: Hybrid P( x3 | m1 m2) + Discrete Keys = (m1, 2), (m2, 2), + Choice(m2) + 0 Choice(m1) + 0 0 Leaf p(x3) + R = [ 10.0494 ] + d = [ -10.1489 ] + No noise model + + 0 1 Leaf p(x3) + R = [ 10.0494 ] + d = [ -10.1479 ] + No noise model + + 1 Choice(m1) + 1 0 Leaf p(x3) + R = [ 10.0494 ] + d = [ -10.0504 ] + No noise model + + 1 1 Leaf p(x3) + R = [ 10.0494 ] + d = [ -10.0494 ] + No noise model + +)"; + EXPECT(assert_print_equal(expected_hybridBayesNet, *hybridBayesNet)); +} // /* ************************************************************************* // */ From ee124c33c3b535e651e8a5c5d31bdb863828302a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 2 Aug 2022 19:10:27 -0400 Subject: [PATCH 43/74] fix discrete only elimination (use EliminateForMPE) --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 2 +- .../tests/testHybridNonlinearFactorGraph.cpp | 21 ++++++++----------- 2 files changed, 10 insertions(+), 13 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 3536f6a361..55fa9a908a 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -164,7 +164,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } } - auto result = EliminateDiscrete(dfg, frontalKeys); + auto result = EliminateForMPE(dfg, frontalKeys); return {boost::make_shared(result.first), boost::make_shared(result.second)}; diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 947fafacb9..6e66e5fcb9 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -465,18 +465,15 @@ TEST(HybridFactorGraph, Full_Elimination) { // P(m1 | m2) EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(1)}); EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(2)})); - GTSAM_PRINT(*(hybridBayesNet->at(3))); - GTSAM_PRINT(*(discreteBayesNet.at(0))); - //TODO(Varun) FIX!! 
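// With discreteElimination() switched to EliminateForMPE above, the discrete
// conditionals produced by hybrid and batch elimination agree, so the
// equality checks can be restored. They use the type-erasure idiom seen
// throughout: unwrap the HybridConditional via inner(), then downcast. A
// minimal sketch, with hybridBayesNet as produced by eliminateSequential:
//
//   auto discrete = boost::dynamic_pointer_cast<DiscreteConditional>(
//       hybridBayesNet->at(3)->inner());
//   if (discrete) discrete->print("P(m1 | m2): ");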
- // EXPECT( - // dynamic_pointer_cast(hybridBayesNet->at(3)->inner()) - // ->equals(*discreteBayesNet.at(0))); - // // P(m2) - // EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)}); - // EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents()); - // EXPECT( - // dynamic_pointer_cast(hybridBayesNet->at(4)->inner()) - // ->equals(*discreteBayesNet.at(1))); + EXPECT( + dynamic_pointer_cast(hybridBayesNet->at(3)->inner()) + ->equals(*discreteBayesNet.at(0))); + // P(m2) + EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)}); + EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents()); + EXPECT( + dynamic_pointer_cast(hybridBayesNet->at(4)->inner()) + ->equals(*discreteBayesNet.at(1))); } /* From b39c2316e6cdeb91e8e48ca5d526415b645a9be8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 2 Aug 2022 19:17:49 -0400 Subject: [PATCH 44/74] all tests pass!!! --- .../tests/testHybridNonlinearFactorGraph.cpp | 155 +++++++++--------- 1 file changed, 78 insertions(+), 77 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 6e66e5fcb9..c732ec6c51 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -476,9 +476,9 @@ TEST(HybridFactorGraph, Full_Elimination) { ->equals(*discreteBayesNet.at(1))); } -/* -****************************************************************************/ -// Test printing +/**************************************************************************** + * Test printing + */ TEST(HybridFactorGraph, Printing) { Switching self(3); @@ -663,80 +663,81 @@ factor 2: Hybrid P( x3 | m1 m2) EXPECT(assert_print_equal(expected_hybridBayesNet, *hybridBayesNet)); } -// /* ************************************************************************* -// */ -// // Simple PlanarSLAM example test with 2 poses and 2 landmarks (each pose -// // connects to 1 landmark) to expose issue with default decision tree -// creation -// // in hybrid elimination. The hybrid factor is between the poses X0 and X1. -// The -// // issue arises if we eliminate a landmark variable first since it is not -// // connected to a DCFactor. -// TEST(HybridFactorGraph, DefaultDecisionTree) { -// HybridNonlinearFactorGraph fg; - -// // Add a prior on pose x1 at the origin. A prior factor consists of a mean -// and -// // a noise model (covariance matrix) -// Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin -// auto priorNoise = noiseModel::Diagonal::Sigmas( -// Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta -// fg.emplace_nonlinear>(X(0), prior, priorNoise); - -// using PlanarMotionModel = BetweenFactor; - -// // Add odometry factor -// Pose2 odometry(2.0, 0.0, 0.0); -// KeyVector contKeys = {X(0), X(1)}; -// auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); -// auto still = boost::make_shared(X(0), X(1), Pose2(0, 0, -// 0), -// noise_model), -// moving = boost::make_shared(X(0), X(1), odometry, -// noise_model); -// std::vector components = {still, moving}; -// auto dcFactor = boost::make_shared>( -// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); -// fg.push_back(dcFactor); - -// // Add Range-Bearing measurements to from X0 to L0 and X1 to L1. 
-// // create a noise model for the landmark measurements -// auto measurementNoise = noiseModel::Diagonal::Sigmas( -// Vector2(0.1, 0.2)); // 0.1 rad std on bearing, 20cm on range -// // create the measurement values - indices are (pose id, landmark id) -// Rot2 bearing11 = Rot2::fromDegrees(45), bearing22 = Rot2::fromDegrees(90); -// double range11 = std::sqrt(4.0 + 4.0), range22 = 2.0; - -// // Add Bearing-Range factors -// fg.emplace_nonlinear>( -// X(0), L(0), bearing11, range11, measurementNoise); -// fg.emplace_nonlinear>( -// X(1), L(1), bearing22, range22, measurementNoise); - -// // Create (deliberately inaccurate) initial estimate -// Values initialEstimate; -// initialEstimate.insert(X(0), Pose2(0.5, 0.0, 0.2)); -// initialEstimate.insert(X(1), Pose2(2.3, 0.1, -0.2)); -// initialEstimate.insert(L(0), Point2(1.8, 2.1)); -// initialEstimate.insert(L(1), Point2(4.1, 1.8)); - -// // We want to eliminate variables not connected to DCFactors first. -// Ordering ordering; -// ordering += L(0); -// ordering += L(1); -// ordering += X(0); -// ordering += X(1); - -// HybridGaussianFactorGraph linearized = fg.linearize(initialEstimate); -// gtsam::HybridBayesNet::shared_ptr hybridBayesNet; -// gtsam::HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; - -// // This should NOT fail -// std::tie(hybridBayesNet, remainingFactorGraph) = -// linearized.eliminatePartialSequential(ordering); -// EXPECT_LONGS_EQUAL(4, hybridBayesNet->size()); -// EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size()); -// } +/**************************************************************************** + * Simple PlanarSLAM example test with 2 poses and 2 landmarks (each pose + * connects to 1 landmark) to expose issue with default decision tree creation + * in hybrid elimination. The hybrid factor is between the poses X0 and X1. + * The issue arises if we eliminate a landmark variable first since it is not + * connected to a HybridFactor. + */ +TEST(HybridFactorGraph, DefaultDecisionTree) { + HybridNonlinearFactorGraph fg; + + // Add a prior on pose x1 at the origin. + // A prior factor consists of a mean and a noise model (covariance matrix) + Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin + auto priorNoise = noiseModel::Diagonal::Sigmas( + Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta + fg.emplace_nonlinear>(X(0), prior, priorNoise); + + using PlanarMotionModel = BetweenFactor; + + // Add odometry factor + Pose2 odometry(2.0, 0.0, 0.0); + KeyVector contKeys = {X(0), X(1)}; + auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); + auto still = boost::make_shared(X(0), X(1), Pose2(0, 0, + 0), + noise_model), + moving = boost::make_shared(X(0), X(1), odometry, + noise_model); + std::vector motion_models = {still, moving}; + //TODO(Varun) Make a templated constructor for MixtureFactor which does this? + std::vector components; + for (auto&& f : motion_models) { + components.push_back(boost::dynamic_pointer_cast(f)); + } + fg.emplace_hybrid( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + + // Add Range-Bearing measurements to from X0 to L0 and X1 to L1. 
+ // create a noise model for the landmark measurements + auto measurementNoise = noiseModel::Diagonal::Sigmas( + Vector2(0.1, 0.2)); // 0.1 rad std on bearing, 20cm on range + // create the measurement values - indices are (pose id, landmark id) + Rot2 bearing11 = Rot2::fromDegrees(45), bearing22 = Rot2::fromDegrees(90); + double range11 = std::sqrt(4.0 + 4.0), range22 = 2.0; + + // Add Bearing-Range factors + fg.emplace_nonlinear>( + X(0), L(0), bearing11, range11, measurementNoise); + fg.emplace_nonlinear>( + X(1), L(1), bearing22, range22, measurementNoise); + + // Create (deliberately inaccurate) initial estimate + Values initialEstimate; + initialEstimate.insert(X(0), Pose2(0.5, 0.0, 0.2)); + initialEstimate.insert(X(1), Pose2(2.3, 0.1, -0.2)); + initialEstimate.insert(L(0), Point2(1.8, 2.1)); + initialEstimate.insert(L(1), Point2(4.1, 1.8)); + + // We want to eliminate variables not connected to DCFactors first. + Ordering ordering; + ordering += L(0); + ordering += L(1); + ordering += X(0); + ordering += X(1); + + HybridGaussianFactorGraph linearized = fg.linearize(initialEstimate); + gtsam::HybridBayesNet::shared_ptr hybridBayesNet; + gtsam::HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; + + // This should NOT fail + std::tie(hybridBayesNet, remainingFactorGraph) = + linearized.eliminatePartialSequential(ordering); + EXPECT_LONGS_EQUAL(4, hybridBayesNet->size()); + EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size()); +} /* ************************************************************************* */ int main() { From 0f732d734512ae322fa421773f3b0d0e828ee473 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Aug 2022 02:48:06 -0400 Subject: [PATCH 45/74] fix discrete conditional test --- gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp index 552bb18f59..0ea0faccde 100644 --- a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp @@ -125,7 +125,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { auto dc = result->at(2)->asDiscreteConditional(); DiscreteValues dv; dv[C(1)] = 0; - EXPECT_DOUBLES_EQUAL(0.6225, dc->operator()(dv), 1e-3); + EXPECT_DOUBLES_EQUAL(1, dc->operator()(dv), 1e-3); } /* ************************************************************************* */ From 66707792d275dd03da0c6747cba3c1f75efcf4aa Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 3 Aug 2022 04:21:12 -0400 Subject: [PATCH 46/74] Wrap DiscreteLookupTable --- gtsam/discrete/discrete.i | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/gtsam/discrete/discrete.i b/gtsam/discrete/discrete.i index b3a12a8d52..fa98f36fa5 100644 --- a/gtsam/discrete/discrete.i +++ b/gtsam/discrete/discrete.i @@ -219,6 +219,16 @@ class DiscreteBayesTree { }; #include + +class DiscreteLookupTable : gtsam::DiscreteConditional{ + DiscreteLookupTable(size_t nFrontals, const gtsam::DiscreteKeys& keys, + const gtsam::DecisionTreeFactor::ADT& potentials); + void print( + const std::string& s = "Discrete Lookup Table: ", + const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; + size_t argmax(const gtsam::DiscreteValues& parentsValues) const; +}; + class DiscreteLookupDAG { DiscreteLookupDAG(); void push_back(const gtsam::DiscreteLookupTable* table); From 5965d8f2fb45260a30637c417d0eedd2e2941ed6 Mon Sep 17 00:00:00 2001 
From: Varun Agrawal Date: Mon, 8 Aug 2022 17:16:47 -0400 Subject: [PATCH 47/74] change discrete key variable from C to M --- gtsam/hybrid/tests/Switching.h | 2 +- .../tests/testGaussianHybridFactorGraph.cpp | 178 +++++++++--------- 2 files changed, 89 insertions(+), 91 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index d584dd60ea..ac440d2a54 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -50,7 +50,7 @@ using symbol_shorthand::X; */ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( size_t n, std::function keyFunc = X, - std::function dKeyFunc = C) { + std::function dKeyFunc = M) { HybridGaussianFactorGraph hfg; hfg.add(JacobianFactor(keyFunc(1), I_3x3, Z_3x1)); diff --git a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp index 92fc699bb7..0b2d5f1903 100644 --- a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp @@ -52,8 +52,8 @@ using namespace boost::assign; using namespace std; using namespace gtsam; -using gtsam::symbol_shorthand::C; using gtsam::symbol_shorthand::D; +using gtsam::symbol_shorthand::M; using gtsam::symbol_shorthand::X; using gtsam::symbol_shorthand::Y; @@ -67,9 +67,9 @@ TEST(HybridGaussianFactorGraph, Creation) { // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor // graph - GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{C(0), 2}), + GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), GaussianMixture::Conditionals( - C(0), + M(0), boost::make_shared( X(0), Z_3x1, I_3x3, X(1), I_3x3), boost::make_shared( @@ -96,11 +96,11 @@ TEST(HybridGaussianFactorGraph, EliminateMultifrontal) { // Test multifrontal elimination HybridGaussianFactorGraph hfg; - DiscreteKey c(C(1), 2); + DiscreteKey m(M(1), 2); // Add priors on x0 and c1 hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); - hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8}))); + hfg.add(HybridDiscreteFactor(DecisionTreeFactor(m, {2, 8}))); Ordering ordering; ordering.push_back(X(0)); @@ -114,7 +114,7 @@ TEST(HybridGaussianFactorGraph, EliminateMultifrontal) { TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { HybridGaussianFactorGraph hfg; - DiscreteKey c1(C(1), 2); + DiscreteKey m1(M(1), 2); // Add prior on x0 hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); @@ -123,17 +123,17 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { // Add a gaussian mixture factor Ï•(x1, c1) DecisionTree dt( - C(1), boost::make_shared(X(1), I_3x3, Z_3x1), + M(1), boost::make_shared(X(1), I_3x3, Z_3x1), boost::make_shared(X(1), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt)); + hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt)); auto result = - hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {C(1)})); + hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {M(1)})); auto dc = result->at(2)->asDiscreteConditional(); DiscreteValues dv; - dv[C(1)] = 0; + dv[M(1)] = 0; EXPECT_DOUBLES_EQUAL(1, dc->operator()(dv), 1e-3); } @@ -141,27 +141,27 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { HybridGaussianFactorGraph hfg; - DiscreteKey c1(C(1), 2); + DiscreteKey m1(M(1), 2); // Add prior on x0 hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); // Add factor between x0 and x1 hfg.add(JacobianFactor(X(0), I_3x3, X(1), 
-I_3x3, Z_3x1)); - DecisionTree dt( - C(1), boost::make_shared(X(1), I_3x3, Z_3x1), - boost::make_shared(X(1), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt)); + std::vector factors = { + boost::make_shared(X(1), I_3x3, Z_3x1), + boost::make_shared(X(1), I_3x3, Vector3::Ones())}; + hfg.add(GaussianMixtureFactor({X(1)}, {m1}, factors)); // Discrete probability table for c1 - hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c1, {2, 8}))); + hfg.add(DecisionTreeFactor(m1, {2, 8})); // Joint discrete probability table for c1, c2 - hfg.add(HybridDiscreteFactor( - DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4"))); + hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); - auto result = hfg.eliminateSequential( - Ordering::ColamdConstrainedLast(hfg, {C(1), C(2)})); + HybridBayesNet::shared_ptr result = hfg.eliminateSequential( + Ordering::ColamdConstrainedLast(hfg, {M(1), M(2)})); + // There are 4 variables (2 continuous + 2 discrete) in the bayes net. EXPECT_LONGS_EQUAL(4, result->size()); } @@ -169,31 +169,33 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { HybridGaussianFactorGraph hfg; - DiscreteKey c1(C(1), 2); + DiscreteKey m1(M(1), 2); hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); hfg.add(GaussianMixtureFactor::FromFactors( - {X(1)}, {{C(1), 2}}, + {X(1)}, {{M(1), 2}}, {boost::make_shared(X(1), I_3x3, Z_3x1), boost::make_shared(X(1), I_3x3, Vector3::Ones())})); - hfg.add(DecisionTreeFactor(c1, {2, 8})); - hfg.add(DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4")); + hfg.add(DecisionTreeFactor(m1, {2, 8})); + hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); - auto result = hfg.eliminateMultifrontal( - Ordering::ColamdConstrainedLast(hfg, {C(1), C(2)})); + HybridBayesTree::shared_ptr result = hfg.eliminateMultifrontal( + Ordering::ColamdConstrainedLast(hfg, {M(1), M(2)})); + // The bayes tree should have 3 cliques + EXPECT_LONGS_EQUAL(3, result->size()); // GTSAM_PRINT(*result); - // GTSAM_PRINT(*result->marginalFactor(C(2))); + // GTSAM_PRINT(*result->marginalFactor(M(2))); } /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { HybridGaussianFactorGraph hfg; - DiscreteKey c(C(1), 2); + DiscreteKey m(M(1), 2); // Prior on x0 hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); @@ -202,16 +204,16 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { // Decision tree with different modes on x1 DecisionTree dt( - C(1), boost::make_shared(X(1), I_3x3, Z_3x1), + M(1), boost::make_shared(X(1), I_3x3, Z_3x1), boost::make_shared(X(1), I_3x3, Vector3::Ones())); // Hybrid factor P(x1|c1) - hfg.add(GaussianMixtureFactor({X(1)}, {c}, dt)); + hfg.add(GaussianMixtureFactor({X(1)}, {m}, dt)); // Prior factor on c1 - hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8}))); + hfg.add(HybridDiscreteFactor(DecisionTreeFactor(m, {2, 8}))); // Get a constrained ordering keeping c1 last - auto ordering_full = Ordering::ColamdConstrainedLast(hfg, {C(1)}); + auto ordering_full = Ordering::ColamdConstrainedLast(hfg, {M(1)}); // Returns a Hybrid Bayes Tree with distribution P(x0|x1)P(x1|c1)P(c1) HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full); @@ -232,48 +234,48 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { { hfg.add(GaussianMixtureFactor::FromFactors( - {X(0)}, 
{{C(0), 2}}, + {X(0)}, {{M(0), 2}}, {boost::make_shared(X(0), I_3x3, Z_3x1), boost::make_shared(X(0), I_3x3, Vector3::Ones())})); DecisionTree dt1( - C(1), boost::make_shared(X(2), I_3x3, Z_3x1), + M(1), boost::make_shared(X(2), I_3x3, Z_3x1), boost::make_shared(X(2), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(2)}, {{C(1), 2}}, dt1)); + hfg.add(GaussianMixtureFactor({X(2)}, {{M(1), 2}}, dt1)); } hfg.add(HybridDiscreteFactor( - DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4"))); + DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"))); hfg.add(JacobianFactor(X(3), I_3x3, X(4), -I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1)); { DecisionTree dt( - C(3), boost::make_shared(X(3), I_3x3, Z_3x1), + M(3), boost::make_shared(X(3), I_3x3, Z_3x1), boost::make_shared(X(3), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(3)}, {{C(3), 2}}, dt)); + hfg.add(GaussianMixtureFactor({X(3)}, {{M(3), 2}}, dt)); DecisionTree dt1( - C(2), boost::make_shared(X(5), I_3x3, Z_3x1), + M(2), boost::make_shared(X(5), I_3x3, Z_3x1), boost::make_shared(X(5), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(5)}, {{C(2), 2}}, dt1)); + hfg.add(GaussianMixtureFactor({X(5)}, {{M(2), 2}}, dt1)); } auto ordering_full = - Ordering::ColamdConstrainedLast(hfg, {C(0), C(1), C(2), C(3)}); + Ordering::ColamdConstrainedLast(hfg, {M(0), M(1), M(2), M(3)}); HybridBayesTree::shared_ptr hbt; HybridGaussianFactorGraph::shared_ptr remaining; std::tie(hbt, remaining) = hfg.eliminatePartialMultifrontal(ordering_full); - // GTSAM_PRINT(*hbt); - // GTSAM_PRINT(*remaining); + // 9 cliques in the bayes tree and 0 remaining variables to eliminate. + EXPECT_LONGS_EQUAL(9, hbt->size()); + EXPECT_LONGS_EQUAL(0, remaining->size()); - // hbt->marginalFactor(X(1))->print("HBT: "); /* (Fan) Explanation: the Junction tree will need to reeliminate to get to the marginal on X(1), which is not possible because it involves eliminating @@ -307,13 +309,13 @@ TEST(HybridGaussianFactorGraph, Switching) { // X(3), X(7) // X(2), X(8) // X(1), X(4), X(6), X(9) - // C(5) will be the center, C(1-4), C(6-8) - // C(3), C(7) - // C(1), C(4), C(2), C(6), C(8) + // M(5) will be the center, M(1-4), M(6-8) + // M(3), M(7) + // M(1), M(4), M(2), M(6), M(8) // auto ordering_full = // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), // X(5), - // C(1), C(4), C(2), C(6), C(8), C(3), C(7), C(5)}); + // M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)}); KeyVector ordering; { @@ -336,7 +338,7 @@ TEST(HybridGaussianFactorGraph, Switching) { std::iota(naturalC.begin(), naturalC.end(), 1); std::vector ordC; std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), - [](int x) { return C(x); }); + [](int x) { return M(x); }); KeyVector ndC; std::vector lvls; @@ -353,9 +355,9 @@ TEST(HybridGaussianFactorGraph, Switching) { HybridGaussianFactorGraph::shared_ptr remaining; std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full); - // GTSAM_PRINT(*hbt); - // GTSAM_PRINT(*remaining); - // hbt->marginalFactor(C(11))->print("HBT: "); + // 12 cliques in the bayes tree and 0 remaining variables to eliminate. 
+ EXPECT_LONGS_EQUAL(12, hbt->size()); + EXPECT_LONGS_EQUAL(0, remaining->size()); } /* ************************************************************************* */ @@ -368,13 +370,13 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { // X(3), X(7) // X(2), X(8) // X(1), X(4), X(6), X(9) - // C(5) will be the center, C(1-4), C(6-8) - // C(3), C(7) - // C(1), C(4), C(2), C(6), C(8) + // M(5) will be the center, M(1-4), M(6-8) + // M(3), M(7) + // M(1), M(4), M(2), M(6), M(8) // auto ordering_full = // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), // X(5), - // C(1), C(4), C(2), C(6), C(8), C(3), C(7), C(5)}); + // M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)}); KeyVector ordering; { @@ -397,7 +399,7 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { std::iota(naturalC.begin(), naturalC.end(), 1); std::vector ordC; std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), - [](int x) { return C(x); }); + [](int x) { return M(x); }); KeyVector ndC; std::vector lvls; @@ -407,9 +409,6 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { } auto ordering_full = Ordering(ordering); - // GTSAM_PRINT(*hfg); - // GTSAM_PRINT(ordering_full); - HybridBayesTree::shared_ptr hbt; HybridGaussianFactorGraph::shared_ptr remaining; std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full); @@ -417,19 +416,18 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { auto new_fg = makeSwitchingChain(12); auto isam = HybridGaussianISAM(*hbt); - { - HybridGaussianFactorGraph factorGraph; - factorGraph.push_back(new_fg->at(new_fg->size() - 2)); - factorGraph.push_back(new_fg->at(new_fg->size() - 1)); - isam.update(factorGraph); - // std::cout << isam.dot(); - // isam.marginalFactor(C(11))->print(); - } + // Run an ISAM update. + HybridGaussianFactorGraph factorGraph; + factorGraph.push_back(new_fg->at(new_fg->size() - 2)); + factorGraph.push_back(new_fg->at(new_fg->size() - 1)); + isam.update(factorGraph); + + // ISAM should have 12 factors after the last update + EXPECT_LONGS_EQUAL(12, isam.size()); } /* ************************************************************************* */ -// TODO(Varun) Actually test something! 
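// The test enabled below now asserts real post-conditions on partial
// elimination. It follows the recurring idiom in this file: constrain the
// discrete mode keys to be eliminated last, so all continuous variables go
// first. A minimal sketch, assuming a HybridGaussianFactorGraph hfg with
// mode keys M(1) and M(2):
//
//   auto ordering = Ordering::ColamdConstrainedLast(hfg, {M(1), M(2)});
//   HybridBayesNet::shared_ptr bayesNet = hfg.eliminateSequential(ordering);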
-TEST_DISABLED(HybridGaussianFactorGraph, SwitchingTwoVar) { +TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { const int N = 7; auto hfg = makeSwitchingChain(N, X); hfg->push_back(*makeSwitchingChain(N, Y, D)); @@ -449,7 +447,7 @@ TEST_DISABLED(HybridGaussianFactorGraph, SwitchingTwoVar) { } for (size_t i = 1; i <= N - 1; i++) { - ordX.emplace_back(C(i)); + ordX.emplace_back(M(i)); } for (size_t i = 1; i <= N - 1; i++) { ordX.emplace_back(D(i)); @@ -461,8 +459,8 @@ TEST_DISABLED(HybridGaussianFactorGraph, SwitchingTwoVar) { dw.positionHints['c'] = 0; dw.positionHints['d'] = 3; dw.positionHints['y'] = 2; - std::cout << hfg->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; + // std::cout << hfg->dot(DefaultKeyFormatter, dw); + // std::cout << "\n"; } { @@ -471,10 +469,10 @@ TEST_DISABLED(HybridGaussianFactorGraph, SwitchingTwoVar) { // dw.positionHints['c'] = 0; // dw.positionHints['d'] = 3; dw.positionHints['x'] = 1; - std::cout << "\n"; + // std::cout << "\n"; // std::cout << hfg->eliminateSequential(Ordering(ordX)) // ->dot(DefaultKeyFormatter, dw); - hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout); + // hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout); } Ordering ordering_partial; @@ -482,22 +480,22 @@ TEST_DISABLED(HybridGaussianFactorGraph, SwitchingTwoVar) { ordering_partial.emplace_back(X(i)); ordering_partial.emplace_back(Y(i)); } + HybridBayesNet::shared_ptr hbn; + HybridGaussianFactorGraph::shared_ptr remaining; + std::tie(hbn, remaining) = + hfg->eliminatePartialSequential(ordering_partial); + + EXPECT_LONGS_EQUAL(14, hbn->size()); + EXPECT_LONGS_EQUAL(11, remaining->size()); + { - HybridBayesNet::shared_ptr hbn; - HybridGaussianFactorGraph::shared_ptr remaining; - std::tie(hbn, remaining) = - hfg->eliminatePartialSequential(ordering_partial); - - // remaining->print(); - { - DotWriter dw; - dw.positionHints['x'] = 1; - dw.positionHints['c'] = 0; - dw.positionHints['d'] = 3; - dw.positionHints['y'] = 2; - std::cout << remaining->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; - } + DotWriter dw; + dw.positionHints['x'] = 1; + dw.positionHints['c'] = 0; + dw.positionHints['d'] = 3; + dw.positionHints['y'] = 2; + // std::cout << remaining->dot(DefaultKeyFormatter, dw); + // std::cout << "\n"; } } From f5e046fd109f291d1f4b22797e04a83fb7e4e62c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 8 Aug 2022 17:17:22 -0400 Subject: [PATCH 48/74] split HybridNonlinearFactorGraph to .h and .cpp --- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 100 ++++++++++++++++++++ gtsam/hybrid/HybridNonlinearFactorGraph.h | 72 ++------------ 2 files changed, 108 insertions(+), 64 deletions(-) create mode 100644 gtsam/hybrid/HybridNonlinearFactorGraph.cpp diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp new file mode 100644 index 0000000000..233badc550 --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -0,0 +1,100 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearFactorGraph.cpp + * @brief Nonlinear hybrid factor graph that uses type erasure + * @author Varun Agrawal + * @date May 28, 2022 + */ + +#include + +namespace gtsam { + +/* ************************************************************************* */ +void HybridNonlinearFactorGraph::add( + boost::shared_ptr factor) { + FactorGraph::add(boost::make_shared(factor)); +} + +/* ************************************************************************* */ +void HybridNonlinearFactorGraph::print(const std::string& s, + const KeyFormatter& keyFormatter) const { + // Base::print(str, keyFormatter); + std::cout << (s.empty() ? "" : s + " ") << std::endl; + std::cout << "size: " << size() << std::endl; + for (size_t i = 0; i < factors_.size(); i++) { + std::stringstream ss; + ss << "factor " << i << ": "; + if (factors_[i]) { + factors_[i]->print(ss.str(), keyFormatter); + std::cout << std::endl; + } + } +} + +/* ************************************************************************* */ +bool HybridNonlinearFactorGraph::equals(const HybridNonlinearFactorGraph& other, + double tol) const { + return false; +} + +/* ************************************************************************* */ +HybridGaussianFactorGraph HybridNonlinearFactorGraph::linearize( + const Values& continuousValues) const { + // create an empty linear FG + HybridGaussianFactorGraph linearFG; + + linearFG.reserve(size()); + + // linearize all hybrid factors + for (auto&& factor : factors_) { + // First check if it is a valid factor + if (factor) { + // Check if the factor is a hybrid factor. + // It can be either a nonlinear MixtureFactor or a linear + // GaussianMixtureFactor. + if (factor->isHybrid()) { + // Check if it is a nonlinear mixture factor + if (auto nlmf = boost::dynamic_pointer_cast(factor)) { + linearFG.push_back(nlmf->linearize(continuousValues)); + } else { + linearFG.push_back(factor); + } + + // Now check if the factor is a continuous only factor. + } else if (factor->isContinuous()) { + // In this case, we check if factor->inner() is nonlinear since + // HybridFactors wrap over continuous factors. + auto nlhf = boost::dynamic_pointer_cast(factor); + if (auto nlf = + boost::dynamic_pointer_cast(nlhf->inner())) { + auto hgf = boost::make_shared( + nlf->linearize(continuousValues)); + linearFG.push_back(hgf); + } else { + linearFG.push_back(factor); + } + // Finally if nothing else, we are discrete-only which doesn't need + // lineariztion. + } else { + linearFG.push_back(factor); + } + + } else { + linearFG.push_back(GaussianFactor::shared_ptr()); + } + } + return linearFG; +} + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h index 6c5dd515fa..073581da2c 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.h +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -18,9 +18,9 @@ #pragma once -#include #include #include +#include #include #include #include @@ -115,25 +115,15 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { // } /// Add a nonlinear factor as a shared ptr. - void add(boost::shared_ptr factor) { - FactorGraph::add(boost::make_shared(factor)); - } + void add(boost::shared_ptr factor); - /** - * Simply prints the factor graph. - */ + /// Print the factor graph. 
void print( - const std::string& str = "HybridNonlinearFactorGraph", - const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override {} + const std::string& s = "HybridNonlinearFactorGraph", + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override; - /** - * @return true if all internal graphs of `this` are equal to those of - * `other` - */ - bool equals(const HybridNonlinearFactorGraph& other, - double tol = 1e-9) const { - return false; - } + /// Check if this factor graph is equal to `other`. + bool equals(const HybridNonlinearFactorGraph& other, double tol = 1e-9) const; /** * @brief Linearize all the continuous factors in the @@ -142,53 +132,7 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { * @param continuousValues: Dictionary of continuous values. * @return HybridGaussianFactorGraph::shared_ptr */ - HybridGaussianFactorGraph linearize(const Values& continuousValues) const { - // create an empty linear FG - HybridGaussianFactorGraph linearFG; - - linearFG.reserve(size()); - - // linearize all hybrid factors - for (auto&& factor : factors_) { - // First check if it is a valid factor - if (factor) { - // Check if the factor is a hybrid factor. - // It can be either a nonlinear MixtureFactor or a linear - // GaussianMixtureFactor. - if (factor->isHybrid()) { - // Check if it is a nonlinear mixture factor - if (auto nlmf = boost::dynamic_pointer_cast(factor)) { - linearFG.push_back(nlmf->linearize(continuousValues)); - } else { - linearFG.push_back(factor); - } - - // Now check if the factor is a continuous only factor. - } else if (factor->isContinuous()) { - // In this case, we check if factor->inner() is nonlinear since - // HybridFactors wrap over continuous factors. - auto nlhf = - boost::dynamic_pointer_cast(factor); - if (auto nlf = - boost::dynamic_pointer_cast(nlhf->inner())) { - auto hgf = boost::make_shared( - nlf->linearize(continuousValues)); - linearFG.push_back(hgf); - } else { - linearFG.push_back(factor); - } - // Finally if nothing else, we are discrete-only which doesn't need - // lineariztion. 
- } else { - linearFG.push_back(factor); - } - - } else { - linearFG.push_back(GaussianFactor::shared_ptr()); - } - } - return linearFG; - } + HybridGaussianFactorGraph linearize(const Values& continuousValues) const; }; template <> From 51d2f077093534921d535028dd45fcc8478dc3de Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 8 Aug 2022 18:02:59 -0400 Subject: [PATCH 49/74] fix printing and key bug in MixtureFactor linearize --- gtsam/hybrid/GaussianMixtureFactor.cpp | 2 +- gtsam/hybrid/HybridFactor.cpp | 10 ++++++---- gtsam/hybrid/HybridNonlinearFactor.cpp | 2 +- gtsam/hybrid/MixtureFactor.h | 21 ++++----------------- 4 files changed, 12 insertions(+), 23 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 8f832d8eab..9b5be188a6 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -51,7 +51,7 @@ GaussianMixtureFactor GaussianMixtureFactor::FromFactors( void GaussianMixtureFactor::print(const std::string &s, const KeyFormatter &formatter) const { HybridFactor::print(s, formatter); - std::cout << "]{\n"; + std::cout << "{\n"; factors_.print( "", [&](Key k) { return formatter(k); }, [&](const GaussianFactor::shared_ptr &gf) -> std::string { diff --git a/gtsam/hybrid/HybridFactor.cpp b/gtsam/hybrid/HybridFactor.cpp index 03fe89c188..5871a13032 100644 --- a/gtsam/hybrid/HybridFactor.cpp +++ b/gtsam/hybrid/HybridFactor.cpp @@ -89,17 +89,19 @@ void HybridFactor::print(const std::string &s, if (isContinuous_) std::cout << "Continuous "; if (isDiscrete_) std::cout << "Discrete "; if (isHybrid_) std::cout << "Hybrid "; - for (size_t c=0; c 0 ? ";" : ""); } } - for(auto && discreteKey: discreteKeys_) { - std::cout << formatter(discreteKey.first) << " "; + for (auto &&discreteKey : discreteKeys_) { + std::cout << " " << formatter(discreteKey.first); } + std::cout << "]"; } } // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index 0938fd2b12..5a1833d397 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -35,7 +35,7 @@ bool HybridNonlinearFactor::equals(const HybridFactor &lf, double tol) const { void HybridNonlinearFactor::print(const std::string &s, const KeyFormatter &formatter) const { HybridFactor::print(s, formatter); - inner_->print("inner: ", formatter); + inner_->print("\n", formatter); }; } // namespace gtsam diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h index ba449976a8..8c25457efd 100644 --- a/gtsam/hybrid/MixtureFactor.h +++ b/gtsam/hybrid/MixtureFactor.h @@ -131,21 +131,8 @@ class MixtureFactor : public HybridFactor { const std::string& s = "MixtureFactor", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override { std::cout << (s.empty() ? 
"" : s + " "); - std::cout << "("; - auto contKeys = keys(); - auto dKeys = discreteKeys(); - for (DiscreteKey key : dKeys) { - auto it = std::find(contKeys.begin(), contKeys.end(), key.first); - contKeys.erase(it); - } - for (Key key : contKeys) { - std::cout << " " << keyFormatter(key); - } - std::cout << ";"; - for (DiscreteKey key : dKeys) { - std::cout << " " << keyFormatter(key.first); - } - std::cout << " ) \n"; + Base::print("", keyFormatter); + std::cout << "\nMixtureFactor\n"; auto valueFormatter = [](const sharedFactor& v) { if (v) { return (boost::format("Nonlinear factor on %d keys") % v->size()).str(); @@ -200,8 +187,8 @@ class MixtureFactor : public HybridFactor { DecisionTree linearized_factors( factors_, linearizeDT); - return boost::make_shared(keys_, discreteKeys_, - linearized_factors); + return boost::make_shared( + continuousKeys_, discreteKeys_, linearized_factors); } /** From a3eacaa5ab39985d3c73f07af2c83ed40a31acd2 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 8 Aug 2022 18:03:14 -0400 Subject: [PATCH 50/74] fix adding priors in Switching --- gtsam/hybrid/tests/Switching.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index ac440d2a54..52ef0439eb 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -156,7 +156,7 @@ struct Switching { // Add measurement factors auto measurement_noise = noiseModel::Isotropic::Sigma(1, 0.1); - for (size_t k = 1; k <= K; k++) { + for (size_t k = 2; k <= K; k++) { nonlinearFactorGraph.emplace_nonlinear>( X(k), 1.0 * (k - 1), measurement_noise); } From 588f56ef3ef7be194bbfaf2b5c4a323d0518c405 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 8 Aug 2022 18:03:38 -0400 Subject: [PATCH 51/74] HybridGaussianISAM unit tests --- gtsam/hybrid/tests/testHybridIncremental.cpp | 639 +++++++++++++++++++ 1 file changed, 639 insertions(+) create mode 100644 gtsam/hybrid/tests/testHybridIncremental.cpp diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp new file mode 100644 index 0000000000..420dcd3393 --- /dev/null +++ b/gtsam/hybrid/tests/testHybridIncremental.cpp @@ -0,0 +1,639 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridIncremental.cpp + * @brief Unit tests for incremental inference + * @author Fan Jiang, Varun Agrawal, Frank Dellaert + * @date Jan 2021 + */ + +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "Switching.h" + +// Include for test suite +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::L; +using symbol_shorthand::M; +using symbol_shorthand::W; +using symbol_shorthand::X; +using symbol_shorthand::Y; +using symbol_shorthand::Z; + +/* ****************************************************************************/ +// Test if we can perform elimination incrementally. 
+TEST(HybridGaussianElimination, IncrementalElimination) { + Switching switching(3); + HybridGaussianISAM isam; + HybridGaussianFactorGraph graph1; + + // Create initial factor graph + // * * * + // | | | + // *- X1 -*- X2 -*- X3 + // \*-M1-*/ + graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1) + graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1) + graph1.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2) + graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1) + + // Create ordering. + Ordering ordering; + ordering += X(1); + ordering += X(2); + + // Run update step + isam.update(graph1); + + // Check that after update we have 3 hybrid Bayes net nodes: + // P(X1 | X2, M1) and P(X2, X3 | M1, M2), P(M1, M2) + EXPECT_LONGS_EQUAL(3, isam.size()); + EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector{X(1)}); + EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)})); + EXPECT(isam[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)})); + EXPECT(isam[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)})); + + /********************************************************/ + // New factor graph for incremental update. + HybridGaussianFactorGraph graph2; + + graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2) + graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3) + graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2) + + // Create ordering. + Ordering ordering2; + ordering2 += X(2); + ordering2 += X(3); + + isam.update(graph2); + + // Check that after the second update we have + // 1 additional hybrid Bayes net node: + // P(X2, X3 | M1, M2) + EXPECT_LONGS_EQUAL(3, isam.size()); + EXPECT(isam[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)})); + EXPECT(isam[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)})); +} + +/* ****************************************************************************/ +// Test if we can incrementally do the inference +TEST(HybridGaussianElimination, IncrementalInference) { + Switching switching(3); + HybridGaussianISAM isam; + HybridGaussianFactorGraph graph1; + + // Create initial factor graph + // * * * + // | | | + // *- X1 -*- X2 -*- X3 + // \*-M1-*/ + graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1) + graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1) + graph1.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2) + graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1) + + // Create ordering. + Ordering ordering; + ordering += X(1); + ordering += X(2); + + // Run update step + isam.update(graph1); + + /********************************************************/ + // New factor graph for incremental update. + HybridGaussianFactorGraph graph2; + + graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2) + graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3) + graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2) + + // Create ordering. + Ordering ordering2; + ordering2 += X(2); + ordering2 += X(3); + + isam.update(graph2); + + /********************************************************/ + // Run batch elimination so we can compare results. 
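+ // (eliminatePartialSequential(ordering) eliminates the continuous variables
+ // in the given order and returns both the resulting hybrid Bayes net and
+ // the factor graph over the remaining discrete variables.)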
+ ordering.clear(); + ordering += X(1); + ordering += X(2); + ordering += X(3); + + // Now we calculate the actual factors using full elimination + HybridBayesNet::shared_ptr expectedHybridBayesNet; + HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph; + std::tie(expectedHybridBayesNet, expectedRemainingGraph) = + switching.linearizedFactorGraph.eliminatePartialSequential(ordering); + + // The densities on X(1) should be the same + expectedHybridBayesNet->atGaussian(0)->print(); + auto x1_conditional = isam[X(1)]->conditional(); + GTSAM_PRINT(*x1_conditional); + // EXPECT(assert_equal(*(hybridBayesNet.atGaussian(0)), + // *(expectedHybridBayesNet->atGaussian(0)))); + + // // The densities on X(2) should be the same + // EXPECT(assert_equal(*(hybridBayesNet2.atGaussian(1)), + // *(expectedHybridBayesNet->atGaussian(1)))); + + // // The densities on X(3) should be the same + // EXPECT(assert_equal(*(hybridBayesNet2.atGaussian(2)), + // *(expectedHybridBayesNet->atGaussian(2)))); + + // // we only do the manual continuous elimination for 0,0 + // // the other discrete probabilities on M(2) are calculated the same way + // auto m00_prob = [&]() { + // GaussianFactorGraph gf; + // gf.add(switching.linearizedFactorGraph.gaussianGraph().at(3)); + + // DiscreteValues m00; + // m00[M(1)] = 0, m00[M(2)] = 0; + // auto dcMixture = + // dynamic_pointer_cast(graph2.dcGraph().at(0)); + // gf.add(dcMixture->factors()(m00)); + // auto x2_mixed = + // boost::dynamic_pointer_cast(hybridBayesNet.at(1)); + // gf.add(x2_mixed->factors()(m00)); + // auto result_gf = gf.eliminateSequential(); + // return gf.probPrime(result_gf->optimize()); + // }(); + + /// Test if the probability values are as expected with regression tests. +// DiscreteValues assignment; +// EXPECT(assert_equal(m00_prob, 0.60656, 1e-5)); +// assignment[M(1)] = 0; +// assignment[M(2)] = 0; +// EXPECT(assert_equal(m00_prob, (*discreteFactor)(assignment), 1e-5)); +// assignment[M(1)] = 1; +// assignment[M(2)] = 0; +// EXPECT(assert_equal(0.612477, (*discreteFactor)(assignment), 1e-5)); +// assignment[M(1)] = 0; +// assignment[M(2)] = 1; +// EXPECT(assert_equal(0.999952, (*discreteFactor)(assignment), 1e-5)); +// assignment[M(1)] = 1; +// assignment[M(2)] = 1; +// EXPECT(assert_equal(1.0, (*discreteFactor)(assignment), 1e-5)); + +// DiscreteFactorGraph dfg; +// dfg.add(*discreteFactor); +// dfg.add(discreteFactor_m1); +// dfg.add_factors(switching.linearizedFactorGraph.discreteGraph()); + +// // Check if the chordal graph generated from incremental elimination +// matches +// // that of batch elimination. +// auto chordal = dfg.eliminateSequential(); +// auto expectedChordal = +// expectedRemainingGraph->discreteGraph().eliminateSequential(); + +// EXPECT(assert_equal(*expectedChordal, *chordal, 1e-6)); +} + +/* ****************************************************************************/ +// // Test if we can approximately do the inference +// TEST(DCGaussianElimination, Approx_inference) { +// Switching switching(4); +// IncrementalHybrid incrementalHybrid; +// HybridGaussianFactorGraph graph1; + +// // Add the 3 DC factors, x1-x2, x2-x3, x3-x4 +// for (size_t i = 0; i < 3; i++) { +// graph1.push_back(switching.linearizedFactorGraph.dcGraph().at(i)); +// } + +// // Add the Gaussian factors, 1 prior on X(1), 4 measurements +// for (size_t i = 0; i <= 4; i++) { +// graph1.push_back(switching.linearizedFactorGraph.gaussianGraph().at(i)); +// } + +// // Create ordering. 
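+// // (All continuous keys X(j) come first so that, once they are eliminated,
+// // only a discrete factor graph over the mode keys M(j) remains.)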
+// Ordering ordering; +// for (size_t j = 1; j <= 4; j++) { +// ordering += X(j); +// } + +// // Now we calculate the actual factors using full elimination +// HybridBayesNet::shared_ptr unprunedHybridBayesNet; +// HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph; +// std::tie(unprunedHybridBayesNet, unprunedRemainingGraph) = +// switching.linearizedFactorGraph.eliminatePartialSequential(ordering); + +// size_t maxComponents = 5; +// incrementalHybrid.update(graph1, ordering, maxComponents); + +// /* +// unpruned factor is: +// Choice(m3) +// 0 Choice(m2) +// 0 0 Choice(m1) +// 0 0 0 Leaf 0.2248 - +// 0 0 1 Leaf 0.3715 - +// 0 1 Choice(m1) +// 0 1 0 Leaf 0.3742 * +// 0 1 1 Leaf 0.6125 * +// 1 Choice(m2) +// 1 0 Choice(m1) +// 1 0 0 Leaf 0.3706 - +// 1 0 1 Leaf 0.6124 * +// 1 1 Choice(m1) +// 1 1 0 Leaf 0.611 * +// 1 1 1 Leaf 1 * + +// pruned factors is: +// Choice(m3) +// 0 Choice(m2) +// 0 0 Leaf 0 +// 0 1 Choice(m1) +// 0 1 0 Leaf 0.3742 +// 0 1 1 Leaf 0.6125 +// 1 Choice(m2) +// 1 0 Choice(m1) +// 1 0 0 Leaf 0 +// 1 0 1 Leaf 0.6124 +// 1 1 Choice(m1) +// 1 1 0 Leaf 0.611 +// 1 1 1 Leaf 1 +// */ + +// // Test that the remaining factor graph has one +// // DecisionTreeFactor on {M3, M2, M1}. +// auto remainingFactorGraph = incrementalHybrid.remainingFactorGraph(); +// EXPECT_LONGS_EQUAL(1, remainingFactorGraph.size()); + +// auto discreteFactor_m1 = *dynamic_pointer_cast( +// incrementalHybrid.remainingDiscreteGraph().at(0)); +// EXPECT(discreteFactor_m1.keys() == KeyVector({M(3), M(2), M(1)})); + +// // Get the number of elements which are equal to 0. +// auto count = [](const double &value, int count) { +// return value > 0 ? count + 1 : count; +// }; +// // Check that the number of leaves after pruning is 5. +// EXPECT_LONGS_EQUAL(5, discreteFactor_m1.fold(count, 0)); + +// /* Expected hybrid Bayes net +// * factor 0: [x1 | x2 m1 ], 2 components +// * factor 1: [x2 | x3 m2 m1 ], 4 components +// * factor 2: [x3 | x4 m3 m2 m1 ], 8 components +// * factor 3: [x4 | m3 m2 m1 ], 8 components +// */ +// auto hybridBayesNet = incrementalHybrid.hybridBayesNet(); + +// // Check if we have a bayes net with 4 hybrid nodes, +// // each with 2, 4, 5 (pruned), and 5 (pruned) leaves respetively. +// EXPECT_LONGS_EQUAL(4, hybridBayesNet.size()); +// EXPECT_LONGS_EQUAL(2, hybridBayesNet.atGaussian(0)->nrComponents()); +// EXPECT_LONGS_EQUAL(4, hybridBayesNet.atGaussian(1)->nrComponents()); +// EXPECT_LONGS_EQUAL(5, hybridBayesNet.atGaussian(2)->nrComponents()); +// EXPECT_LONGS_EQUAL(5, hybridBayesNet.atGaussian(3)->nrComponents()); + +// // Check that the hybrid nodes of the bayes net match those of the bayes +// net +// // before pruning, at the same positions. +// auto &lastDensity = *(hybridBayesNet.atGaussian(3)); +// auto &unprunedLastDensity = *(unprunedHybridBayesNet->atGaussian(3)); +// std::vector> assignments = +// discreteFactor_m1.enumerate(); +// // Loop over all assignments and check the pruned components +// for (auto &&av : assignments) { +// const DiscreteValues &assignment = av.first; +// const double value = av.second; + +// if (value == 0.0) { +// EXPECT(lastDensity(assignment) == nullptr); +// } else { +// CHECK(lastDensity(assignment)); +// EXPECT(assert_equal(*unprunedLastDensity(assignment), +// *lastDensity(assignment))); +// } +// } +// } + +/* ****************************************************************************/ +// // Test approximate inference with an additional pruning step. 
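+// (Pruning keeps the maxComponents most likely leaves and zeroes out the
+// rest; DecisionTree::fold, used above to count the survivors, threads an
+// accumulator through every leaf. A sketch, assuming the DecisionTreeFactor
+// API exercised in these tests:
+//
+//   auto count = [](const double& value, int total) {
+//     return value > 0 ? total + 1 : total;  // pruned leaves are zeroed
+//   };
+//   int nrLeaves = discreteFactor_m1.fold(count, 0);
+// )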
+// TEST(DCGaussianElimination, Incremental_approximate) { +// Switching switching(5); +// IncrementalHybrid incrementalHybrid; +// HybridGaussianFactorGraph graph1; + +// // Add the 3 DC factors, x1-x2, x2-x3, x3-x4 +// for (size_t i = 0; i < 3; i++) { +// graph1.push_back(switching.linearizedFactorGraph.dcGraph().at(i)); +// } + +// // Add the Gaussian factors, 1 prior on X(1), 4 measurements +// for (size_t i = 0; i <= 4; i++) { +// graph1.push_back(switching.linearizedFactorGraph.gaussianGraph().at(i)); +// } + +// // Create ordering. +// Ordering ordering; +// for (size_t j = 1; j <= 4; j++) { +// ordering += X(j); +// } + +// // Run update with pruning +// size_t maxComponents = 5; +// incrementalHybrid.update(graph1, ordering, maxComponents); + +// // Check if we have a bayes net with 4 hybrid nodes, +// // each with 2, 4, 8, and 5 (pruned) leaves respetively. +// auto actualBayesNet1 = incrementalHybrid.hybridBayesNet(); +// CHECK_EQUAL(4, actualBayesNet1.size()); +// EXPECT_LONGS_EQUAL(2, actualBayesNet1.atGaussian(0)->nrComponents()); +// EXPECT_LONGS_EQUAL(4, actualBayesNet1.atGaussian(1)->nrComponents()); +// EXPECT_LONGS_EQUAL(8, actualBayesNet1.atGaussian(2)->nrComponents()); +// EXPECT_LONGS_EQUAL(5, actualBayesNet1.atGaussian(3)->nrComponents()); + +// /***** Run Round 2 *****/ +// HybridGaussianFactorGraph graph2; +// graph2.push_back(switching.linearizedFactorGraph.dcGraph().at(3)); +// graph2.push_back(switching.linearizedFactorGraph.gaussianGraph().at(5)); + +// Ordering ordering2; +// ordering2 += X(4); +// ordering2 += X(5); + +// // Run update with pruning a second time. +// incrementalHybrid.update(graph2, ordering2, maxComponents); + +// // Check if we have a bayes net with 2 hybrid nodes, +// // each with 10 (pruned), and 5 (pruned) leaves respetively. +// auto actualBayesNet = incrementalHybrid.hybridBayesNet(); +// CHECK_EQUAL(2, actualBayesNet.size()); +// EXPECT_LONGS_EQUAL(10, actualBayesNet.atGaussian(0)->nrComponents()); +// EXPECT_LONGS_EQUAL(5, actualBayesNet.atGaussian(1)->nrComponents()); +// } + +/* ************************************************************************/ +// // Test for figuring out the optimal ordering to ensure we get +// // a discrete graph after elimination. +// TEST(IncrementalHybrid, NonTrivial) { +// // This is a GTSAM-only test for running inference on a single legged +// robot. +// // The leg links are represented by the chain X-Y-Z-W, where X is the base +// and +// // W is the foot. +// // We use BetweenFactor as constraints between each of the poses. + +// /*************** Run Round 1 ***************/ +// NonlinearHybridFactorGraph fg; + +// // Add a prior on pose x1 at the origin. +// // A prior factor consists of a mean and +// // a noise model (covariance matrix) +// Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin +// auto priorNoise = noiseModel::Diagonal::Sigmas( +// Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta +// fg.emplace_nonlinear>(X(0), prior, priorNoise); + +// // create a noise model for the landmark measurements +// auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1); + +// // We model a robot's single leg as X - Y - Z - W +// // where X is the base link and W is the foot link. 
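+// // (base) X --- Y --- Z --- W (foot): a BetweenFactor<Pose2> on each link,
+// // with the mode-dependent motion factor attached to the foot W in the
+// // later rounds.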
+ +// // Add connecting poses similar to PoseFactors in GTD +// fg.emplace_nonlinear>(X(0), Y(0), Pose2(0, 1.0, 0), +// poseNoise); +// fg.emplace_nonlinear>(Y(0), Z(0), Pose2(0, 1.0, 0), +// poseNoise); +// fg.emplace_nonlinear>(Z(0), W(0), Pose2(0, 1.0, 0), +// poseNoise); + +// // Create initial estimate +// Values initial; +// initial.insert(X(0), Pose2(0.0, 0.0, 0.0)); +// initial.insert(Y(0), Pose2(0.0, 1.0, 0.0)); +// initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); +// initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); + +// HybridGaussianFactorGraph gfg = fg.linearize(initial); +// fg = NonlinearHybridFactorGraph(); + +// IncrementalHybrid inc; + +// // Regular ordering going up the chain. +// Ordering ordering; +// ordering += W(0); +// ordering += Z(0); +// ordering += Y(0); +// ordering += X(0); + +// // Update without pruning +// // The result is a HybridBayesNet with no discrete variables +// // (equivalent to a GaussianBayesNet). +// // Factorization is: +// // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)` +// inc.update(gfg, ordering); + +// /*************** Run Round 2 ***************/ +// using PlanarMotionModel = BetweenFactor; + +// // Add odometry factor with discrete modes. +// Pose2 odometry(1.0, 0.0, 0.0); +// KeyVector contKeys = {W(0), W(1)}; +// auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); +// auto still = boost::make_shared(W(0), W(1), Pose2(0, 0, +// 0), +// noise_model), +// moving = boost::make_shared(W(0), W(1), odometry, +// noise_model); +// std::vector components = {moving, still}; +// auto dcFactor = boost::make_shared>( +// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); +// fg.push_back(dcFactor); + +// // Add equivalent of ImuFactor +// fg.emplace_nonlinear>(X(0), X(1), Pose2(1.0, 0.0, 0), +// poseNoise); +// // PoseFactors-like at k=1 +// fg.emplace_nonlinear>(X(1), Y(1), Pose2(0, 1, 0), +// poseNoise); +// fg.emplace_nonlinear>(Y(1), Z(1), Pose2(0, 1, 0), +// poseNoise); +// fg.emplace_nonlinear>(Z(1), W(1), Pose2(-1, 1, 0), +// poseNoise); + +// initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); +// initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); +// initial.insert(Z(1), Pose2(1.0, 2.0, 0.0)); +// // The leg link did not move so we set the expected pose accordingly. +// initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); + +// // Ordering for k=1. +// // This ordering follows the intuition that we eliminate the previous +// // timestep, and then the current timestep. +// ordering = Ordering(); +// ordering += W(0); +// ordering += Z(0); +// ordering += Y(0); +// ordering += X(0); +// ordering += W(1); +// ordering += Z(1); +// ordering += Y(1); +// ordering += X(1); + +// gfg = fg.linearize(initial); +// fg = NonlinearHybridFactorGraph(); + +// // Update without pruning +// // The result is a HybridBayesNet with 1 discrete variable M(1). +// // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) +// // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, +// M1) +// // P(Y1 | X1, M1)P(X1 | M1)P(M1) +// // The MHS tree is a 2 level tree for time indices (0, 1) with 2 leaves. +// inc.update(gfg, ordering); + +// /*************** Run Round 3 ***************/ +// // Add odometry factor with discrete modes. 
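+// // (Same still/moving components as round 2, now keyed on W(1), W(2) and
+// // selected by a fresh mode key M(2).)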
+// contKeys = {W(1), W(2)}; +// still = boost::make_shared(W(1), W(2), Pose2(0, 0, 0), +// noise_model); +// moving = +// boost::make_shared(W(1), W(2), odometry, +// noise_model); +// components = {moving, still}; +// dcFactor = boost::make_shared>( +// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); +// fg.push_back(dcFactor); + +// // Add equivalent of ImuFactor +// fg.emplace_nonlinear>(X(1), X(2), Pose2(1.0, 0.0, 0), +// poseNoise); +// // PoseFactors-like at k=1 +// fg.emplace_nonlinear>(X(2), Y(2), Pose2(0, 1, 0), +// poseNoise); +// fg.emplace_nonlinear>(Y(2), Z(2), Pose2(0, 1, 0), +// poseNoise); +// fg.emplace_nonlinear>(Z(2), W(2), Pose2(-2, 1, 0), +// poseNoise); + +// initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); +// initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); +// initial.insert(Z(2), Pose2(2.0, 2.0, 0.0)); +// initial.insert(W(2), Pose2(0.0, 3.0, 0.0)); + +// // Ordering at k=2. Same intuition as before. +// ordering = Ordering(); +// ordering += W(1); +// ordering += Z(1); +// ordering += Y(1); +// ordering += X(1); +// ordering += W(2); +// ordering += Z(2); +// ordering += Y(2); +// ordering += X(2); + +// gfg = fg.linearize(initial); +// fg = NonlinearHybridFactorGraph(); + +// // Now we prune! +// // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) +// P(X0 | X1, W1, M1) +// // P(W1|W2, Z1, X1, M1, M2) P(Z1| W2, Y1, X1, M1, M2) +// P(Y1 | W2, X1, M1, M2) +// // P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2) +// P(Z2|Y2, X2, M1, M2) +// // P(Y2 | X2, M1, M2)P(X2 | M1, M2) P(M1, M2) +// // The MHS at this point should be a 3 level tree on (0, 1, 2). +// // 0 has 2 choices, 1 has 4 choices and 2 has 4 choices. +// inc.update(gfg, ordering, 2); + +// /*************** Run Round 4 ***************/ +// // Add odometry factor with discrete modes. +// contKeys = {W(2), W(3)}; +// still = boost::make_shared(W(2), W(3), Pose2(0, 0, 0), +// noise_model); +// moving = +// boost::make_shared(W(2), W(3), odometry, +// noise_model); +// components = {moving, still}; +// dcFactor = boost::make_shared>( +// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); +// fg.push_back(dcFactor); + +// // Add equivalent of ImuFactor +// fg.emplace_nonlinear>(X(2), X(3), Pose2(1.0, 0.0, 0), +// poseNoise); +// // PoseFactors-like at k=3 +// fg.emplace_nonlinear>(X(3), Y(3), Pose2(0, 1, 0), +// poseNoise); +// fg.emplace_nonlinear>(Y(3), Z(3), Pose2(0, 1, 0), +// poseNoise); +// fg.emplace_nonlinear>(Z(3), W(3), Pose2(-3, 1, 0), +// poseNoise); + +// initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); +// initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); +// initial.insert(Z(3), Pose2(3.0, 2.0, 0.0)); +// initial.insert(W(3), Pose2(0.0, 3.0, 0.0)); + +// // Ordering at k=3. Same intuition as before. +// ordering = Ordering(); +// ordering += W(2); +// ordering += Z(2); +// ordering += Y(2); +// ordering += X(2); +// ordering += W(3); +// ordering += Z(3); +// ordering += Y(3); +// ordering += X(3); + +// gfg = fg.linearize(initial); +// fg = NonlinearHybridFactorGraph(); + +// // Keep pruning! +// inc.update(gfg, ordering, 3); + +// // The final discrete graph should not be empty since we have eliminated +// // all continuous variables. +// EXPECT(!inc.remainingDiscreteGraph().empty()); + +// // Test if the optimal discrete mode assignment is (1, 1, 1). 
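+// // (DiscreteFactorGraph::optimize() runs max-product over the remaining
+// // discrete graph and returns the most probable mode assignment.)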
+// DiscreteValues optimal_assignment = +// inc.remainingDiscreteGraph().optimize(); DiscreteValues +// expected_assignment; expected_assignment[M(1)] = 1; +// expected_assignment[M(2)] = 1; +// expected_assignment[M(3)] = 1; +// EXPECT(assert_equal(expected_assignment, optimal_assignment)); + +// // Test if pruning worked correctly by checking that we only have 3 leaves +// in +// // the last node. +// auto lastConditional = boost::dynamic_pointer_cast( +// inc.hybridBayesNet().at(inc.hybridBayesNet().size() - 1)); +// EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); +// } + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} From 60c88e338bd3d73c80753185a2710f83a0c9b353 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 10 Aug 2022 04:37:26 -0400 Subject: [PATCH 52/74] fix print tests --- .../tests/testGaussianMixtureFactor.cpp | 4 +- .../tests/testHybridNonlinearFactorGraph.cpp | 66 ++++++++----------- 2 files changed, 31 insertions(+), 39 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 36477218b9..cb9068c303 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -74,7 +74,7 @@ TEST(GaussianMixtureFactor, Sum) { // Check that number of keys is 3 EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size()); - // Check that number of discrete keys is 1 // TODO(Frank): should not exist? + // Check that number of discrete keys is 1 EXPECT_LONGS_EQUAL(1, mixtureFactorA.discreteKeys().size()); // Create sum of two mixture factors: it will be a decision tree now on both @@ -104,7 +104,7 @@ TEST(GaussianMixtureFactor, Printing) { GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); std::string expected = - R"(Hybrid x1 x2; 1 ]{ + R"(Hybrid [x1 x2; 1]{ Choice(1) 0 Leaf : A[x1] = [ diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index c732ec6c51..b471079b00 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -175,9 +175,8 @@ TEST(HybridFactorGraph, PushBack) { TEST(HybridFactorGraph, Switching) { Switching self(3); - EXPECT_LONGS_EQUAL(8, self.nonlinearFactorGraph.size()); - - EXPECT_LONGS_EQUAL(8, self.linearizedFactorGraph.size()); + EXPECT_LONGS_EQUAL(7, self.nonlinearFactorGraph.size()); + EXPECT_LONGS_EQUAL(7, self.linearizedFactorGraph.size()); } /**************************************************************************** @@ -190,7 +189,7 @@ TEST(HybridFactorGraph, Linearization) { HybridGaussianFactorGraph actualLinearized = self.nonlinearFactorGraph.linearize(self.linearizationPoint); - EXPECT_LONGS_EQUAL(8, actualLinearized.size()); + EXPECT_LONGS_EQUAL(7, actualLinearized.size()); } /**************************************************************************** @@ -495,15 +494,15 @@ TEST(HybridFactorGraph, Printing) { linearizedFactorGraph.eliminatePartialSequential(ordering); string expected_hybridFactorGraph = R"( -size: 8 -factor 0: Continuous x1; +size: 7 +factor 0: Continuous [x1] A[x1] = [ 10 ] b = [ -10 ] No noise model -factor 1: Hybrid x1 x2 m1; m1 ]{ +factor 1: Hybrid [x1 x2; m1]{ Choice(m1) 0 Leaf : A[x1] = [ @@ -526,7 +525,7 @@ factor 1: Hybrid x1 x2 m1; m1 ]{ No noise model } -factor 2: Hybrid x2 x3 m2; m2 ]{ +factor 2: Hybrid [x2 x3; m2]{ Choice(m2) 0 Leaf : 
A[x2] = [ @@ -549,32 +548,25 @@ factor 2: Hybrid x2 x3 m2; m2 ]{ No noise model } -factor 3: Continuous x1; - - A[x1] = [ - 10 -] - b = [ -10 ] - No noise model -factor 4: Continuous x2; +factor 3: Continuous [x2] A[x2] = [ 10 ] b = [ -10 ] No noise model -factor 5: Continuous x3; +factor 4: Continuous [x3] A[x3] = [ 10 ] b = [ -10 ] No noise model -factor 6: Discrete m1 +factor 5: Discrete [m1] P( m1 ): Leaf 0.5 -factor 7: Discrete m2 m1 +factor 6: Discrete [m2 m1] P( m2 | m1 ): Choice(m2) 0 Choice(m1) @@ -594,15 +586,15 @@ factor 0: Hybrid P( x1 | x2 m1) Discrete Keys = (m1, 2), Choice(m1) 0 Leaf p(x1 | x2) - R = [ 14.1774 ] - S[x2] = [ -0.0705346 ] - d = [ -14.0364 ] + R = [ 10.0499 ] + S[x2] = [ -0.0995037 ] + d = [ -9.85087 ] No noise model 1 Leaf p(x1 | x2) - R = [ 14.1774 ] - S[x2] = [ -0.0705346 ] - d = [ -14.1069 ] + R = [ 10.0499 ] + S[x2] = [ -0.0995037 ] + d = [ -9.95037 ] No noise model factor 1: Hybrid P( x2 | x3 m1 m2) @@ -610,28 +602,28 @@ factor 1: Hybrid P( x2 | x3 m1 m2) Choice(m2) 0 Choice(m1) 0 0 Leaf p(x2 | x3) - R = [ 10.0993 ] - S[x3] = [ -0.0990172 ] - d = [ -9.99975 ] + R = [ 10.099 ] + S[x3] = [ -0.0990196 ] + d = [ -9.99901 ] No noise model 0 1 Leaf p(x2 | x3) - R = [ 10.0993 ] - S[x3] = [ -0.0990172 ] - d = [ -9.90122 ] + R = [ 10.099 ] + S[x3] = [ -0.0990196 ] + d = [ -9.90098 ] No noise model 1 Choice(m1) 1 0 Leaf p(x2 | x3) - R = [ 10.0993 ] - S[x3] = [ -0.0990172 ] - d = [ -10.0988 ] + R = [ 10.099 ] + S[x3] = [ -0.0990196 ] + d = [ -10.098 ] No noise model 1 1 Leaf p(x2 | x3) - R = [ 10.0993 ] - S[x3] = [ -0.0990172 ] - d = [ -10.0002 ] + R = [ 10.099 ] + S[x3] = [ -0.0990196 ] + d = [ -10 ] No noise model factor 2: Hybrid P( x3 | m1 m2) From 4ea897cfbf86a298babd48b6dbcc0339ef8c0b61 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 10 Aug 2022 04:54:32 -0400 Subject: [PATCH 53/74] cleaner printing --- gtsam/hybrid/HybridFactor.cpp | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridFactor.cpp b/gtsam/hybrid/HybridFactor.cpp index 5871a13032..a9fe62cf13 100644 --- a/gtsam/hybrid/HybridFactor.cpp +++ b/gtsam/hybrid/HybridFactor.cpp @@ -95,11 +95,15 @@ void HybridFactor::print(const std::string &s, if (c < continuousKeys_.size() - 1) { std::cout << " "; } else { - std::cout << (discreteKeys_.size() > 0 ? ";" : ""); + std::cout << (discreteKeys_.size() > 0 ? 
"; " : ""); } } - for (auto &&discreteKey : discreteKeys_) { - std::cout << " " << formatter(discreteKey.first); + for (size_t d = 0; d < discreteKeys_.size(); d++) { + std::cout << formatter(discreteKeys_.at(d).first); + if (d < discreteKeys_.size() - 1) { + std::cout << " "; + } + } std::cout << "]"; } From fbceda3e5665bb7f1d3691eed69ae87949aa5691 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 10 Aug 2022 04:54:52 -0400 Subject: [PATCH 54/74] got some more tests working --- gtsam/hybrid/tests/testHybridIncremental.cpp | 102 ++++++++++--------- 1 file changed, 56 insertions(+), 46 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp index 420dcd3393..9afdb98863 100644 --- a/gtsam/hybrid/tests/testHybridIncremental.cpp +++ b/gtsam/hybrid/tests/testHybridIncremental.cpp @@ -21,6 +21,7 @@ #include #include #include +#include #include #include @@ -105,19 +106,21 @@ TEST(HybridGaussianElimination, IncrementalInference) { HybridGaussianFactorGraph graph1; // Create initial factor graph - // * * * - // | | | - // *- X1 -*- X2 -*- X3 - // \*-M1-*/ + // * * * + // | | | + // *- X1 -*- X2 -*- X3 + // | | + // *-M1 - * - M2 graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1) graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1) - graph1.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2) + graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2) graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1) - // Create ordering. - Ordering ordering; - ordering += X(1); - ordering += X(2); + //TODO(Varun) we cannot enforce ordering + // // Create ordering. + // Ordering ordering1; + // ordering1 += X(1); + // ordering1 += X(2); // Run update step isam.update(graph1); @@ -126,20 +129,22 @@ TEST(HybridGaussianElimination, IncrementalInference) { // New factor graph for incremental update. HybridGaussianFactorGraph graph2; - graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2) + graph2.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2) graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3) graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2) - // Create ordering. - Ordering ordering2; - ordering2 += X(2); - ordering2 += X(3); + //TODO(Varun) we cannot enforce ordering + // // Create ordering. + // Ordering ordering2; + // ordering2 += X(2); + // ordering2 += X(3); isam.update(graph2); + GTSAM_PRINT(isam); /********************************************************/ // Run batch elimination so we can compare results. 
- ordering.clear(); + Ordering ordering; ordering += X(1); ordering += X(2); ordering += X(3); @@ -151,37 +156,42 @@ TEST(HybridGaussianElimination, IncrementalInference) { switching.linearizedFactorGraph.eliminatePartialSequential(ordering); // The densities on X(1) should be the same - expectedHybridBayesNet->atGaussian(0)->print(); - auto x1_conditional = isam[X(1)]->conditional(); - GTSAM_PRINT(*x1_conditional); - // EXPECT(assert_equal(*(hybridBayesNet.atGaussian(0)), - // *(expectedHybridBayesNet->atGaussian(0)))); - - // // The densities on X(2) should be the same - // EXPECT(assert_equal(*(hybridBayesNet2.atGaussian(1)), - // *(expectedHybridBayesNet->atGaussian(1)))); - - // // The densities on X(3) should be the same - // EXPECT(assert_equal(*(hybridBayesNet2.atGaussian(2)), - // *(expectedHybridBayesNet->atGaussian(2)))); - - // // we only do the manual continuous elimination for 0,0 - // // the other discrete probabilities on M(2) are calculated the same way - // auto m00_prob = [&]() { - // GaussianFactorGraph gf; - // gf.add(switching.linearizedFactorGraph.gaussianGraph().at(3)); - - // DiscreteValues m00; - // m00[M(1)] = 0, m00[M(2)] = 0; - // auto dcMixture = - // dynamic_pointer_cast(graph2.dcGraph().at(0)); - // gf.add(dcMixture->factors()(m00)); - // auto x2_mixed = - // boost::dynamic_pointer_cast(hybridBayesNet.at(1)); - // gf.add(x2_mixed->factors()(m00)); - // auto result_gf = gf.eliminateSequential(); - // return gf.probPrime(result_gf->optimize()); - // }(); + auto x1_conditional = + dynamic_pointer_cast(isam[X(1)]->conditional()->inner()); + EXPECT( + assert_equal(*x1_conditional, *(expectedHybridBayesNet->atGaussian(0)))); + + // The densities on X(2) should be the same + auto x2_conditional = + dynamic_pointer_cast(isam[X(2)]->conditional()->inner()); + EXPECT( + assert_equal(*x2_conditional, *(expectedHybridBayesNet->atGaussian(1)))); + + // // The densities on X(3) should be the same + // auto x3_conditional = + // dynamic_pointer_cast(isam[X(3)]->conditional()->inner()); + // EXPECT( + // assert_equal(*x3_conditional, *(expectedHybridBayesNet->atGaussian(2)))); + + GTSAM_PRINT(*expectedHybridBayesNet); + + // we only do the manual continuous elimination for 0,0 + // the other discrete probabilities on M(2) are calculated the same way + auto m00_prob = [&]() { + GaussianFactorGraph gf; + // gf.add(switching.linearizedFactorGraph.gaussianGraph().at(3)); + + DiscreteValues m00; + m00[M(1)] = 0, m00[M(2)] = 0; + // auto dcMixture = + // dynamic_pointer_cast(graph2.dcGraph().at(0)); + // gf.add(dcMixture->factors()(m00)); + // auto x2_mixed = + // boost::dynamic_pointer_cast(hybridBayesNet.at(1)); + // gf.add(x2_mixed->factors()(m00)); + auto result_gf = gf.eliminateSequential(); + return gf.probPrime(result_gf->optimize()); + }(); /// Test if the probability values are as expected with regression tests. 
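 // (probPrime evaluates the unnormalized posterior, essentially
 // exp(-0.5 * error), at the optimized solution; the regression values below
 // are those probabilities for each mode assignment.)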
// DiscreteValues assignment; From 0e4db307136f6d82425d85cf95db8d643add1d53 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 10 Aug 2022 18:37:24 -0400 Subject: [PATCH 55/74] use templetized constructor for MixtureFactor --- gtsam/hybrid/MixtureFactor.h | 15 ++++++++++--- .../tests/testHybridNonlinearFactorGraph.cpp | 21 +++++++------------ 2 files changed, 19 insertions(+), 17 deletions(-) diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h index 8c25457efd..3cd21e32e5 100644 --- a/gtsam/hybrid/MixtureFactor.h +++ b/gtsam/hybrid/MixtureFactor.h @@ -86,17 +86,26 @@ class MixtureFactor : public HybridFactor { * elements based on the number of discrete keys and the cardinality of the * keys, so that the decision tree is constructed appropriately. * + * @tparam FACTOR The type of the factor shared pointers being passed in. Will + * be typecast to NonlinearFactor shared pointers. * @param keys Vector of keys for continuous factors. * @param discreteKeys Vector of discrete keys. * @param factors Vector of shared pointers to factors. * @param normalized Flag indicating if the factor error is already * normalized. */ + template MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, - const std::vector& factors, + const std::vector>& factors, bool normalized = false) - : MixtureFactor(keys, discreteKeys, Factors(discreteKeys, factors), - normalized) {} + : Base(keys, discreteKeys), normalized_(normalized) { + std::vector nonlinear_factors; + for (auto&& f : factors) { + nonlinear_factors.push_back( + boost::dynamic_pointer_cast(f)); + } + factors_ = Factors(discreteKeys, nonlinear_factors); + } ~MixtureFactor() = default; diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index b471079b00..814492686c 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -105,9 +105,7 @@ TEST(HybridGaussianFactorGraph, Resize) { auto still = boost::make_shared(X(0), X(1), 0.0, noise_model), moving = boost::make_shared(X(0), X(1), 1.0, noise_model); - // TODO(Varun) This is declared as NonlinearFactor instead of MotionModel, aka - // not clear!! - std::vector components = {still, moving}; + std::vector components = {still, moving}; auto dcFactor = boost::make_shared( contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); nhfg.push_back(dcFactor); @@ -431,14 +429,15 @@ TEST(HybridFactorGraph, Full_Elimination) { linearizedFactorGraph.eliminatePartialSequential(ordering); DiscreteFactorGraph discrete_fg; - //TODO(Varun) Make this a function of HybridGaussianFactorGraph? - for(HybridFactor::shared_ptr& factor: (*remainingFactorGraph_partial)) { + // TODO(Varun) Make this a function of HybridGaussianFactorGraph? + for (HybridFactor::shared_ptr& factor : (*remainingFactorGraph_partial)) { auto df = dynamic_pointer_cast(factor); discrete_fg.push_back(df->inner()); } ordering.clear(); for (size_t k = 1; k < self.K; k++) ordering += M(k); - discreteBayesNet = *discrete_fg.eliminateSequential(ordering, EliminateForMPE); + discreteBayesNet = + *discrete_fg.eliminateSequential(ordering, EliminateForMPE); } // Create ordering. 
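 // (With the templated MixtureFactor constructor above, a vector of derived
 // factor shared pointers can now be passed directly; a hedged sketch using
 // this test's MotionModel alias:
 //
 //   auto still = boost::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model);
 //   auto moving = boost::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);
 //   std::vector<MotionModel::shared_ptr> components = {still, moving};
 //   MixtureFactor mf(contKeys, {DiscreteKey(M(1), 2)}, components);
 // )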
@@ -678,19 +677,13 @@ TEST(HybridFactorGraph, DefaultDecisionTree) { Pose2 odometry(2.0, 0.0, 0.0); KeyVector contKeys = {X(0), X(1)}; auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); - auto still = boost::make_shared(X(0), X(1), Pose2(0, 0, - 0), + auto still = boost::make_shared(X(0), X(1), Pose2(0, 0, 0), noise_model), moving = boost::make_shared(X(0), X(1), odometry, noise_model); std::vector motion_models = {still, moving}; - //TODO(Varun) Make a templated constructor for MixtureFactor which does this? - std::vector components; - for (auto&& f : motion_models) { - components.push_back(boost::dynamic_pointer_cast(f)); - } fg.emplace_hybrid( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models); // Add Range-Bearing measurements to from X0 to L0 and X1 to L1. // create a noise model for the landmark measurements From aa486586264e082c872f9277092590060fea30f8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 12 Aug 2022 16:18:40 -0400 Subject: [PATCH 56/74] more tests running --- gtsam/hybrid/tests/testHybridIncremental.cpp | 95 ++++++++++---------- 1 file changed, 46 insertions(+), 49 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp index 9afdb98863..eec260cbc1 100644 --- a/gtsam/hybrid/tests/testHybridIncremental.cpp +++ b/gtsam/hybrid/tests/testHybridIncremental.cpp @@ -116,12 +116,6 @@ TEST(HybridGaussianElimination, IncrementalInference) { graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2) graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1) - //TODO(Varun) we cannot enforce ordering - // // Create ordering. - // Ordering ordering1; - // ordering1 += X(1); - // ordering1 += X(2); - // Run update step isam.update(graph1); @@ -133,14 +127,7 @@ TEST(HybridGaussianElimination, IncrementalInference) { graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3) graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2) - //TODO(Varun) we cannot enforce ordering - // // Create ordering. - // Ordering ordering2; - // ordering2 += X(2); - // ordering2 += X(3); - isam.update(graph2); - GTSAM_PRINT(isam); /********************************************************/ // Run batch elimination so we can compare results. 
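 // (eliminatePartialMultifrontal returns a HybridBayesTree together with the
 // remaining factor graph, so the iSAM cliques can be compared against the
 // batch cliques directly, as is done below.)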
@@ -150,68 +137,78 @@ TEST(HybridGaussianElimination, IncrementalInference) { ordering += X(3); // Now we calculate the actual factors using full elimination - HybridBayesNet::shared_ptr expectedHybridBayesNet; + HybridBayesTree::shared_ptr expectedHybridBayesTree; HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph; - std::tie(expectedHybridBayesNet, expectedRemainingGraph) = - switching.linearizedFactorGraph.eliminatePartialSequential(ordering); + std::tie(expectedHybridBayesTree, expectedRemainingGraph) = + switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); // The densities on X(1) should be the same auto x1_conditional = dynamic_pointer_cast(isam[X(1)]->conditional()->inner()); - EXPECT( - assert_equal(*x1_conditional, *(expectedHybridBayesNet->atGaussian(0)))); + auto actual_x1_conditional = dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); + EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional)); // The densities on X(2) should be the same auto x2_conditional = dynamic_pointer_cast(isam[X(2)]->conditional()->inner()); - EXPECT( - assert_equal(*x2_conditional, *(expectedHybridBayesNet->atGaussian(1)))); - - // // The densities on X(3) should be the same - // auto x3_conditional = - // dynamic_pointer_cast(isam[X(3)]->conditional()->inner()); - // EXPECT( - // assert_equal(*x3_conditional, *(expectedHybridBayesNet->atGaussian(2)))); - - GTSAM_PRINT(*expectedHybridBayesNet); - - // we only do the manual continuous elimination for 0,0 - // the other discrete probabilities on M(2) are calculated the same way + auto actual_x2_conditional = dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); + EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional)); + + // The densities on X(3) should be the same + auto x3_conditional = + dynamic_pointer_cast(isam[X(3)]->conditional()->inner()); + auto actual_x3_conditional = dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); + EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional)); + + // We only perform manual continuous elimination for 0,0. + // The other discrete probabilities on M(2) are calculated the same way auto m00_prob = [&]() { GaussianFactorGraph gf; - // gf.add(switching.linearizedFactorGraph.gaussianGraph().at(3)); + auto x2_prior = boost::dynamic_pointer_cast( + switching.linearizedFactorGraph.at(3))->inner(); + gf.add(x2_prior); DiscreteValues m00; m00[M(1)] = 0, m00[M(2)] = 0; - // auto dcMixture = - // dynamic_pointer_cast(graph2.dcGraph().at(0)); - // gf.add(dcMixture->factors()(m00)); - // auto x2_mixed = - // boost::dynamic_pointer_cast(hybridBayesNet.at(1)); - // gf.add(x2_mixed->factors()(m00)); + // P(X2, X3 | M2) + auto dcMixture = + dynamic_pointer_cast(graph2.at(0)); + gf.add(dcMixture->factors()(m00)); + + auto x2_mixed = + boost::dynamic_pointer_cast(isam[X(2)]->conditional()->inner()); + // Perform explicit cast so we can add the conditional to `gf`. + auto x2_cond = boost::dynamic_pointer_cast( + x2_mixed->conditionals()(m00)); + gf.add(x2_cond); + auto result_gf = gf.eliminateSequential(); return gf.probPrime(result_gf->optimize()); }(); - /// Test if the probability values are as expected with regression tests. 
-// DiscreteValues assignment; -// EXPECT(assert_equal(m00_prob, 0.60656, 1e-5)); -// assignment[M(1)] = 0; -// assignment[M(2)] = 0; -// EXPECT(assert_equal(m00_prob, (*discreteFactor)(assignment), 1e-5)); + auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional(); + // Test if the probability values are as expected with regression tests. + // DiscreteValues assignment; + // EXPECT(assert_equal(m00_prob, 0.60656, 1e-5)); + // assignment[M(1)] = 0; + // assignment[M(2)] = 0; + // EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5)); // assignment[M(1)] = 1; // assignment[M(2)] = 0; -// EXPECT(assert_equal(0.612477, (*discreteFactor)(assignment), 1e-5)); +// EXPECT(assert_equal(0.612477, (*discreteConditional)(assignment), 1e-5)); // assignment[M(1)] = 0; // assignment[M(2)] = 1; -// EXPECT(assert_equal(0.999952, (*discreteFactor)(assignment), 1e-5)); +// EXPECT(assert_equal(0.999952, (*discreteConditional)(assignment), 1e-5)); // assignment[M(1)] = 1; // assignment[M(2)] = 1; -// EXPECT(assert_equal(1.0, (*discreteFactor)(assignment), 1e-5)); +// EXPECT(assert_equal(1.0, (*discreteConditional)(assignment), 1e-5)); // DiscreteFactorGraph dfg; -// dfg.add(*discreteFactor); -// dfg.add(discreteFactor_m1); +// dfg.add(*discreteConditional); +// dfg.add(discreteConditional_m1); // dfg.add_factors(switching.linearizedFactorGraph.discreteGraph()); // // Check if the chordal graph generated from incremental elimination From 77bea319ddc727d41fffb8f26fc1907cbaeae61d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 13 Aug 2022 15:11:21 -0400 Subject: [PATCH 57/74] one more test passing --- gtsam/hybrid/tests/testHybridIncremental.cpp | 117 +++++++++---------- 1 file changed, 53 insertions(+), 64 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp index eec260cbc1..7ab64462c6 100644 --- a/gtsam/hybrid/tests/testHybridIncremental.cpp +++ b/gtsam/hybrid/tests/testHybridIncremental.cpp @@ -18,7 +18,9 @@ #include #include +#include #include +#include #include #include #include @@ -108,7 +110,7 @@ TEST(HybridGaussianElimination, IncrementalInference) { // Create initial factor graph // * * * // | | | - // *- X1 -*- X2 -*- X3 + // X1 -*- X2 -*- X3 // | | // *-M1 - * - M2 graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1) @@ -119,6 +121,10 @@ TEST(HybridGaussianElimination, IncrementalInference) { // Run update step isam.update(graph1); + auto discreteConditional_m1 = + isam[M(1)]->conditional()->asDiscreteConditional(); + EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)})); + /********************************************************/ // New factor graph for incremental update. HybridGaussianFactorGraph graph2; @@ -165,65 +171,48 @@ TEST(HybridGaussianElimination, IncrementalInference) { // We only perform manual continuous elimination for 0,0. // The other discrete probabilities on M(2) are calculated the same way - auto m00_prob = [&]() { - GaussianFactorGraph gf; - auto x2_prior = boost::dynamic_pointer_cast( - switching.linearizedFactorGraph.at(3))->inner(); - gf.add(x2_prior); - - DiscreteValues m00; - m00[M(1)] = 0, m00[M(2)] = 0; - // P(X2, X3 | M2) - auto dcMixture = - dynamic_pointer_cast(graph2.at(0)); - gf.add(dcMixture->factors()(m00)); - - auto x2_mixed = - boost::dynamic_pointer_cast(isam[X(2)]->conditional()->inner()); - // Perform explicit cast so we can add the conditional to `gf`. 
- auto x2_cond = boost::dynamic_pointer_cast( - x2_mixed->conditionals()(m00)); - gf.add(x2_cond); - - auto result_gf = gf.eliminateSequential(); - return gf.probPrime(result_gf->optimize()); - }(); - - auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional(); + Ordering discrete_ordering; + discrete_ordering += M(1); + discrete_ordering += M(2); + HybridBayesTree::shared_ptr discreteBayesTree = + expectedRemainingGraph->eliminateMultifrontal(discrete_ordering); + + DiscreteValues m00; + m00[M(1)] = 0, m00[M(2)] = 0; + DiscreteConditional decisionTree = *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional(); + double m00_prob = decisionTree(m00); + + auto discreteConditional = isam[M(2)]->conditional()->asDiscreteConditional(); + // Test if the probability values are as expected with regression tests. - // DiscreteValues assignment; - // EXPECT(assert_equal(m00_prob, 0.60656, 1e-5)); - // assignment[M(1)] = 0; - // assignment[M(2)] = 0; - // EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5)); -// assignment[M(1)] = 1; -// assignment[M(2)] = 0; -// EXPECT(assert_equal(0.612477, (*discreteConditional)(assignment), 1e-5)); -// assignment[M(1)] = 0; -// assignment[M(2)] = 1; -// EXPECT(assert_equal(0.999952, (*discreteConditional)(assignment), 1e-5)); -// assignment[M(1)] = 1; -// assignment[M(2)] = 1; -// EXPECT(assert_equal(1.0, (*discreteConditional)(assignment), 1e-5)); - -// DiscreteFactorGraph dfg; -// dfg.add(*discreteConditional); -// dfg.add(discreteConditional_m1); -// dfg.add_factors(switching.linearizedFactorGraph.discreteGraph()); - -// // Check if the chordal graph generated from incremental elimination -// matches -// // that of batch elimination. -// auto chordal = dfg.eliminateSequential(); -// auto expectedChordal = -// expectedRemainingGraph->discreteGraph().eliminateSequential(); - -// EXPECT(assert_equal(*expectedChordal, *chordal, 1e-6)); + DiscreteValues assignment; + EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5)); + assignment[M(1)] = 0; + assignment[M(2)] = 0; + EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5)); + assignment[M(1)] = 1; + assignment[M(2)] = 0; + EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5)); + assignment[M(1)] = 0; + assignment[M(2)] = 1; + EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5)); + assignment[M(1)] = 1; + assignment[M(2)] = 1; + EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5)); + + // Check if the clique conditional generated from incremental elimination matches + // that of batch elimination. 
+ auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal(); + auto expectedConditional = dynamic_pointer_cast( + (*expectedChordal)[M(2)]->conditional()->inner()); + auto actualConditional = dynamic_pointer_cast( + isam[M(2)]->conditional()->inner()); + EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6)); } /* ****************************************************************************/ -// // Test if we can approximately do the inference -// TEST(DCGaussianElimination, Approx_inference) { +// Test if we can approximately do the inference +TEST(HybridGaussianElimination, Approx_inference) { // Switching switching(4); // IncrementalHybrid incrementalHybrid; // HybridGaussianFactorGraph graph1; @@ -339,11 +328,11 @@ TEST(HybridGaussianElimination, IncrementalInference) { // *lastDensity(assignment))); // } // } -// } +} /* ****************************************************************************/ -// // Test approximate inference with an additional pruning step. -// TEST(DCGaussianElimination, Incremental_approximate) { +// Test approximate inference with an additional pruning step. +TEST(HybridGaussianElimination, Incremental_approximate) { // Switching switching(5); // IncrementalHybrid incrementalHybrid; // HybridGaussianFactorGraph graph1; @@ -395,12 +384,12 @@ TEST(HybridGaussianElimination, IncrementalInference) { // CHECK_EQUAL(2, actualBayesNet.size()); // EXPECT_LONGS_EQUAL(10, actualBayesNet.atGaussian(0)->nrComponents()); // EXPECT_LONGS_EQUAL(5, actualBayesNet.atGaussian(1)->nrComponents()); -// } +} /* ************************************************************************/ -// // Test for figuring out the optimal ordering to ensure we get -// // a discrete graph after elimination. -// TEST(IncrementalHybrid, NonTrivial) { +// Test for figuring out the optimal ordering to ensure we get +// a discrete graph after elimination. +TEST(IncrementalHybrid, NonTrivial) { // // This is a GTSAM-only test for running inference on a single legged // robot. 
// // The leg links are represented by the chain X-Y-Z-W, where X is the base @@ -637,7 +626,7 @@ TEST(HybridGaussianElimination, IncrementalInference) { // auto lastConditional = boost::dynamic_pointer_cast( // inc.hybridBayesNet().at(inc.hybridBayesNet().size() - 1)); // EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); -// } +} /* ************************************************************************* */ int main() { From 58068503f4e79869a46c4ba0ce1aa5b5e7048e86 Mon Sep 17 00:00:00 2001 From: sjxue Date: Sun, 14 Aug 2022 17:39:35 -0400 Subject: [PATCH 58/74] HybridValues and optimize() method for hybrid gaussian bayes net --- gtsam/hybrid/HybridBayesNet.cpp | 13 + gtsam/hybrid/HybridBayesNet.h | 9 +- gtsam/hybrid/HybridLookupDAG.cpp | 73 +++++ gtsam/hybrid/HybridLookupDAG.h | 118 ++++++++ gtsam/hybrid/HybridValues.h | 138 +++++++++ gtsam/hybrid/hybrid.i | 17 ++ .../tests/testGaussianHybridFactorGraph.cpp | 24 ++ gtsam/hybrid/tests/testHybridLookupDAG.cpp | 276 ++++++++++++++++++ gtsam/hybrid/tests/testHybridValues.cpp | 59 ++++ 9 files changed, 726 insertions(+), 1 deletion(-) create mode 100644 gtsam/hybrid/HybridLookupDAG.cpp create mode 100644 gtsam/hybrid/HybridLookupDAG.h create mode 100644 gtsam/hybrid/HybridValues.h create mode 100644 gtsam/hybrid/tests/testHybridLookupDAG.cpp create mode 100644 gtsam/hybrid/tests/testHybridValues.cpp diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 1292711d89..fc0b93c5c9 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -10,7 +10,20 @@ * @file HybridBayesNet.cpp * @brief A bayes net of Gaussian Conditionals indexed by discrete keys. * @author Fan Jiang + * @author Shangjie Xue * @date January 2022 */ #include +#include +#include + +namespace gtsam { + +/* *******************************************************************************/ +HybridValues HybridBayesNet::optimize() const { + auto dag = HybridLookupDAG::FromBayesNet(*this); + return dag.argmax(); +} + +} // namespace gtsam \ No newline at end of file diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 43eead2801..5665ce5c99 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -18,6 +18,7 @@ #pragma once #include +#include #include namespace gtsam { @@ -34,8 +35,14 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { using shared_ptr = boost::shared_ptr; using sharedConditional = boost::shared_ptr; - /** Construct empty bayes net */ + /// Construct empty bayes net HybridBayesNet() = default; + + /// Destructor + virtual ~HybridBayesNet() {} + + /// Solve the HybridBayesNet by back-substitution. + HybridValues optimize() const; }; } // namespace gtsam diff --git a/gtsam/hybrid/HybridLookupDAG.cpp b/gtsam/hybrid/HybridLookupDAG.cpp new file mode 100644 index 0000000000..7232309f4d --- /dev/null +++ b/gtsam/hybrid/HybridLookupDAG.cpp @@ -0,0 +1,73 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file HybridLookupDAG.cpp
+ * @date Aug, 2022
+ * @author Shangjie Xue
+ */
+
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+
+#include 
+#include 
+
+using std::pair;
+using std::vector;
+
+namespace gtsam {
+
+/* ************************************************************************** */
+void HybridLookupTable::argmaxInPlace(HybridValues* values) const {
+ // For a discrete conditional, use the argmaxInPlace() method of
+ // DiscreteLookupTable.
+ if (isDiscrete()) {
+ boost::static_pointer_cast<DiscreteLookupTable>(inner_)->argmaxInPlace(
+ &(values->discrete));
+ } else if (isContinuous()) {
+ // For a Gaussian conditional, use the solve() method of
+ // GaussianConditional.
+ values->continuous.insert(
+ boost::static_pointer_cast<GaussianConditional>(inner_)->solve(
+ values->continuous));
+ } else if (isHybrid()) {
+ // For a hybrid conditional, the frontal variables are continuous, so we
+ // can condition on the discrete assignment of the parents and solve the
+ // resulting GaussianConditional.
+ auto conditional = boost::static_pointer_cast<GaussianMixture>(inner_)
+ ->conditionals()(values->discrete);
+ values->continuous.insert(conditional->solve(values->continuous));
+ }
+}
+
+/* ************************************************************************** */
+HybridLookupDAG HybridLookupDAG::FromBayesNet(const HybridBayesNet& bayesNet) {
+ HybridLookupDAG dag;
+ for (auto&& conditional : bayesNet) {
+ HybridLookupTable hlt(*conditional);
+ dag.push_back(hlt);
+ }
+ return dag;
+}
+
+/* ************************************************************************** */
+HybridValues HybridLookupDAG::argmax(HybridValues result) const {
+ // Argmax each node in turn in topological sort order (parents first).
+ for (auto lookupTable : boost::adaptors::reverse(*this))
+ lookupTable->argmaxInPlace(&result);
+ return result;
+}
+
+} // namespace gtsam
\ No newline at end of file
diff --git a/gtsam/hybrid/HybridLookupDAG.h b/gtsam/hybrid/HybridLookupDAG.h
new file mode 100644
index 0000000000..903cc55190
--- /dev/null
+++ b/gtsam/hybrid/HybridLookupDAG.h
@@ -0,0 +1,118 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file HybridLookupDAG.h
+ * @date Aug, 2022
+ * @author Shangjie Xue
+ */
+
+#pragma once
+
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+
+#include 
+#include 
+#include 
+#include 
+
+namespace gtsam {
+
+/**
+ * @brief Lookup table for max-product in hybrid models.
+ *
+ * Similar to DiscreteLookupTable; inherits from HybridConditional for
+ * convenience. Used in the max-product algorithm.
+ */
+class GTSAM_EXPORT HybridLookupTable : public HybridConditional {
+ public:
+ using Base = HybridConditional;
+ using This = HybridLookupTable;
+ using shared_ptr = boost::shared_ptr<This>;
+ using BaseConditional = Conditional;
+
+ /**
+ * @brief Construct a new HybridLookupTable from a HybridConditional.
+ * + * @param conditional input hybrid conditional + */ + HybridLookupTable(HybridConditional& conditional) : Base(conditional){}; + + /** + * @brief Calculate assignment for frontal variables that maximizes value. + * @param (in/out) parentsValues Known assignments for the parents. + */ + void argmaxInPlace(HybridValues* parentsValues) const; +}; + +/** A DAG made from hybrid lookup tables, as defined above. Similar to DiscreteLookupDAG */ +class GTSAM_EXPORT HybridLookupDAG : public BayesNet { + public: + using Base = BayesNet; + using This = HybridLookupDAG; + using shared_ptr = boost::shared_ptr; + + /// @name Standard Constructors + /// @{ + + /// Construct empty DAG. + HybridLookupDAG() {} + + /// Create from BayesNet with LookupTables + static HybridLookupDAG FromBayesNet(const HybridBayesNet& bayesNet); + + /// Destructor + virtual ~HybridLookupDAG() {} + + /// @} + + /// @name Standard Interface + /// @{ + + /** Add a DiscreteLookupTable */ + template + void add(Args&&... args) { + emplace_shared(std::forward(args)...); + } + + /** + * @brief argmax by back-substitution, optionally given certain variables. + * + * Assumes the DAG is reverse topologically sorted, i.e. last + * conditional will be optimized first *and* that the + * DAG does not contain any conditionals for the given variables. If the DAG + * resulted from eliminating a factor graph, this is true for the elimination + * ordering. + * + * @return given assignment extended w. optimal assignment for all variables. + */ + HybridValues argmax(HybridValues given = HybridValues()) const; + /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE& ar, const unsigned int /*version*/) { + ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + } +}; + +// traits +template <> +struct traits : public Testable {}; + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridValues.h b/gtsam/hybrid/HybridValues.h new file mode 100644 index 0000000000..98f862279a --- /dev/null +++ b/gtsam/hybrid/HybridValues.h @@ -0,0 +1,138 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridValues.h + * @date Jul 28, 2022 + * @author Shangjie Xue + */ + +#pragma once + +#include +#include +#include +#include +#include +#include + + +#include +#include +#include + +namespace gtsam { + +/** + * HybridValues represents a collection of DiscreteValues and VectorValues. It is typically used to store the variables + * of a HybridGaussianFactorGraph. Optimizing a HybridGaussianBayesNet returns this class. + */ +class GTSAM_EXPORT HybridValues { + public: + // DiscreteValue stored the discrete components of the HybridValues. + DiscreteValues discrete; + + // VectorValue stored the continuous components of the HybridValues. + VectorValues continuous; + + // Default constructor creates an empty HybridValues. + HybridValues() : discrete(), continuous() {}; + + // Construct from DiscreteValues and VectorValues. 
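+ // For example (hypothetical keys):
+ //   DiscreteValues mode;  mode[M(1)] = 1;
+ //   VectorValues delta;   delta.insert(X(1), Vector1(0.0));
+ //   HybridValues hv(mode, delta);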
diff --git a/gtsam/hybrid/HybridValues.h b/gtsam/hybrid/HybridValues.h
new file mode 100644
index 0000000000..98f862279a
--- /dev/null
+++ b/gtsam/hybrid/HybridValues.h
@@ -0,0 +1,138 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file HybridValues.h
+ * @date Jul 28, 2022
+ * @author Shangjie Xue
+ */
+
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+
+#include
+#include
+#include
+
+namespace gtsam {
+
+/**
+ * HybridValues represents a collection of DiscreteValues and VectorValues. It is typically used to store the variables
+ * of a HybridGaussianFactorGraph. Optimizing a HybridGaussianBayesNet returns this class.
+ */
+class GTSAM_EXPORT HybridValues {
+ public:
+  // DiscreteValue stored the discrete components of the HybridValues.
+  DiscreteValues discrete;
+
+  // VectorValue stored the continuous components of the HybridValues.
+  VectorValues continuous;
+
+  // Default constructor creates an empty HybridValues.
+  HybridValues() : discrete(), continuous() {};
+
+  // Construct from DiscreteValues and VectorValues.
+  HybridValues(const DiscreteValues &dv, const VectorValues &cv) : discrete(dv), continuous(cv) {};
+
+  // print required by Testable for unit testing
+  void print(const std::string& s = "HybridValues",
+             const KeyFormatter& keyFormatter = DefaultKeyFormatter) const{
+    std::cout << s << ": \n";
+    discrete.print("  Discrete", keyFormatter);
+    continuous.print("  Continuous", keyFormatter);
+  };
+
+  // equals required by Testable for unit testing
+  bool equals(const HybridValues& other, double tol = 1e-9) const {
+    return discrete.equals(other.discrete, tol) && continuous.equals(other.continuous, tol);
+  }
+
+  // Check whether a variable with key \c j exists in DiscreteValues.
+  bool existsDiscrete(Key j) const {
+    return (discrete.find(j) != discrete.end());
+  };
+
+  // Check whether a variable with key \c j exists in VectorValues.
+  bool existsVector(Key j) const {
+    return continuous.exists(j);
+  };
+
+  // Check whether a variable with key \c j exists.
+  bool exists(Key j) const {
+    return existsDiscrete(j) || existsVector(j);
+  };
+
+  /** Insert a discrete \c value with key \c j. Replaces the existing value if the key \c
+   * j is already used.
+   * @param value The discrete value to be inserted.
+   * @param j The index with which the value will be associated. */
+  void insert(Key j, int value){
+    discrete[j] = value;
+  };
+
+  /** Insert a vector \c value with key \c j. Throws an invalid_argument exception if the key \c
+   * j is already used.
+   * @param value The vector to be inserted.
+   * @param j The index with which the value will be associated. */
+  void insert(Key j, const Vector& value) {
+    continuous.insert(j, value);
+  }
+
+  // TODO(Shangjie): update() and insert_or_assign(), similar to Values.h
+
+  /**
+   * Read/write access to the discrete value with key \c j, throws
+   * std::out_of_range if \c j does not exist.
+   */
+  size_t& atDiscrete(Key j){
+    return discrete.at(j);
+  };
+
+  /**
+   * Read/write access to the vector value with key \c j, throws
+   * std::out_of_range if \c j does not exist.
+   */
+  Vector& at(Key j) {
+    return continuous.at(j);
+  };
+
+
+  /// @name Wrapper support
+  /// @{
+
+  /**
+   * @brief Output as a html table.
+   *
+   * @param keyFormatter function that formats keys.
+   * @return string html output.
+ */ + std::string html(const KeyFormatter& keyFormatter = DefaultKeyFormatter) const{ + std::stringstream ss; + ss << this->discrete.html(keyFormatter); + ss << this->continuous.html(keyFormatter); + return ss.str(); + }; + + /// @} +}; + +// traits +template <> +struct traits : public Testable {}; + +} // namespace gtsam diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index bbe1e2400d..207f3ff635 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -4,6 +4,22 @@ namespace gtsam { +#include +class HybridValues { + gtsam::DiscreteValues discrete; + gtsam::VectorValues continuous; + HybridValues(); + HybridValues(const gtsam::DiscreteValues &dv, const gtsam::VectorValues &cv); + void print(string s = "HybridValues", + const gtsam::KeyFormatter& keyFormatter = + gtsam::DefaultKeyFormatter) const; + bool equals(const gtsam::HybridValues& other, double tol) const; + void insert(gtsam::Key j, int value); + void insert(gtsam::Key j, const gtsam::Vector& value); + size_t& atDiscrete(gtsam::Key j); + gtsam::Vector& at(gtsam::Key j); +}; + #include virtual class HybridFactor { void print(string s = "HybridFactor\n", @@ -84,6 +100,7 @@ class HybridBayesNet { size_t size() const; gtsam::KeySet keys() const; const gtsam::HybridConditional* at(size_t i) const; + gtsam::HybridValues optimize() const; void print(string s = "HybridBayesNet\n", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; diff --git a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp index 552bb18f59..b1dbda836b 100644 --- a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp @@ -17,6 +17,7 @@ #include #include +#include #include #include #include @@ -30,6 +31,7 @@ #include #include #include +#include #include #include #include @@ -585,6 +587,28 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { } } +TEST(HybridGaussianFactorGraph, optimize) { + HybridGaussianFactorGraph hfg; + + DiscreteKey c1(C(1), 2); + + hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); + hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); + + DecisionTree dt( + C(1), boost::make_shared(X(1), I_3x3, Z_3x1), + boost::make_shared(X(1), I_3x3, Vector3::Ones())); + + hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt)); + + auto result = + hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {C(1)})); + + HybridValues hv = result->optimize(); + + EXPECT(assert_equal(hv.atDiscrete(C(1)), int(0))); + +} /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/hybrid/tests/testHybridLookupDAG.cpp b/gtsam/hybrid/tests/testHybridLookupDAG.cpp new file mode 100644 index 0000000000..70b09ecbe9 --- /dev/null +++ b/gtsam/hybrid/tests/testHybridLookupDAG.cpp @@ -0,0 +1,276 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridLookupDAG.cpp + * @date Aug, 2022 + * @author Shangjie Xue + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Include for test suite +#include + +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::M; +using symbol_shorthand::X; + +TEST(HybridLookupTable, basics) { + // create a conditional gaussian node + Matrix S1(2, 2); + S1(0, 0) = 1; + S1(1, 0) = 2; + S1(0, 1) = 3; + S1(1, 1) = 4; + + Matrix S2(2, 2); + S2(0, 0) = 6; + S2(1, 0) = 0.2; + S2(0, 1) = 8; + S2(1, 1) = 0.4; + + Matrix R1(2, 2); + R1(0, 0) = 0.1; + R1(1, 0) = 0.3; + R1(0, 1) = 0.0; + R1(1, 1) = 0.34; + + Matrix R2(2, 2); + R2(0, 0) = 0.1; + R2(1, 0) = 0.3; + R2(0, 1) = 0.0; + R2(1, 1) = 0.34; + + SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); + + Vector2 d1(0.2, 0.5), d2(0.5, 0.2); + + auto conditional0 = boost::make_shared(X(1), d1, R1, + X(2), S1, model), + conditional1 = boost::make_shared(X(1), d2, R2, + X(2), S2, model); + + // Create decision tree + DiscreteKey m1(1, 2); + GaussianMixture::Conditionals conditionals( + {m1}, + vector{conditional0, conditional1}); +// GaussianMixture mixtureFactor2({X(1)}, {X(2)}, {m1}, conditionals); + + boost::shared_ptr mixtureFactor(new GaussianMixture({X(1)}, {X(2)}, {m1}, conditionals)); + + HybridConditional hc(mixtureFactor); + + GaussianMixture::Conditionals conditional2 = boost::static_pointer_cast(hc.inner())->conditionals(); + + DiscreteValues dv; + dv[1]=1; + + VectorValues cv; + cv.insert(X(2),Vector2(0.0, 0.0)); + + HybridValues hv(dv, cv); + + + + // std::cout << conditional2(values).markdown(); + EXPECT(assert_equal(*conditional2(dv), *conditionals(dv), 1e-6)); + EXPECT(conditional2(dv)==conditionals(dv)); + HybridLookupTable hlt(hc); + +// hlt.argmaxInPlace(&hv); + + HybridLookupDAG dag; + dag.push_back(hlt); + dag.argmax(hv); + +// HybridBayesNet hbn; +// hbn.push_back(hc); +// hbn.optimize(); + +} + +TEST(HybridLookupTable, hybrid_argmax) { + Matrix S1(2, 2); + S1(0, 0) = 1; + S1(1, 0) = 0; + S1(0, 1) = 0; + S1(1, 1) = 1; + + Vector2 d1(0.2, 0.5), d2(-0.5,0.6); + + SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); + + auto conditional0 = boost::make_shared(X(1), d1, S1, model), + conditional1 = boost::make_shared(X(1), d2, S1, model); + + DiscreteKey m1(1, 2); + GaussianMixture::Conditionals conditionals( + {m1}, + vector{conditional0, conditional1}); + boost::shared_ptr mixtureFactor(new GaussianMixture({X(1)},{}, {m1}, conditionals)); + + HybridConditional hc(mixtureFactor); + + DiscreteValues dv; + dv[1]=1; + VectorValues cv; + // cv.insert(X(2),Vector2(0.0, 0.0)); + HybridValues hv(dv, cv); + + HybridLookupTable hlt(hc); + + hlt.argmaxInPlace(&hv); + + EXPECT(assert_equal(hv.at(X(1)), d2)); + + +} + +TEST(HybridLookupTable, discrete_argmax) { + DiscreteKey X(0, 2), Y(1, 2); + + auto conditional = boost::make_shared(X | Y = "0/1 3/2"); + + HybridConditional hc(conditional); + + HybridLookupTable hlt(hc); + + DiscreteValues dv; + dv[1]=0; + VectorValues cv; + // cv.insert(X(2),Vector2(0.0, 0.0)); + HybridValues hv(dv, cv); + + + hlt.argmaxInPlace(&hv); + + EXPECT(assert_equal(hv.atDiscrete(0), 1)); + + DecisionTreeFactor f1(X , "2 3"); + auto conditional2 = 
boost::make_shared(1,f1); + + HybridConditional hc2(conditional2); + + HybridLookupTable hlt2(hc2); + + HybridValues hv2; + + + hlt2.argmaxInPlace(&hv2); + + EXPECT(assert_equal(hv2.atDiscrete(0), 1)); +} + +TEST(HybridLookupTable, gaussian_argmax) { + Matrix S1(2, 2); + S1(0, 0) = 1; + S1(1, 0) = 0; + S1(0, 1) = 0; + S1(1, 1) = 1; + + Vector2 d1(0.2, 0.5), d2(-0.5,0.6); + + SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); + + auto conditional = boost::make_shared(X(1), d1, S1, + X(2), -S1, model); + + HybridConditional hc(conditional); + + HybridLookupTable hlt(hc); + + DiscreteValues dv; + // dv[1]=0; + VectorValues cv; + cv.insert(X(2),d2); + HybridValues hv(dv, cv); + + + hlt.argmaxInPlace(&hv); + + EXPECT(assert_equal(hv.at(X(1)), d1+d2)); + +} + +TEST(HybridLookupDAG, argmax) { + + Matrix S1(2, 2); + S1(0, 0) = 1; + S1(1, 0) = 0; + S1(0, 1) = 0; + S1(1, 1) = 1; + + Vector2 d1(0.2, 0.5), d2(-0.5,0.6); + + SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); + + auto conditional0 = boost::make_shared(X(2), d1, S1, model), + conditional1 = boost::make_shared(X(2), d2, S1, model); + + DiscreteKey m1(1, 2); + GaussianMixture::Conditionals conditionals( + {m1}, + vector{conditional0, conditional1}); + boost::shared_ptr mixtureFactor(new GaussianMixture({X(2)},{}, {m1}, conditionals)); + HybridConditional hc2(mixtureFactor); + HybridLookupTable hlt2(hc2); + + + auto conditional2 = boost::make_shared(X(1), d1, S1, + X(2), -S1, model); + + HybridConditional hc1(conditional2); + HybridLookupTable hlt1(hc1); + + DecisionTreeFactor f1(m1 , "2 3"); + auto discrete_conditional = boost::make_shared(1,f1); + + HybridConditional hc3(discrete_conditional); + HybridLookupTable hlt3(hc3); + + HybridLookupDAG dag; + dag.push_back(hlt1); + dag.push_back(hlt2); + dag.push_back(hlt3); + auto hv = dag.argmax(); + + EXPECT(assert_equal(hv.atDiscrete(1), 1)); + EXPECT(assert_equal(hv.at(X(2)), d2)); + EXPECT(assert_equal(hv.at(X(1)), d2+d1)); +} + + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ \ No newline at end of file diff --git a/gtsam/hybrid/tests/testHybridValues.cpp b/gtsam/hybrid/tests/testHybridValues.cpp new file mode 100644 index 0000000000..3e821aef2f --- /dev/null +++ b/gtsam/hybrid/tests/testHybridValues.cpp @@ -0,0 +1,59 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridValues.cpp + * @date Jul 28, 2022 + * @author Shangjie Xue + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Include for test suite +#include + + +using namespace std; +using namespace gtsam; + +TEST(HybridValues, basics) { + HybridValues values; + values.insert(99, Vector2(2, 3)); + values.insert(100, 3); + EXPECT(assert_equal(values.at(99), Vector2(2, 3))); + EXPECT(assert_equal(values.atDiscrete(100), int(3))); + + values.print(); + + HybridValues values2; + values2.insert(100, 3); + values2.insert(99, Vector2(2, 3)); + EXPECT(assert_equal(values2, values)); + + values2.insert(98, Vector2(2,3)); + EXPECT(!assert_equal(values2, values)); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ \ No newline at end of file From 7d36a9eb98803bc4712917bee86583fec50ad030 Mon Sep 17 00:00:00 2001 From: sjxue Date: Sun, 14 Aug 2022 21:08:31 -0400 Subject: [PATCH 59/74] add some comments --- gtsam/hybrid/HybridBayesNet.h | 1 + gtsam/hybrid/HybridValues.h | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index c376c9a86f..b19528120a 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -64,6 +64,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { GaussianBayesNet choose(const DiscreteValues &assignment) const; /// Solve the HybridBayesNet by back-substitution. + /// TODO(Shangjie) do we need to create a HybridGaussianBayesNet class, and put this method there? 
HybridValues optimize() const; }; diff --git a/gtsam/hybrid/HybridValues.h b/gtsam/hybrid/HybridValues.h index 98f862279a..89f7bb58a7 100644 --- a/gtsam/hybrid/HybridValues.h +++ b/gtsam/hybrid/HybridValues.h @@ -53,8 +53,8 @@ class GTSAM_EXPORT HybridValues { void print(const std::string& s = "HybridValues", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const{ std::cout << s << ": \n"; - discrete.print(" Discrete", keyFormatter); - continuous.print(" Continuous", keyFormatter); + discrete.print(" Discrete", keyFormatter); // print discrete components + continuous.print(" Continuous", keyFormatter); //print continuous components }; // equals required by Testable for unit testing From 2a974a4ca7cdb2769db27007bd7c551ea37f7044 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 15 Aug 2022 08:49:51 -0400 Subject: [PATCH 60/74] Address review comments --- gtsam/hybrid/HybridGaussianFactor.h | 4 --- gtsam/hybrid/HybridNonlinearFactor.h | 1 - gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 6 ---- gtsam/hybrid/HybridNonlinearFactorGraph.h | 8 ----- .../tests/testHybridNonlinearFactorGraph.cpp | 31 ++++++++++++++++++- 5 files changed, 30 insertions(+), 20 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 7ffe39e794..e645e63e50 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -39,10 +39,6 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { HybridGaussianFactor() = default; - HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys) - : Base(continuousKeys, discreteKeys) {} - // Explicit conversion from a shared ptr of GF explicit HybridGaussianFactor(GaussianFactor::shared_ptr other); diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 4a0c0fd9c1..7776347b36 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -39,7 +39,6 @@ class HybridNonlinearFactor : public HybridFactor { // Explicit conversion from a shared ptr of NonlinearFactor explicit HybridNonlinearFactor(const NonlinearFactor::shared_ptr &other); - public: /// @name Testable /// @{ diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 233badc550..a6abce15aa 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -42,12 +42,6 @@ void HybridNonlinearFactorGraph::print(const std::string& s, } } -/* ************************************************************************* */ -bool HybridNonlinearFactorGraph::equals(const HybridNonlinearFactorGraph& other, - double tol) const { - return false; -} - /* ************************************************************************* */ HybridGaussianFactorGraph HybridNonlinearFactorGraph::linearize( const Values& continuousValues) const { diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h index 073581da2c..2ddb8bcea2 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.h +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -109,11 +109,6 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { } } - // /// Add a nonlinear factor to the factor graph. - // void add(NonlinearFactor&& factor) { - // Base::add(boost::make_shared(std::move(factor))); - // } - /// Add a nonlinear factor as a shared ptr. 
void add(boost::shared_ptr factor); @@ -122,9 +117,6 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { const std::string& s = "HybridNonlinearFactorGraph", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override; - /// Check if this factor graph is equal to `other`. - bool equals(const HybridNonlinearFactorGraph& other, double tol = 1e-9) const; - /** * @brief Linearize all the continuous factors in the * HybridNonlinearFactorGraph. diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 814492686c..a3debb3a99 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -68,6 +68,30 @@ TEST(HybridFactorGraph, GaussianFactorGraph) { EXPECT_LONGS_EQUAL(2, ghfg.size()); } +/*************************************************************************** + * Test equality for Hybrid Nonlinear Factor Graph. + */ +TEST(HybridNonlinearFactorGraph, Equals) { + HybridNonlinearFactorGraph graph1; + HybridNonlinearFactorGraph graph2; + + // Test empty factor graphs + EXPECT(assert_equal(graph1, graph2)); + + auto f0 = boost::make_shared>( + 1, Pose2(), noiseModel::Isotropic::Sigma(3, 0.001)); + graph1.push_back(f0); + graph2.push_back(f0); + + auto f1 = boost::make_shared>( + 1, 2, Pose2(), noiseModel::Isotropic::Sigma(3, 0.1)); + graph1.push_back(f1); + graph2.push_back(f1); + + // Test non-empty graphs + EXPECT(assert_equal(graph1, graph2)); +} + /*************************************************************************** * Test that the resize method works correctly for a HybridNonlinearFactorGraph. */ @@ -682,8 +706,13 @@ TEST(HybridFactorGraph, DefaultDecisionTree) { moving = boost::make_shared(X(0), X(1), odometry, noise_model); std::vector motion_models = {still, moving}; + // TODO(Varun) Make a templated constructor for MixtureFactor which does this? + std::vector components; + for (auto&& f : motion_models) { + components.push_back(boost::dynamic_pointer_cast(f)); + } fg.emplace_hybrid( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models); + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); // Add Range-Bearing measurements to from X0 to L0 and X1 to L1. 
// create a noise model for the landmark measurements From ac20cff710fbecd0305625ed1d63575c8d84dce9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 16 Aug 2022 17:23:52 -0400 Subject: [PATCH 61/74] add incremental pruning to HybridGaussianISAM --- gtsam/hybrid/GaussianMixture.cpp | 27 +- gtsam/hybrid/GaussianMixture.h | 12 +- gtsam/hybrid/HybridGaussianISAM.cpp | 54 +- gtsam/hybrid/HybridGaussianISAM.h | 10 + gtsam/hybrid/tests/testHybridIncremental.cpp | 817 +++++++++---------- 5 files changed, 503 insertions(+), 417 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index b04800d21e..6816dfbf67 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -119,11 +119,36 @@ void GaussianMixture::print(const std::string &s, "", [&](Key k) { return formatter(k); }, [&](const GaussianConditional::shared_ptr &gf) -> std::string { RedirectCout rd; - if (!gf->empty()) + if (gf && !gf->empty()) gf->print("", formatter); else return {"nullptr"}; return rd.str(); }); } + +/* *******************************************************************************/ +void GaussianMixture::prune(const DecisionTreeFactor &decisionTree) { + // Functional which loops over all assignments and create a set of + // GaussianConditionals + auto pruner = [&decisionTree]( + const Assignment &choices, + const GaussianConditional::shared_ptr &conditional) + -> GaussianConditional::shared_ptr { + // typecast so we can use this to get probability value + DiscreteValues values(choices); + + if (decisionTree(values) == 0.0) { + // empty aka null pointer + boost::shared_ptr null; + return null; + } else { + return conditional; + } + }; + + auto pruned_conditionals = conditionals_.apply(pruner); + conditionals_.root_ = pruned_conditionals.root_; +} + } // namespace gtsam diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index 49aa31c4c4..75deb4d553 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -21,6 +21,7 @@ #include #include +#include #include #include #include @@ -121,7 +122,7 @@ class GTSAM_EXPORT GaussianMixture /// Test equality with base HybridFactor bool equals(const HybridFactor &lf, double tol = 1e-9) const override; - /* print utility */ + /// Print utility void print( const std::string &s = "GaussianMixture\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; @@ -131,6 +132,15 @@ class GTSAM_EXPORT GaussianMixture /// Getter for the underlying Conditionals DecisionTree const Conditionals &conditionals(); + /** + * @brief Prune the decision tree of Gaussian factors as per the discrete + * `decisionTree`. + * + * @param decisionTree A pruned decision tree of discrete keys where the + * leaves are probabilities. + */ + void prune(const DecisionTreeFactor &decisionTree); + /** * @brief Merge the Gaussian Factor Graphs in `this` and `sum` while * maintaining the decision tree structure. 
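The calling pattern the new GaussianMixture::prune() is designed for is sketched below; HybridGaussianISAM::prune in the next file follows exactly this shape. Here `decisionTree` is assumed to be the DecisionTreeFactor of the root discrete conditional and `gaussianMixture` a mixture over the same discrete keys:

    // First prune the discrete tree down to the most likely leaves, then
    // null out the Gaussian conditionals of every assignment that was pruned.
    DecisionTreeFactor prunedDiscreteFactor = decisionTree->prune(maxNrLeaves);
    gaussianMixture->prune(prunedDiscreteFactor);
    // Afterwards, conditionals()(assignment) returns nullptr wherever
    // prunedDiscreteFactor(assignment) == 0, so those branches are skipped
    // in later traversals.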
diff --git a/gtsam/hybrid/HybridGaussianISAM.cpp b/gtsam/hybrid/HybridGaussianISAM.cpp index 7783a88ddc..df78037221 100644 --- a/gtsam/hybrid/HybridGaussianISAM.cpp +++ b/gtsam/hybrid/HybridGaussianISAM.cpp @@ -41,6 +41,7 @@ HybridGaussianISAM::HybridGaussianISAM(const HybridBayesTree& bayesTree) void HybridGaussianISAM::updateInternal( const HybridGaussianFactorGraph& newFactors, HybridBayesTree::Cliques* orphans, + const boost::optional& ordering, const HybridBayesTree::Eliminate& function) { // Remove the contaminated part of the Bayes tree BayesNetType bn; @@ -78,12 +79,19 @@ void HybridGaussianISAM::updateInternal( // Get an ordering where the new keys are eliminated last const VariableIndex index(factors); - const Ordering ordering = Ordering::ColamdConstrainedLast( - index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()), - true); + Ordering elimination_ordering; + if (ordering) { + elimination_ordering = *ordering; + } else { + elimination_ordering = Ordering::ColamdConstrainedLast( + index, + KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()), + true); + } // eliminate all factors (top, added, orphans) into a new Bayes tree - auto bayesTree = factors.eliminateMultifrontal(ordering, function, index); + HybridBayesTree::shared_ptr bayesTree = + factors.eliminateMultifrontal(elimination_ordering, function, index); // Re-add into Bayes tree data structures this->roots_.insert(this->roots_.end(), bayesTree->roots().begin(), @@ -93,9 +101,45 @@ void HybridGaussianISAM::updateInternal( /* ************************************************************************* */ void HybridGaussianISAM::update(const HybridGaussianFactorGraph& newFactors, + const boost::optional& ordering, const HybridBayesTree::Eliminate& function) { Cliques orphans; - this->updateInternal(newFactors, &orphans, function); + this->updateInternal(newFactors, &orphans, ordering, function); +} + +void HybridGaussianISAM::prune(const Key& root, const size_t maxNrLeaves) { + auto decisionTree = boost::dynamic_pointer_cast( + this->clique(root)->conditional()->inner()); + DecisionTreeFactor prunedDiscreteFactor = decisionTree->prune(maxNrLeaves); + decisionTree->root_ = prunedDiscreteFactor.root_; + + std::vector prunedKeys; + for (auto&& clique : nodes()) { + // The cliques can be repeated for each frontal so we record it in + // prunedKeys and check if we have already pruned a particular clique. 
+ if (std::find(prunedKeys.begin(), prunedKeys.end(), clique.first) != + prunedKeys.end()) { + continue; + } + + // Add all the keys of the current clique to be pruned to prunedKeys + for (auto&& key : clique.second->conditional()->frontals()) { + prunedKeys.push_back(key); + } + + // Convert parents() to a KeyVector for comparison + KeyVector parents; + for (auto&& parent : clique.second->conditional()->parents()) { + parents.push_back(parent); + } + + if (parents == decisionTree->keys()) { + auto gaussianMixture = boost::dynamic_pointer_cast( + clique.second->conditional()->inner()); + + gaussianMixture->prune(prunedDiscreteFactor); + } + } } } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianISAM.h b/gtsam/hybrid/HybridGaussianISAM.h index d5b6271da7..4fc206582d 100644 --- a/gtsam/hybrid/HybridGaussianISAM.h +++ b/gtsam/hybrid/HybridGaussianISAM.h @@ -48,6 +48,7 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM { void updateInternal( const HybridGaussianFactorGraph& newFactors, HybridBayesTree::Cliques* orphans, + const boost::optional& ordering = boost::none, const HybridBayesTree::Eliminate& function = HybridBayesTree::EliminationTraitsType::DefaultEliminate); @@ -59,8 +60,17 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM { * @param function Elimination function. */ void update(const HybridGaussianFactorGraph& newFactors, + const boost::optional& ordering = boost::none, const HybridBayesTree::Eliminate& function = HybridBayesTree::EliminationTraitsType::DefaultEliminate); + + /** + * @brief + * + * @param root The root key in the discrete conditional decision tree. + * @param maxNumberLeaves + */ + void prune(const Key& root, const size_t maxNumberLeaves); }; /// traits diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp index 7ab64462c6..afd0d0a70d 100644 --- a/gtsam/hybrid/tests/testHybridIncremental.cpp +++ b/gtsam/hybrid/tests/testHybridIncremental.cpp @@ -52,10 +52,10 @@ TEST(HybridGaussianElimination, IncrementalElimination) { HybridGaussianFactorGraph graph1; // Create initial factor graph - // * * * - // | | | - // *- X1 -*- X2 -*- X3 - // \*-M1-*/ + // * * * + // | | | + // X1 -*- X2 -*- X3 + // \*-M1-*/ graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1) graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1) graph1.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2) @@ -179,7 +179,8 @@ TEST(HybridGaussianElimination, IncrementalInference) { DiscreteValues m00; m00[M(1)] = 0, m00[M(2)] = 0; - DiscreteConditional decisionTree = *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional(); + DiscreteConditional decisionTree = + *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional(); double m00_prob = decisionTree(m00); auto discreteConditional = isam[M(2)]->conditional()->asDiscreteConditional(); @@ -200,8 +201,8 @@ TEST(HybridGaussianElimination, IncrementalInference) { assignment[M(2)] = 1; EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5)); - // Check if the clique conditional generated from incremental elimination matches - // that of batch elimination. + // Check if the clique conditional generated from incremental elimination + // matches that of batch elimination. 
auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal(); auto expectedConditional = dynamic_pointer_cast( (*expectedChordal)[M(2)]->conditional()->inner()); @@ -213,419 +214,415 @@ TEST(HybridGaussianElimination, IncrementalInference) { /* ****************************************************************************/ // Test if we can approximately do the inference TEST(HybridGaussianElimination, Approx_inference) { -// Switching switching(4); -// IncrementalHybrid incrementalHybrid; -// HybridGaussianFactorGraph graph1; - -// // Add the 3 DC factors, x1-x2, x2-x3, x3-x4 -// for (size_t i = 0; i < 3; i++) { -// graph1.push_back(switching.linearizedFactorGraph.dcGraph().at(i)); -// } - -// // Add the Gaussian factors, 1 prior on X(1), 4 measurements -// for (size_t i = 0; i <= 4; i++) { -// graph1.push_back(switching.linearizedFactorGraph.gaussianGraph().at(i)); -// } - -// // Create ordering. -// Ordering ordering; -// for (size_t j = 1; j <= 4; j++) { -// ordering += X(j); -// } - -// // Now we calculate the actual factors using full elimination -// HybridBayesNet::shared_ptr unprunedHybridBayesNet; -// HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph; -// std::tie(unprunedHybridBayesNet, unprunedRemainingGraph) = -// switching.linearizedFactorGraph.eliminatePartialSequential(ordering); - -// size_t maxComponents = 5; -// incrementalHybrid.update(graph1, ordering, maxComponents); - -// /* -// unpruned factor is: -// Choice(m3) -// 0 Choice(m2) -// 0 0 Choice(m1) -// 0 0 0 Leaf 0.2248 - -// 0 0 1 Leaf 0.3715 - -// 0 1 Choice(m1) -// 0 1 0 Leaf 0.3742 * -// 0 1 1 Leaf 0.6125 * -// 1 Choice(m2) -// 1 0 Choice(m1) -// 1 0 0 Leaf 0.3706 - -// 1 0 1 Leaf 0.6124 * -// 1 1 Choice(m1) -// 1 1 0 Leaf 0.611 * -// 1 1 1 Leaf 1 * - -// pruned factors is: -// Choice(m3) -// 0 Choice(m2) -// 0 0 Leaf 0 -// 0 1 Choice(m1) -// 0 1 0 Leaf 0.3742 -// 0 1 1 Leaf 0.6125 -// 1 Choice(m2) -// 1 0 Choice(m1) -// 1 0 0 Leaf 0 -// 1 0 1 Leaf 0.6124 -// 1 1 Choice(m1) -// 1 1 0 Leaf 0.611 -// 1 1 1 Leaf 1 -// */ - -// // Test that the remaining factor graph has one -// // DecisionTreeFactor on {M3, M2, M1}. -// auto remainingFactorGraph = incrementalHybrid.remainingFactorGraph(); -// EXPECT_LONGS_EQUAL(1, remainingFactorGraph.size()); - -// auto discreteFactor_m1 = *dynamic_pointer_cast( -// incrementalHybrid.remainingDiscreteGraph().at(0)); -// EXPECT(discreteFactor_m1.keys() == KeyVector({M(3), M(2), M(1)})); - -// // Get the number of elements which are equal to 0. -// auto count = [](const double &value, int count) { -// return value > 0 ? count + 1 : count; -// }; -// // Check that the number of leaves after pruning is 5. -// EXPECT_LONGS_EQUAL(5, discreteFactor_m1.fold(count, 0)); - -// /* Expected hybrid Bayes net -// * factor 0: [x1 | x2 m1 ], 2 components -// * factor 1: [x2 | x3 m2 m1 ], 4 components -// * factor 2: [x3 | x4 m3 m2 m1 ], 8 components -// * factor 3: [x4 | m3 m2 m1 ], 8 components -// */ -// auto hybridBayesNet = incrementalHybrid.hybridBayesNet(); - -// // Check if we have a bayes net with 4 hybrid nodes, -// // each with 2, 4, 5 (pruned), and 5 (pruned) leaves respetively. 
-// EXPECT_LONGS_EQUAL(4, hybridBayesNet.size()); -// EXPECT_LONGS_EQUAL(2, hybridBayesNet.atGaussian(0)->nrComponents()); -// EXPECT_LONGS_EQUAL(4, hybridBayesNet.atGaussian(1)->nrComponents()); -// EXPECT_LONGS_EQUAL(5, hybridBayesNet.atGaussian(2)->nrComponents()); -// EXPECT_LONGS_EQUAL(5, hybridBayesNet.atGaussian(3)->nrComponents()); - -// // Check that the hybrid nodes of the bayes net match those of the bayes -// net -// // before pruning, at the same positions. -// auto &lastDensity = *(hybridBayesNet.atGaussian(3)); -// auto &unprunedLastDensity = *(unprunedHybridBayesNet->atGaussian(3)); -// std::vector> assignments = -// discreteFactor_m1.enumerate(); -// // Loop over all assignments and check the pruned components -// for (auto &&av : assignments) { -// const DiscreteValues &assignment = av.first; -// const double value = av.second; - -// if (value == 0.0) { -// EXPECT(lastDensity(assignment) == nullptr); -// } else { -// CHECK(lastDensity(assignment)); -// EXPECT(assert_equal(*unprunedLastDensity(assignment), -// *lastDensity(assignment))); -// } -// } + Switching switching(4); + HybridGaussianISAM incrementalHybrid; + HybridGaussianFactorGraph graph1; + + // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4 + for (size_t i = 1; i < 4; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } + + // Add the Gaussian factors, 1 prior on X(1), + // 3 measurements on X(2), X(3), X(4) + graph1.push_back(switching.linearizedFactorGraph.at(0)); + for (size_t i = 4; i <= 7; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } + + // Create ordering. + Ordering ordering; + for (size_t j = 1; j <= 4; j++) { + ordering += X(j); + } + + // Now we calculate the actual factors using full elimination + HybridBayesTree::shared_ptr unprunedHybridBayesTree; + HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph; + std::tie(unprunedHybridBayesTree, unprunedRemainingGraph) = + switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); + + size_t maxNrLeaves = 5; + incrementalHybrid.update(graph1); + + incrementalHybrid.prune(M(3), maxNrLeaves); + + /* + unpruned factor is: + Choice(m3) + 0 Choice(m2) + 0 0 Choice(m1) + 0 0 0 Leaf 0.11267528 + 0 0 1 Leaf 0.18576102 + 0 1 Choice(m1) + 0 1 0 Leaf 0.18754662 + 0 1 1 Leaf 0.30623871 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Leaf 0.18576102 + 1 0 1 Leaf 0.30622428 + 1 1 Choice(m1) + 1 1 0 Leaf 0.30623871 + 1 1 1 Leaf 0.5 + + pruned factors is: + Choice(m3) + 0 Choice(m2) + 0 0 Leaf 0 + 0 1 Choice(m1) + 0 1 0 Leaf 0.18754662 + 0 1 1 Leaf 0.30623871 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Leaf 0 + 1 0 1 Leaf 0.30622428 + 1 1 Choice(m1) + 1 1 0 Leaf 0.30623871 + 1 1 1 Leaf 0.5 + */ + + auto discreteConditional_m1 = *dynamic_pointer_cast( + incrementalHybrid[M(1)]->conditional()->inner()); + EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)})); + + // Get the number of elements which are greater than 0. + auto count = [](const double &value, int count) { + return value > 0 ? count + 1 : count; + }; + // Check that the number of leaves after pruning is 5. + EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0)); + + // Check that the hybrid nodes of the bayes net match those of the pre-pruning + // bayes net, at the same positions. 
+ auto &unprunedLastDensity = *dynamic_pointer_cast( + unprunedHybridBayesTree->clique(X(4))->conditional()->inner()); + auto &lastDensity = *dynamic_pointer_cast( + incrementalHybrid[X(4)]->conditional()->inner()); + + std::vector> assignments = + discreteConditional_m1.enumerate(); + // Loop over all assignments and check the pruned components + for (auto &&av : assignments) { + const DiscreteValues &assignment = av.first; + const double value = av.second; + + if (value == 0.0) { + EXPECT(lastDensity(assignment) == nullptr); + } else { + CHECK(lastDensity(assignment)); + EXPECT(assert_equal(*unprunedLastDensity(assignment), + *lastDensity(assignment))); + } + } } /* ****************************************************************************/ // Test approximate inference with an additional pruning step. TEST(HybridGaussianElimination, Incremental_approximate) { -// Switching switching(5); -// IncrementalHybrid incrementalHybrid; -// HybridGaussianFactorGraph graph1; - -// // Add the 3 DC factors, x1-x2, x2-x3, x3-x4 -// for (size_t i = 0; i < 3; i++) { -// graph1.push_back(switching.linearizedFactorGraph.dcGraph().at(i)); -// } - -// // Add the Gaussian factors, 1 prior on X(1), 4 measurements -// for (size_t i = 0; i <= 4; i++) { -// graph1.push_back(switching.linearizedFactorGraph.gaussianGraph().at(i)); -// } - -// // Create ordering. -// Ordering ordering; -// for (size_t j = 1; j <= 4; j++) { -// ordering += X(j); -// } - -// // Run update with pruning -// size_t maxComponents = 5; -// incrementalHybrid.update(graph1, ordering, maxComponents); - -// // Check if we have a bayes net with 4 hybrid nodes, -// // each with 2, 4, 8, and 5 (pruned) leaves respetively. -// auto actualBayesNet1 = incrementalHybrid.hybridBayesNet(); -// CHECK_EQUAL(4, actualBayesNet1.size()); -// EXPECT_LONGS_EQUAL(2, actualBayesNet1.atGaussian(0)->nrComponents()); -// EXPECT_LONGS_EQUAL(4, actualBayesNet1.atGaussian(1)->nrComponents()); -// EXPECT_LONGS_EQUAL(8, actualBayesNet1.atGaussian(2)->nrComponents()); -// EXPECT_LONGS_EQUAL(5, actualBayesNet1.atGaussian(3)->nrComponents()); - -// /***** Run Round 2 *****/ -// HybridGaussianFactorGraph graph2; -// graph2.push_back(switching.linearizedFactorGraph.dcGraph().at(3)); -// graph2.push_back(switching.linearizedFactorGraph.gaussianGraph().at(5)); - -// Ordering ordering2; -// ordering2 += X(4); -// ordering2 += X(5); - -// // Run update with pruning a second time. -// incrementalHybrid.update(graph2, ordering2, maxComponents); - -// // Check if we have a bayes net with 2 hybrid nodes, -// // each with 10 (pruned), and 5 (pruned) leaves respetively. -// auto actualBayesNet = incrementalHybrid.hybridBayesNet(); -// CHECK_EQUAL(2, actualBayesNet.size()); -// EXPECT_LONGS_EQUAL(10, actualBayesNet.atGaussian(0)->nrComponents()); -// EXPECT_LONGS_EQUAL(5, actualBayesNet.atGaussian(1)->nrComponents()); + Switching switching(5); + HybridGaussianISAM incrementalHybrid; + HybridGaussianFactorGraph graph1; + + /***** Run Round 1 *****/ + // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4 + for (size_t i = 1; i < 4; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } + + // Add the Gaussian factors, 1 prior on X(1), + // 3 measurements on X(2), X(3), X(4) + graph1.push_back(switching.linearizedFactorGraph.at(0)); + for (size_t i = 5; i <= 7; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } + + // Create ordering. 
+ Ordering ordering; + for (size_t j = 1; j <= 4; j++) { + ordering += X(j); + } + + // Run update with pruning + size_t maxComponents = 5; + incrementalHybrid.update(graph1); + incrementalHybrid.prune(M(3), maxComponents); + + // Check if we have a bayes tree with 4 hybrid nodes, + // each with 2, 4, 8, and 5 (pruned) leaves respetively. + EXPECT_LONGS_EQUAL(4, incrementalHybrid.size()); + EXPECT_LONGS_EQUAL( + 2, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 4, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents()); + + /***** Run Round 2 *****/ + HybridGaussianFactorGraph graph2; + graph2.push_back(switching.linearizedFactorGraph.at(4)); + graph2.push_back(switching.linearizedFactorGraph.at(8)); + + Ordering ordering2; + ordering2 += X(4); + ordering2 += X(5); + + // Run update with pruning a second time. + incrementalHybrid.update(graph2); + incrementalHybrid.prune(M(4), maxComponents); + + // Check if we have a bayes tree with pruned hybrid nodes, + // with 5 (pruned) leaves. + CHECK_EQUAL(5, incrementalHybrid.size()); + EXPECT_LONGS_EQUAL( + 5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents()); + EXPECT_LONGS_EQUAL( + 5, incrementalHybrid[X(5)]->conditional()->asMixture()->nrComponents()); } /* ************************************************************************/ // Test for figuring out the optimal ordering to ensure we get // a discrete graph after elimination. -TEST(IncrementalHybrid, NonTrivial) { -// // This is a GTSAM-only test for running inference on a single legged -// robot. -// // The leg links are represented by the chain X-Y-Z-W, where X is the base -// and -// // W is the foot. -// // We use BetweenFactor as constraints between each of the poses. - -// /*************** Run Round 1 ***************/ -// NonlinearHybridFactorGraph fg; - -// // Add a prior on pose x1 at the origin. -// // A prior factor consists of a mean and -// // a noise model (covariance matrix) -// Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin -// auto priorNoise = noiseModel::Diagonal::Sigmas( -// Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta -// fg.emplace_nonlinear>(X(0), prior, priorNoise); - -// // create a noise model for the landmark measurements -// auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1); - -// // We model a robot's single leg as X - Y - Z - W -// // where X is the base link and W is the foot link. - -// // Add connecting poses similar to PoseFactors in GTD -// fg.emplace_nonlinear>(X(0), Y(0), Pose2(0, 1.0, 0), -// poseNoise); -// fg.emplace_nonlinear>(Y(0), Z(0), Pose2(0, 1.0, 0), -// poseNoise); -// fg.emplace_nonlinear>(Z(0), W(0), Pose2(0, 1.0, 0), -// poseNoise); - -// // Create initial estimate -// Values initial; -// initial.insert(X(0), Pose2(0.0, 0.0, 0.0)); -// initial.insert(Y(0), Pose2(0.0, 1.0, 0.0)); -// initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); -// initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); - -// HybridGaussianFactorGraph gfg = fg.linearize(initial); -// fg = NonlinearHybridFactorGraph(); - -// IncrementalHybrid inc; - -// // Regular ordering going up the chain. 
-// Ordering ordering; -// ordering += W(0); -// ordering += Z(0); -// ordering += Y(0); -// ordering += X(0); - -// // Update without pruning -// // The result is a HybridBayesNet with no discrete variables -// // (equivalent to a GaussianBayesNet). -// // Factorization is: -// // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)` -// inc.update(gfg, ordering); - -// /*************** Run Round 2 ***************/ -// using PlanarMotionModel = BetweenFactor; - -// // Add odometry factor with discrete modes. -// Pose2 odometry(1.0, 0.0, 0.0); -// KeyVector contKeys = {W(0), W(1)}; -// auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); -// auto still = boost::make_shared(W(0), W(1), Pose2(0, 0, -// 0), -// noise_model), -// moving = boost::make_shared(W(0), W(1), odometry, -// noise_model); -// std::vector components = {moving, still}; -// auto dcFactor = boost::make_shared>( -// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); -// fg.push_back(dcFactor); - -// // Add equivalent of ImuFactor -// fg.emplace_nonlinear>(X(0), X(1), Pose2(1.0, 0.0, 0), -// poseNoise); -// // PoseFactors-like at k=1 -// fg.emplace_nonlinear>(X(1), Y(1), Pose2(0, 1, 0), -// poseNoise); -// fg.emplace_nonlinear>(Y(1), Z(1), Pose2(0, 1, 0), -// poseNoise); -// fg.emplace_nonlinear>(Z(1), W(1), Pose2(-1, 1, 0), -// poseNoise); - -// initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); -// initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); -// initial.insert(Z(1), Pose2(1.0, 2.0, 0.0)); -// // The leg link did not move so we set the expected pose accordingly. -// initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); - -// // Ordering for k=1. -// // This ordering follows the intuition that we eliminate the previous -// // timestep, and then the current timestep. -// ordering = Ordering(); -// ordering += W(0); -// ordering += Z(0); -// ordering += Y(0); -// ordering += X(0); -// ordering += W(1); -// ordering += Z(1); -// ordering += Y(1); -// ordering += X(1); - -// gfg = fg.linearize(initial); -// fg = NonlinearHybridFactorGraph(); - -// // Update without pruning -// // The result is a HybridBayesNet with 1 discrete variable M(1). -// // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) -// // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, -// M1) -// // P(Y1 | X1, M1)P(X1 | M1)P(M1) -// // The MHS tree is a 2 level tree for time indices (0, 1) with 2 leaves. -// inc.update(gfg, ordering); - -// /*************** Run Round 3 ***************/ -// // Add odometry factor with discrete modes. -// contKeys = {W(1), W(2)}; -// still = boost::make_shared(W(1), W(2), Pose2(0, 0, 0), -// noise_model); -// moving = -// boost::make_shared(W(1), W(2), odometry, -// noise_model); -// components = {moving, still}; -// dcFactor = boost::make_shared>( -// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); -// fg.push_back(dcFactor); - -// // Add equivalent of ImuFactor -// fg.emplace_nonlinear>(X(1), X(2), Pose2(1.0, 0.0, 0), -// poseNoise); -// // PoseFactors-like at k=1 -// fg.emplace_nonlinear>(X(2), Y(2), Pose2(0, 1, 0), -// poseNoise); -// fg.emplace_nonlinear>(Y(2), Z(2), Pose2(0, 1, 0), -// poseNoise); -// fg.emplace_nonlinear>(Z(2), W(2), Pose2(-2, 1, 0), -// poseNoise); - -// initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); -// initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); -// initial.insert(Z(2), Pose2(2.0, 2.0, 0.0)); -// initial.insert(W(2), Pose2(0.0, 3.0, 0.0)); - -// // Ordering at k=2. Same intuition as before. 
-// ordering = Ordering(); -// ordering += W(1); -// ordering += Z(1); -// ordering += Y(1); -// ordering += X(1); -// ordering += W(2); -// ordering += Z(2); -// ordering += Y(2); -// ordering += X(2); - -// gfg = fg.linearize(initial); -// fg = NonlinearHybridFactorGraph(); - -// // Now we prune! -// // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) -// P(X0 | X1, W1, M1) -// // P(W1|W2, Z1, X1, M1, M2) P(Z1| W2, Y1, X1, M1, M2) -// P(Y1 | W2, X1, M1, M2) -// // P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2) -// P(Z2|Y2, X2, M1, M2) -// // P(Y2 | X2, M1, M2)P(X2 | M1, M2) P(M1, M2) -// // The MHS at this point should be a 3 level tree on (0, 1, 2). -// // 0 has 2 choices, 1 has 4 choices and 2 has 4 choices. -// inc.update(gfg, ordering, 2); - -// /*************** Run Round 4 ***************/ -// // Add odometry factor with discrete modes. -// contKeys = {W(2), W(3)}; -// still = boost::make_shared(W(2), W(3), Pose2(0, 0, 0), -// noise_model); -// moving = -// boost::make_shared(W(2), W(3), odometry, -// noise_model); -// components = {moving, still}; -// dcFactor = boost::make_shared>( -// contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); -// fg.push_back(dcFactor); - -// // Add equivalent of ImuFactor -// fg.emplace_nonlinear>(X(2), X(3), Pose2(1.0, 0.0, 0), -// poseNoise); -// // PoseFactors-like at k=3 -// fg.emplace_nonlinear>(X(3), Y(3), Pose2(0, 1, 0), -// poseNoise); -// fg.emplace_nonlinear>(Y(3), Z(3), Pose2(0, 1, 0), -// poseNoise); -// fg.emplace_nonlinear>(Z(3), W(3), Pose2(-3, 1, 0), -// poseNoise); - -// initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); -// initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); -// initial.insert(Z(3), Pose2(3.0, 2.0, 0.0)); -// initial.insert(W(3), Pose2(0.0, 3.0, 0.0)); - -// // Ordering at k=3. Same intuition as before. -// ordering = Ordering(); -// ordering += W(2); -// ordering += Z(2); -// ordering += Y(2); -// ordering += X(2); -// ordering += W(3); -// ordering += Z(3); -// ordering += Y(3); -// ordering += X(3); - -// gfg = fg.linearize(initial); -// fg = NonlinearHybridFactorGraph(); - -// // Keep pruning! -// inc.update(gfg, ordering, 3); - -// // The final discrete graph should not be empty since we have eliminated -// // all continuous variables. -// EXPECT(!inc.remainingDiscreteGraph().empty()); - -// // Test if the optimal discrete mode assignment is (1, 1, 1). -// DiscreteValues optimal_assignment = -// inc.remainingDiscreteGraph().optimize(); DiscreteValues -// expected_assignment; expected_assignment[M(1)] = 1; -// expected_assignment[M(2)] = 1; -// expected_assignment[M(3)] = 1; -// EXPECT(assert_equal(expected_assignment, optimal_assignment)); - -// // Test if pruning worked correctly by checking that we only have 3 leaves -// in -// // the last node. -// auto lastConditional = boost::dynamic_pointer_cast( -// inc.hybridBayesNet().at(inc.hybridBayesNet().size() - 1)); -// EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); +TEST(HybridGaussianISAM, NonTrivial) { + // This is a GTSAM-only test for running inference on a single legged robot. + // The leg links are represented by the chain X-Y-Z-W, where X is the base and + // W is the foot. + // We use BetweenFactor as constraints between each of the poses. + + /*************** Run Round 1 ***************/ + HybridNonlinearFactorGraph fg; + + // // Add a prior on pose x1 at the origin. 
+ // // A prior factor consists of a mean and + // // a noise model (covariance matrix) + // Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin + // auto priorNoise = noiseModel::Diagonal::Sigmas( + // Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta + // fg.emplace_nonlinear>(X(0), prior, priorNoise); + + // // create a noise model for the landmark measurements + // auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1); + + // // We model a robot's single leg as X - Y - Z - W + // // where X is the base link and W is the foot link. + + // // Add connecting poses similar to PoseFactors in GTD + // fg.emplace_nonlinear>(X(0), Y(0), Pose2(0, 1.0, 0), + // poseNoise); + // fg.emplace_nonlinear>(Y(0), Z(0), Pose2(0, 1.0, 0), + // poseNoise); + // fg.emplace_nonlinear>(Z(0), W(0), Pose2(0, 1.0, 0), + // poseNoise); + + // // Create initial estimate + // Values initial; + // initial.insert(X(0), Pose2(0.0, 0.0, 0.0)); + // initial.insert(Y(0), Pose2(0.0, 1.0, 0.0)); + // initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); + // initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); + + // HybridGaussianFactorGraph gfg = fg.linearize(initial); + // fg = HybridNonlinearFactorGraph(); + + // HybridGaussianISAM inc; + + // // Regular ordering going up the chain. + // Ordering ordering; + // ordering += W(0); + // ordering += Z(0); + // ordering += Y(0); + // ordering += X(0); + + // // Update without pruning + // // The result is a HybridBayesNet with no discrete variables + // // (equivalent to a GaussianBayesNet). + // // Factorization is: + // // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)` + // inc.update(gfg, ordering); + + // /*************** Run Round 2 ***************/ + // using PlanarMotionModel = BetweenFactor; + + // // Add odometry factor with discrete modes. + // Pose2 odometry(1.0, 0.0, 0.0); + // KeyVector contKeys = {W(0), W(1)}; + // auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); + // auto still = boost::make_shared(W(0), W(1), Pose2(0, + // 0, 0), + // noise_model), + // moving = boost::make_shared(W(0), W(1), odometry, + // noise_model); + // std::vector components = {moving, still}; + // auto dcFactor = boost::make_shared>( + // contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + // fg.push_back(dcFactor); + + // // Add equivalent of ImuFactor + // fg.emplace_nonlinear>(X(0), X(1), Pose2(1.0, 0.0, + // 0), + // poseNoise); + // // PoseFactors-like at k=1 + // fg.emplace_nonlinear>(X(1), Y(1), Pose2(0, 1, 0), + // poseNoise); + // fg.emplace_nonlinear>(Y(1), Z(1), Pose2(0, 1, 0), + // poseNoise); + // fg.emplace_nonlinear>(Z(1), W(1), Pose2(-1, 1, 0), + // poseNoise); + + // initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); + // initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); + // initial.insert(Z(1), Pose2(1.0, 2.0, 0.0)); + // // The leg link did not move so we set the expected pose accordingly. + // initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); + + // // Ordering for k=1. + // // This ordering follows the intuition that we eliminate the previous + // // timestep, and then the current timestep. + // ordering = Ordering(); + // ordering += W(0); + // ordering += Z(0); + // ordering += Y(0); + // ordering += X(0); + // ordering += W(1); + // ordering += Z(1); + // ordering += Y(1); + // ordering += X(1); + + // gfg = fg.linearize(initial); + // fg = HybridNonlinearFactorGraph(); + + // // Update without pruning + // // The result is a HybridBayesNet with 1 discrete variable M(1). 
+ // // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, + // M1) + // // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, + // M1) + // // P(Y1 | X1, M1)P(X1 | M1)P(M1) + // // The MHS tree is a 2 level tree for time indices (0, 1) with 2 leaves. + // inc.update(gfg, ordering); + + // /*************** Run Round 3 ***************/ + // // Add odometry factor with discrete modes. + // contKeys = {W(1), W(2)}; + // still = boost::make_shared(W(1), W(2), Pose2(0, 0, 0), + // noise_model); + // moving = + // boost::make_shared(W(1), W(2), odometry, + // noise_model); + // components = {moving, still}; + // dcFactor = boost::make_shared>( + // contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); + // fg.push_back(dcFactor); + + // // Add equivalent of ImuFactor + // fg.emplace_nonlinear>(X(1), X(2), Pose2(1.0, 0.0, + // 0), + // poseNoise); + // // PoseFactors-like at k=1 + // fg.emplace_nonlinear>(X(2), Y(2), Pose2(0, 1, 0), + // poseNoise); + // fg.emplace_nonlinear>(Y(2), Z(2), Pose2(0, 1, 0), + // poseNoise); + // fg.emplace_nonlinear>(Z(2), W(2), Pose2(-2, 1, 0), + // poseNoise); + + // initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); + // initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); + // initial.insert(Z(2), Pose2(2.0, 2.0, 0.0)); + // initial.insert(W(2), Pose2(0.0, 3.0, 0.0)); + + // // Ordering at k=2. Same intuition as before. + // ordering = Ordering(); + // ordering += W(1); + // ordering += Z(1); + // ordering += Y(1); + // ordering += X(1); + // ordering += W(2); + // ordering += Z(2); + // ordering += Y(2); + // ordering += X(2); + + // gfg = fg.linearize(initial); + // fg = HybridNonlinearFactorGraph(); + + // // Now we prune! + // // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, + // M1) P(X0 | X1, W1, M1) + // // P(W1|W2, Z1, X1, M1, M2) P(Z1| W2, Y1, X1, M1, + // M2) P(Y1 | W2, X1, M1, M2) + // // P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2) + // P(Z2|Y2, X2, M1, M2) + // // P(Y2 | X2, M1, M2)P(X2 | M1, M2) P(M1, M2) + // // The MHS at this point should be a 3 level tree on (0, 1, 2). + // // 0 has 2 choices, 1 has 4 choices and 2 has 4 choices. + // inc.update(gfg, ordering, 2); + + // /*************** Run Round 4 ***************/ + // // Add odometry factor with discrete modes. + // contKeys = {W(2), W(3)}; + // still = boost::make_shared(W(2), W(3), Pose2(0, 0, 0), + // noise_model); + // moving = + // boost::make_shared(W(2), W(3), odometry, + // noise_model); + // components = {moving, still}; + // dcFactor = boost::make_shared>( + // contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); + // fg.push_back(dcFactor); + + // // Add equivalent of ImuFactor + // fg.emplace_nonlinear>(X(2), X(3), Pose2(1.0, 0.0, + // 0), + // poseNoise); + // // PoseFactors-like at k=3 + // fg.emplace_nonlinear>(X(3), Y(3), Pose2(0, 1, 0), + // poseNoise); + // fg.emplace_nonlinear>(Y(3), Z(3), Pose2(0, 1, 0), + // poseNoise); + // fg.emplace_nonlinear>(Z(3), W(3), Pose2(-3, 1, 0), + // poseNoise); + + // initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); + // initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); + // initial.insert(Z(3), Pose2(3.0, 2.0, 0.0)); + // initial.insert(W(3), Pose2(0.0, 3.0, 0.0)); + + // // Ordering at k=3. Same intuition as before. 
+ // ordering = Ordering(); + // ordering += W(2); + // ordering += Z(2); + // ordering += Y(2); + // ordering += X(2); + // ordering += W(3); + // ordering += Z(3); + // ordering += Y(3); + // ordering += X(3); + + // gfg = fg.linearize(initial); + // fg = HybridNonlinearFactorGraph(); + + // // Keep pruning! + // inc.update(gfg, ordering, 3); + + // // The final discrete graph should not be empty since we have eliminated + // // all continuous variables. + // EXPECT(!inc.remainingDiscreteGraph().empty()); + + // // Test if the optimal discrete mode assignment is (1, 1, 1). + // DiscreteValues optimal_assignment = + // inc.remainingDiscreteGraph().optimize(); DiscreteValues + // expected_assignment; expected_assignment[M(1)] = 1; + // expected_assignment[M(2)] = 1; + // expected_assignment[M(3)] = 1; + // EXPECT(assert_equal(expected_assignment, optimal_assignment)); + + // // Test if pruning worked correctly by checking that we only have 3 + // leaves in + // // the last node. + // auto lastConditional = boost::dynamic_pointer_cast( + // inc.hybridBayesNet().at(inc.hybridBayesNet().size() - 1)); + // EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); } /* ************************************************************************* */ From 379a65f40fd502c04eacd69a871575c0771a074b Mon Sep 17 00:00:00 2001 From: sjxue Date: Tue, 16 Aug 2022 18:26:59 -0400 Subject: [PATCH 62/74] Address review comments --- gtsam/hybrid/HybridBayesNet.h | 3 +- gtsam/hybrid/HybridLookupDAG.cpp | 40 ++++--- gtsam/hybrid/HybridLookupDAG.h | 11 +- gtsam/hybrid/HybridValues.h | 73 +++++------- .../tests/testGaussianHybridFactorGraph.cpp | 1 - gtsam/hybrid/tests/testHybridLookupDAG.cpp | 112 +++++++++--------- gtsam/hybrid/tests/testHybridValues.cpp | 11 +- 7 files changed, 120 insertions(+), 131 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index b19528120a..9d6d5f2361 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -64,7 +64,8 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { GaussianBayesNet choose(const DiscreteValues &assignment) const; /// Solve the HybridBayesNet by back-substitution. - /// TODO(Shangjie) do we need to create a HybridGaussianBayesNet class, and put this method there? + /// TODO(Shangjie) do we need to create a HybridGaussianBayesNet class, and + /// put this method there? HybridValues optimize() const; }; diff --git a/gtsam/hybrid/HybridLookupDAG.cpp b/gtsam/hybrid/HybridLookupDAG.cpp index 7232309f4d..7acff081bf 100644 --- a/gtsam/hybrid/HybridLookupDAG.cpp +++ b/gtsam/hybrid/HybridLookupDAG.cpp @@ -18,10 +18,10 @@ #include #include #include -#include -#include #include +#include #include +#include #include #include @@ -32,28 +32,32 @@ using std::vector; namespace gtsam { - - /* ************************************************************************** */ void HybridLookupTable::argmaxInPlace(HybridValues* values) const { - // For discrete conditional, uses argmaxInPlace() method in DiscreteLookupTable. - if (isDiscrete()){ - boost::static_pointer_cast(inner_)->argmaxInPlace(&(values->discrete)); - } else if (isContinuous()){ + // For discrete conditional, uses argmaxInPlace() method in + // DiscreteLookupTable. + if (isDiscrete()) { + boost::static_pointer_cast(inner_)->argmaxInPlace( + &(values->discrete)); + } else if (isContinuous()) { // For Gaussian conditional, uses solve() method in GaussianConditional. 
- values->continuous.insert(boost::static_pointer_cast(inner_)->solve(values->continuous)); - } else if (isHybrid()){ - // For hybrid conditional, since children should not contain discrete variable, we can condition on - // the discrete variable in the parents and solve the resulting GaussianConditional. - auto conditional = boost::static_pointer_cast(inner_)->conditionals()(values->discrete); + values->continuous.insert( + boost::static_pointer_cast(inner_)->solve( + values->continuous)); + } else if (isHybrid()) { + // For hybrid conditional, since children should not contain discrete + // variable, we can condition on the discrete variable in the parents and + // solve the resulting GaussianConditional. + auto conditional = + boost::static_pointer_cast(inner_)->conditionals()( + values->discrete); values->continuous.insert(conditional->solve(values->continuous)); - } + } } - -// /* ************************************************************************** */ -HybridLookupDAG HybridLookupDAG::FromBayesNet( - const HybridBayesNet& bayesNet) { +// /* ************************************************************************** +// */ +HybridLookupDAG HybridLookupDAG::FromBayesNet(const HybridBayesNet& bayesNet) { HybridLookupDAG dag; for (auto&& conditional : bayesNet) { HybridLookupTable hlt(*conditional); diff --git a/gtsam/hybrid/HybridLookupDAG.h b/gtsam/hybrid/HybridLookupDAG.h index 903cc55190..cc1c58c58f 100644 --- a/gtsam/hybrid/HybridLookupDAG.h +++ b/gtsam/hybrid/HybridLookupDAG.h @@ -19,10 +19,10 @@ #include #include -#include -#include #include #include +#include +#include #include #include @@ -34,8 +34,8 @@ namespace gtsam { /** * @brief HybridLookupTable table for max-product * - * Similar to DiscreteLookupTable, inherits from hybrid conditional for convenience. - * Is used in the max-product algorithm. + * Similar to DiscreteLookupTable, inherits from hybrid conditional for + * convenience. Is used in the max-product algorithm. */ class GTSAM_EXPORT HybridLookupTable : public HybridConditional { public: @@ -58,7 +58,8 @@ class GTSAM_EXPORT HybridLookupTable : public HybridConditional { void argmaxInPlace(HybridValues* parentsValues) const; }; -/** A DAG made from hybrid lookup tables, as defined above. Similar to DiscreteLookupDAG */ +/** A DAG made from hybrid lookup tables, as defined above. Similar to + * DiscreteLookupDAG */ class GTSAM_EXPORT HybridLookupDAG : public BayesNet { public: using Base = BayesNet; diff --git a/gtsam/hybrid/HybridValues.h b/gtsam/hybrid/HybridValues.h index 89f7bb58a7..5e1bd41646 100644 --- a/gtsam/hybrid/HybridValues.h +++ b/gtsam/hybrid/HybridValues.h @@ -19,11 +19,10 @@ #include #include +#include #include -#include #include -#include - +#include #include #include @@ -32,8 +31,9 @@ namespace gtsam { /** - * HybridValues represents a collection of DiscreteValues and VectorValues. It is typically used to store the variables - * of a HybridGaussianFactorGraph. Optimizing a HybridGaussianBayesNet returns this class. + * HybridValues represents a collection of DiscreteValues and VectorValues. It + * is typically used to store the variables of a HybridGaussianFactorGraph. + * Optimizing a HybridGaussianBayesNet returns this class. */ class GTSAM_EXPORT HybridValues { public: @@ -44,54 +44,47 @@ class GTSAM_EXPORT HybridValues { VectorValues continuous; // Default constructor creates an empty HybridValues. - HybridValues() : discrete(), continuous() {}; + HybridValues() : discrete(), continuous(){}; // Construct from DiscreteValues and VectorValues. 
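  // (This is, e.g., how HybridBayesNet::optimize() assembles its result.)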
- HybridValues(const DiscreteValues &dv, const VectorValues &cv) : discrete(dv), continuous(cv) {}; + HybridValues(const DiscreteValues& dv, const VectorValues& cv) + : discrete(dv), continuous(cv){}; // print required by Testable for unit testing void print(const std::string& s = "HybridValues", - const KeyFormatter& keyFormatter = DefaultKeyFormatter) const{ + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { std::cout << s << ": \n"; - discrete.print(" Discrete", keyFormatter); // print discrete components - continuous.print(" Continuous", keyFormatter); //print continuous components + discrete.print(" Discrete", keyFormatter); // print discrete components + continuous.print(" Continuous", + keyFormatter); // print continuous components }; // equals required by Testable for unit testing bool equals(const HybridValues& other, double tol = 1e-9) const { - return discrete.equals(other.discrete, tol) && continuous.equals(other.continuous, tol); + return discrete.equals(other.discrete, tol) && + continuous.equals(other.continuous, tol); } // Check whether a variable with key \c j exists in DiscreteValue. - bool existsDiscrete(Key j){ - return (discrete.find(j) != discrete.end()); - }; + bool existsDiscrete(Key j) { return (discrete.find(j) != discrete.end()); }; // Check whether a variable with key \c j exists in VectorValue. - bool existsVector(Key j){ - return continuous.exists(j); - }; + bool existsVector(Key j) { return continuous.exists(j); }; // Check whether a variable with key \c j exists. - bool exists(Key j){ - return existsDiscrete(j) || existsVector(j); - }; + bool exists(Key j) { return existsDiscrete(j) || existsVector(j); }; - /** Insert a discrete \c value with key \c j. Replaces the existing value if the key \c - * j is already used. - * @param value The vector to be inserted. - * @param j The index with which the value will be associated. */ - void insert(Key j, int value){ - discrete[j] = value; - }; + /** Insert a discrete \c value with key \c j. Replaces the existing value if + * the key \c j is already used. + * @param value The vector to be inserted. + * @param j The index with which the value will be associated. */ + void insert(Key j, int value) { discrete[j] = value; }; - /** Insert a vector \c value with key \c j. Throws an invalid_argument exception if the key \c - * j is already used. - * @param value The vector to be inserted. - * @param j The index with which the value will be associated. */ - void insert(Key j, const Vector& value) { - continuous.insert(j, value); - } + /** Insert a vector \c value with key \c j. Throws an invalid_argument + * exception if the key \c j is already used. + * @param value The vector to be inserted. + * @param j The index with which the value will be associated. */ + void insert(Key j, const Vector& value) { continuous.insert(j, value); } // TODO(Shangjie)- update() and insert_or_assign() , similar to Values.h @@ -99,18 +92,13 @@ class GTSAM_EXPORT HybridValues { * Read/write access to the discrete value with key \c j, throws * std::out_of_range if \c j does not exist. */ - size_t& atDiscrete(Key j){ - return discrete.at(j); - }; + size_t& atDiscrete(Key j) { return discrete.at(j); }; /** * Read/write access to the vector value with key \c j, throws * std::out_of_range if \c j does not exist. 
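   * For example, `at(j) *= 2.0` scales the stored vector in place, since a
   * non-const reference is returned.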
*/ - Vector& at(Key j) { - return continuous.at(j); - }; - + Vector& at(Key j) { return continuous.at(j); }; /// @name Wrapper support /// @{ @@ -121,7 +109,8 @@ class GTSAM_EXPORT HybridValues { * @param keyFormatter function that formats keys. * @return string html output. */ - std::string html(const KeyFormatter& keyFormatter = DefaultKeyFormatter) const{ + std::string html( + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { std::stringstream ss; ss << this->discrete.html(keyFormatter); ss << this->continuous.html(keyFormatter); diff --git a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp index 7e532b0137..17a2d94d74 100644 --- a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp @@ -523,7 +523,6 @@ TEST(HybridGaussianFactorGraph, optimize) { HybridValues hv = result->optimize(); EXPECT(assert_equal(hv.atDiscrete(C(1)), int(0))); - } /* ************************************************************************* */ int main() { diff --git a/gtsam/hybrid/tests/testHybridLookupDAG.cpp b/gtsam/hybrid/tests/testHybridLookupDAG.cpp index 70b09ecbe9..c472aa22f5 100644 --- a/gtsam/hybrid/tests/testHybridLookupDAG.cpp +++ b/gtsam/hybrid/tests/testHybridLookupDAG.cpp @@ -17,19 +17,19 @@ #include #include -#include #include -#include #include -#include -#include -#include -#include -#include +#include +#include +#include #include #include -#include +#include +#include +#include #include +#include +#include // Include for test suite #include @@ -43,7 +43,7 @@ using symbol_shorthand::M; using symbol_shorthand::X; TEST(HybridLookupTable, basics) { - // create a conditional gaussian node + // create a conditional gaussian node Matrix S1(2, 2); S1(0, 0) = 1; S1(1, 0) = 2; @@ -82,39 +82,38 @@ TEST(HybridLookupTable, basics) { GaussianMixture::Conditionals conditionals( {m1}, vector{conditional0, conditional1}); -// GaussianMixture mixtureFactor2({X(1)}, {X(2)}, {m1}, conditionals); - - boost::shared_ptr mixtureFactor(new GaussianMixture({X(1)}, {X(2)}, {m1}, conditionals)); - + // GaussianMixture mixtureFactor2({X(1)}, {X(2)}, {m1}, conditionals); + + boost::shared_ptr mixtureFactor( + new GaussianMixture({X(1)}, {X(2)}, {m1}, conditionals)); + HybridConditional hc(mixtureFactor); - GaussianMixture::Conditionals conditional2 = boost::static_pointer_cast(hc.inner())->conditionals(); + GaussianMixture::Conditionals conditional2 = + boost::static_pointer_cast(hc.inner())->conditionals(); DiscreteValues dv; - dv[1]=1; + dv[1] = 1; VectorValues cv; - cv.insert(X(2),Vector2(0.0, 0.0)); - - HybridValues hv(dv, cv); + cv.insert(X(2), Vector2(0.0, 0.0)); - + HybridValues hv(dv, cv); // std::cout << conditional2(values).markdown(); EXPECT(assert_equal(*conditional2(dv), *conditionals(dv), 1e-6)); - EXPECT(conditional2(dv)==conditionals(dv)); + EXPECT(conditional2(dv) == conditionals(dv)); HybridLookupTable hlt(hc); -// hlt.argmaxInPlace(&hv); - + // hlt.argmaxInPlace(&hv); + HybridLookupDAG dag; dag.push_back(hlt); dag.argmax(hv); -// HybridBayesNet hbn; -// hbn.push_back(hc); -// hbn.optimize(); - + // HybridBayesNet hbn; + // hbn.push_back(hc); + // hbn.optimize(); } TEST(HybridLookupTable, hybrid_argmax) { @@ -124,23 +123,26 @@ TEST(HybridLookupTable, hybrid_argmax) { S1(0, 1) = 0; S1(1, 1) = 1; - Vector2 d1(0.2, 0.5), d2(-0.5,0.6); + Vector2 d1(0.2, 0.5), d2(-0.5, 0.6); SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); - auto conditional0 = 
boost::make_shared(X(1), d1, S1, model), - conditional1 = boost::make_shared(X(1), d2, S1, model); + auto conditional0 = + boost::make_shared(X(1), d1, S1, model), + conditional1 = + boost::make_shared(X(1), d2, S1, model); DiscreteKey m1(1, 2); GaussianMixture::Conditionals conditionals( {m1}, vector{conditional0, conditional1}); - boost::shared_ptr mixtureFactor(new GaussianMixture({X(1)},{}, {m1}, conditionals)); + boost::shared_ptr mixtureFactor( + new GaussianMixture({X(1)}, {}, {m1}, conditionals)); HybridConditional hc(mixtureFactor); DiscreteValues dv; - dv[1]=1; + dv[1] = 1; VectorValues cv; // cv.insert(X(2),Vector2(0.0, 0.0)); HybridValues hv(dv, cv); @@ -150,8 +152,6 @@ TEST(HybridLookupTable, hybrid_argmax) { hlt.argmaxInPlace(&hv); EXPECT(assert_equal(hv.at(X(1)), d2)); - - } TEST(HybridLookupTable, discrete_argmax) { @@ -164,18 +164,17 @@ TEST(HybridLookupTable, discrete_argmax) { HybridLookupTable hlt(hc); DiscreteValues dv; - dv[1]=0; + dv[1] = 0; VectorValues cv; // cv.insert(X(2),Vector2(0.0, 0.0)); HybridValues hv(dv, cv); - hlt.argmaxInPlace(&hv); EXPECT(assert_equal(hv.atDiscrete(0), 1)); - DecisionTreeFactor f1(X , "2 3"); - auto conditional2 = boost::make_shared(1,f1); + DecisionTreeFactor f1(X, "2 3"); + auto conditional2 = boost::make_shared(1, f1); HybridConditional hc2(conditional2); @@ -183,7 +182,6 @@ TEST(HybridLookupTable, discrete_argmax) { HybridValues hv2; - hlt2.argmaxInPlace(&hv2); EXPECT(assert_equal(hv2.atDiscrete(0), 1)); @@ -196,12 +194,12 @@ TEST(HybridLookupTable, gaussian_argmax) { S1(0, 1) = 0; S1(1, 1) = 1; - Vector2 d1(0.2, 0.5), d2(-0.5,0.6); + Vector2 d1(0.2, 0.5), d2(-0.5, 0.6); SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); - auto conditional = boost::make_shared(X(1), d1, S1, - X(2), -S1, model); + auto conditional = + boost::make_shared(X(1), d1, S1, X(2), -S1, model); HybridConditional hc(conditional); @@ -210,52 +208,51 @@ TEST(HybridLookupTable, gaussian_argmax) { DiscreteValues dv; // dv[1]=0; VectorValues cv; - cv.insert(X(2),d2); + cv.insert(X(2), d2); HybridValues hv(dv, cv); - hlt.argmaxInPlace(&hv); - EXPECT(assert_equal(hv.at(X(1)), d1+d2)); - + EXPECT(assert_equal(hv.at(X(1)), d1 + d2)); } TEST(HybridLookupDAG, argmax) { - Matrix S1(2, 2); S1(0, 0) = 1; S1(1, 0) = 0; S1(0, 1) = 0; S1(1, 1) = 1; - Vector2 d1(0.2, 0.5), d2(-0.5,0.6); + Vector2 d1(0.2, 0.5), d2(-0.5, 0.6); SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); - auto conditional0 = boost::make_shared(X(2), d1, S1, model), - conditional1 = boost::make_shared(X(2), d2, S1, model); + auto conditional0 = + boost::make_shared(X(2), d1, S1, model), + conditional1 = + boost::make_shared(X(2), d2, S1, model); DiscreteKey m1(1, 2); GaussianMixture::Conditionals conditionals( {m1}, vector{conditional0, conditional1}); - boost::shared_ptr mixtureFactor(new GaussianMixture({X(2)},{}, {m1}, conditionals)); + boost::shared_ptr mixtureFactor( + new GaussianMixture({X(2)}, {}, {m1}, conditionals)); HybridConditional hc2(mixtureFactor); HybridLookupTable hlt2(hc2); - - auto conditional2 = boost::make_shared(X(1), d1, S1, - X(2), -S1, model); + auto conditional2 = + boost::make_shared(X(1), d1, S1, X(2), -S1, model); HybridConditional hc1(conditional2); HybridLookupTable hlt1(hc1); - DecisionTreeFactor f1(m1 , "2 3"); - auto discrete_conditional = boost::make_shared(1,f1); + DecisionTreeFactor f1(m1, "2 3"); + auto discrete_conditional = boost::make_shared(1, f1); HybridConditional hc3(discrete_conditional); HybridLookupTable hlt3(hc3); 
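  // Assemble the DAG children-first; argmax() walks it in reverse push-back
  // order, so P(m1) is assigned before P(X2|m1) and P(X1|X2).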
- + HybridLookupDAG dag; dag.push_back(hlt1); dag.push_back(hlt2); @@ -264,10 +261,9 @@ TEST(HybridLookupDAG, argmax) { EXPECT(assert_equal(hv.atDiscrete(1), 1)); EXPECT(assert_equal(hv.at(X(2)), d2)); - EXPECT(assert_equal(hv.at(X(1)), d2+d1)); + EXPECT(assert_equal(hv.at(X(1)), d2 + d1)); } - /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/hybrid/tests/testHybridValues.cpp b/gtsam/hybrid/tests/testHybridValues.cpp index 3e821aef2f..9581faaa09 100644 --- a/gtsam/hybrid/tests/testHybridValues.cpp +++ b/gtsam/hybrid/tests/testHybridValues.cpp @@ -17,19 +17,18 @@ #include #include -#include #include #include +#include +#include #include -#include -#include #include -#include +#include +#include // Include for test suite #include - using namespace std; using namespace gtsam; @@ -47,7 +46,7 @@ TEST(HybridValues, basics) { values2.insert(99, Vector2(2, 3)); EXPECT(assert_equal(values2, values)); - values2.insert(98, Vector2(2,3)); + values2.insert(98, Vector2(2, 3)); EXPECT(!assert_equal(values2, values)); } From 83b8103db30e6e3426f542563021ebadb63db5e8 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 17 Aug 2022 16:28:47 -0400 Subject: [PATCH 63/74] last test to get running --- gtsam/hybrid/HybridGaussianISAM.cpp | 2 - gtsam/hybrid/tests/testHybridIncremental.cpp | 426 +++++++++---------- 2 files changed, 211 insertions(+), 217 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianISAM.cpp b/gtsam/hybrid/HybridGaussianISAM.cpp index df78037221..afcda8c208 100644 --- a/gtsam/hybrid/HybridGaussianISAM.cpp +++ b/gtsam/hybrid/HybridGaussianISAM.cpp @@ -75,8 +75,6 @@ void HybridGaussianISAM::updateInternal( std::copy(allDiscrete.begin(), allDiscrete.end(), std::back_inserter(newKeysDiscreteLast)); - // KeyVector new - // Get an ordering where the new keys are eliminated last const VariableIndex index(factors); Ordering elimination_ordering; diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp index afd0d0a70d..8feab52216 100644 --- a/gtsam/hybrid/tests/testHybridIncremental.cpp +++ b/gtsam/hybrid/tests/testHybridIncremental.cpp @@ -393,221 +393,217 @@ TEST(HybridGaussianISAM, NonTrivial) { /*************** Run Round 1 ***************/ HybridNonlinearFactorGraph fg; - // // Add a prior on pose x1 at the origin. - // // A prior factor consists of a mean and - // // a noise model (covariance matrix) - // Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin - // auto priorNoise = noiseModel::Diagonal::Sigmas( - // Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta - // fg.emplace_nonlinear>(X(0), prior, priorNoise); - - // // create a noise model for the landmark measurements - // auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1); - - // // We model a robot's single leg as X - Y - Z - W - // // where X is the base link and W is the foot link. 
- - // // Add connecting poses similar to PoseFactors in GTD - // fg.emplace_nonlinear>(X(0), Y(0), Pose2(0, 1.0, 0), - // poseNoise); - // fg.emplace_nonlinear>(Y(0), Z(0), Pose2(0, 1.0, 0), - // poseNoise); - // fg.emplace_nonlinear>(Z(0), W(0), Pose2(0, 1.0, 0), - // poseNoise); - - // // Create initial estimate - // Values initial; - // initial.insert(X(0), Pose2(0.0, 0.0, 0.0)); - // initial.insert(Y(0), Pose2(0.0, 1.0, 0.0)); - // initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); - // initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); - - // HybridGaussianFactorGraph gfg = fg.linearize(initial); - // fg = HybridNonlinearFactorGraph(); - - // HybridGaussianISAM inc; - - // // Regular ordering going up the chain. - // Ordering ordering; - // ordering += W(0); - // ordering += Z(0); - // ordering += Y(0); - // ordering += X(0); - - // // Update without pruning - // // The result is a HybridBayesNet with no discrete variables - // // (equivalent to a GaussianBayesNet). - // // Factorization is: - // // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)` - // inc.update(gfg, ordering); - - // /*************** Run Round 2 ***************/ - // using PlanarMotionModel = BetweenFactor; - - // // Add odometry factor with discrete modes. - // Pose2 odometry(1.0, 0.0, 0.0); - // KeyVector contKeys = {W(0), W(1)}; - // auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); - // auto still = boost::make_shared(W(0), W(1), Pose2(0, - // 0, 0), - // noise_model), - // moving = boost::make_shared(W(0), W(1), odometry, - // noise_model); - // std::vector components = {moving, still}; - // auto dcFactor = boost::make_shared>( - // contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); - // fg.push_back(dcFactor); - - // // Add equivalent of ImuFactor - // fg.emplace_nonlinear>(X(0), X(1), Pose2(1.0, 0.0, - // 0), - // poseNoise); - // // PoseFactors-like at k=1 - // fg.emplace_nonlinear>(X(1), Y(1), Pose2(0, 1, 0), - // poseNoise); - // fg.emplace_nonlinear>(Y(1), Z(1), Pose2(0, 1, 0), - // poseNoise); - // fg.emplace_nonlinear>(Z(1), W(1), Pose2(-1, 1, 0), - // poseNoise); - - // initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); - // initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); - // initial.insert(Z(1), Pose2(1.0, 2.0, 0.0)); - // // The leg link did not move so we set the expected pose accordingly. - // initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); - - // // Ordering for k=1. - // // This ordering follows the intuition that we eliminate the previous - // // timestep, and then the current timestep. - // ordering = Ordering(); - // ordering += W(0); - // ordering += Z(0); - // ordering += Y(0); - // ordering += X(0); - // ordering += W(1); - // ordering += Z(1); - // ordering += Y(1); - // ordering += X(1); - - // gfg = fg.linearize(initial); - // fg = HybridNonlinearFactorGraph(); - - // // Update without pruning - // // The result is a HybridBayesNet with 1 discrete variable M(1). - // // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, - // M1) - // // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, - // M1) - // // P(Y1 | X1, M1)P(X1 | M1)P(M1) - // // The MHS tree is a 2 level tree for time indices (0, 1) with 2 leaves. - // inc.update(gfg, ordering); - - // /*************** Run Round 3 ***************/ - // // Add odometry factor with discrete modes. 
- // contKeys = {W(1), W(2)}; - // still = boost::make_shared(W(1), W(2), Pose2(0, 0, 0), - // noise_model); - // moving = - // boost::make_shared(W(1), W(2), odometry, - // noise_model); - // components = {moving, still}; - // dcFactor = boost::make_shared>( - // contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); - // fg.push_back(dcFactor); - - // // Add equivalent of ImuFactor - // fg.emplace_nonlinear>(X(1), X(2), Pose2(1.0, 0.0, - // 0), - // poseNoise); - // // PoseFactors-like at k=1 - // fg.emplace_nonlinear>(X(2), Y(2), Pose2(0, 1, 0), - // poseNoise); - // fg.emplace_nonlinear>(Y(2), Z(2), Pose2(0, 1, 0), - // poseNoise); - // fg.emplace_nonlinear>(Z(2), W(2), Pose2(-2, 1, 0), - // poseNoise); - - // initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); - // initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); - // initial.insert(Z(2), Pose2(2.0, 2.0, 0.0)); - // initial.insert(W(2), Pose2(0.0, 3.0, 0.0)); - - // // Ordering at k=2. Same intuition as before. - // ordering = Ordering(); - // ordering += W(1); - // ordering += Z(1); - // ordering += Y(1); - // ordering += X(1); - // ordering += W(2); - // ordering += Z(2); - // ordering += Y(2); - // ordering += X(2); - - // gfg = fg.linearize(initial); - // fg = HybridNonlinearFactorGraph(); - - // // Now we prune! - // // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, - // M1) P(X0 | X1, W1, M1) - // // P(W1|W2, Z1, X1, M1, M2) P(Z1| W2, Y1, X1, M1, - // M2) P(Y1 | W2, X1, M1, M2) - // // P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2) - // P(Z2|Y2, X2, M1, M2) - // // P(Y2 | X2, M1, M2)P(X2 | M1, M2) P(M1, M2) - // // The MHS at this point should be a 3 level tree on (0, 1, 2). - // // 0 has 2 choices, 1 has 4 choices and 2 has 4 choices. - // inc.update(gfg, ordering, 2); - - // /*************** Run Round 4 ***************/ - // // Add odometry factor with discrete modes. - // contKeys = {W(2), W(3)}; - // still = boost::make_shared(W(2), W(3), Pose2(0, 0, 0), - // noise_model); - // moving = - // boost::make_shared(W(2), W(3), odometry, - // noise_model); - // components = {moving, still}; - // dcFactor = boost::make_shared>( - // contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); - // fg.push_back(dcFactor); - - // // Add equivalent of ImuFactor - // fg.emplace_nonlinear>(X(2), X(3), Pose2(1.0, 0.0, - // 0), - // poseNoise); - // // PoseFactors-like at k=3 - // fg.emplace_nonlinear>(X(3), Y(3), Pose2(0, 1, 0), - // poseNoise); - // fg.emplace_nonlinear>(Y(3), Z(3), Pose2(0, 1, 0), - // poseNoise); - // fg.emplace_nonlinear>(Z(3), W(3), Pose2(-3, 1, 0), - // poseNoise); - - // initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); - // initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); - // initial.insert(Z(3), Pose2(3.0, 2.0, 0.0)); - // initial.insert(W(3), Pose2(0.0, 3.0, 0.0)); - - // // Ordering at k=3. Same intuition as before. - // ordering = Ordering(); - // ordering += W(2); - // ordering += Z(2); - // ordering += Y(2); - // ordering += X(2); - // ordering += W(3); - // ordering += Z(3); - // ordering += Y(3); - // ordering += X(3); - - // gfg = fg.linearize(initial); - // fg = HybridNonlinearFactorGraph(); - - // // Keep pruning! - // inc.update(gfg, ordering, 3); - - // // The final discrete graph should not be empty since we have eliminated - // // all continuous variables. - // EXPECT(!inc.remainingDiscreteGraph().empty()); + // Add a prior on pose x1 at the origin. 
+ // A prior factor consists of a mean and + // a noise model (covariance matrix) + Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin + auto priorNoise = noiseModel::Diagonal::Sigmas( + Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta + fg.emplace_nonlinear>(X(0), prior, priorNoise); + + // create a noise model for the landmark measurements + auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1); + + // We model a robot's single leg as X - Y - Z - W + // where X is the base link and W is the foot link. + + // Add connecting poses similar to PoseFactors in GTD + fg.emplace_nonlinear>(X(0), Y(0), Pose2(0, 1.0, 0), + poseNoise); + fg.emplace_nonlinear>(Y(0), Z(0), Pose2(0, 1.0, 0), + poseNoise); + fg.emplace_nonlinear>(Z(0), W(0), Pose2(0, 1.0, 0), + poseNoise); + + // Create initial estimate + Values initial; + initial.insert(X(0), Pose2(0.0, 0.0, 0.0)); + initial.insert(Y(0), Pose2(0.0, 1.0, 0.0)); + initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); + initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); + + HybridGaussianFactorGraph gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); + + HybridGaussianISAM inc; + + // Regular ordering going up the chain. + Ordering ordering; + ordering += W(0); + ordering += Z(0); + ordering += Y(0); + ordering += X(0); + + // Update without pruning + // The result is a HybridBayesNet with no discrete variables + // (equivalent to a GaussianBayesNet). + // Factorization is: + // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)` + // TODO(Varun) ClusterTree-inst.h L202 segfaults with custom ordering. + inc.update(gfg); + + /*************** Run Round 2 ***************/ + using PlanarMotionModel = BetweenFactor; + + // Add odometry factor with discrete modes. + Pose2 odometry(1.0, 0.0, 0.0); + KeyVector contKeys = {W(0), W(1)}; + auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); + auto still = boost::make_shared(W(0), W(1), Pose2(0, 0, 0), + noise_model), + moving = boost::make_shared(W(0), W(1), odometry, + noise_model); + std::vector components = {moving, still}; + auto mixtureFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + fg.push_back(mixtureFactor); + + // Add equivalent of ImuFactor + fg.emplace_nonlinear>(X(0), X(1), Pose2(1.0, 0.0, 0), + poseNoise); + // PoseFactors-like at k=1 + fg.emplace_nonlinear>(X(1), Y(1), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Y(1), Z(1), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Z(1), W(1), Pose2(-1, 1, 0), + poseNoise); + + initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); + initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); + initial.insert(Z(1), Pose2(1.0, 2.0, 0.0)); + // The leg link did not move so we set the expected pose accordingly. + initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); + + gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); + + // Ordering for k=1. + // This ordering follows the intuition that we eliminate the previous + // timestep, and then the current timestep. + ordering = Ordering(); + ordering += W(0); + ordering += Z(0); + ordering += Y(0); + ordering += X(0); + ordering += W(1); + ordering += Z(1); + ordering += Y(1); + ordering += X(1); + + // Update without pruning + // The result is a HybridBayesNet with 1 discrete variable M(1). + // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) + // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, M1) + // P(Y1 | X1, M1)P(X1 | M1)P(M1) + // The MHS tree is a 1 level tree for time indices (1,) with 2 leaves. 
+ // TODO(Varun) ClusterTree-inst.h L202 segfaults with custom ordering. + inc.update(gfg); + + /*************** Run Round 3 ***************/ + // Add odometry factor with discrete modes. + contKeys = {W(1), W(2)}; + still = boost::make_shared(W(1), W(2), Pose2(0, 0, 0), + noise_model); + moving = + boost::make_shared(W(1), W(2), odometry, noise_model); + components = {moving, still}; + mixtureFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); + fg.push_back(mixtureFactor); + + // Add equivalent of ImuFactor + fg.emplace_nonlinear>(X(1), X(2), Pose2(1.0, 0.0, 0), + poseNoise); + // PoseFactors-like at k=1 + fg.emplace_nonlinear>(X(2), Y(2), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Y(2), Z(2), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Z(2), W(2), Pose2(-2, 1, 0), + poseNoise); + + initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); + initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); + initial.insert(Z(2), Pose2(2.0, 2.0, 0.0)); + initial.insert(W(2), Pose2(0.0, 3.0, 0.0)); + + // Ordering at k=2. Same intuition as before. + ordering = Ordering(); + ordering += W(1); + ordering += Z(1); + ordering += Y(1); + ordering += X(1); + ordering += W(2); + ordering += Z(2); + ordering += Y(2); + ordering += X(2); + + gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); + + // Now we prune! + // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) + // P(X0 | X1, W1, M1) P(W1|W2, Z1, X1, M1, M2) + // P(Z1| W2, Y1, X1, M1, M2) P(Y1 | W2, X1, M1, M2) + // P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2) + // P(Z2|Y2, X2, M1, M2) P(Y2 | X2, M1, M2) + // P(X2 | M1, M2) P(M1, M2) + // The MHS at this point should be a 2 level tree on (1, 2). + // 1 has 2 choices, and 2 has 4 choices. + inc.update(gfg); + inc.prune(M(2), 2); + + /*************** Run Round 4 ***************/ + // Add odometry factor with discrete modes. + contKeys = {W(2), W(3)}; + still = boost::make_shared(W(2), W(3), Pose2(0, 0, 0), + noise_model); + moving = + boost::make_shared(W(2), W(3), odometry, noise_model); + components = {moving, still}; + mixtureFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); + fg.push_back(mixtureFactor); + + // Add equivalent of ImuFactor + fg.emplace_nonlinear>(X(2), X(3), Pose2(1.0, 0.0, 0), + poseNoise); + // PoseFactors-like at k=3 + fg.emplace_nonlinear>(X(3), Y(3), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Y(3), Z(3), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Z(3), W(3), Pose2(-3, 1, 0), + poseNoise); + + initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); + initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); + initial.insert(Z(3), Pose2(3.0, 2.0, 0.0)); + initial.insert(W(3), Pose2(0.0, 3.0, 0.0)); + + gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); + + // Ordering at k=3. Same intuition as before. + ordering = Ordering(); + ordering += W(2); + ordering += Z(2); + ordering += Y(2); + ordering += X(2); + ordering += W(3); + ordering += Z(3); + ordering += Y(3); + ordering += X(3); + + // Keep pruning! + inc.update(gfg); + inc.prune(M(3), 3); + inc.print(); + + // The final discrete graph should not be empty since we have eliminated + // all continuous variables. + // EXPECT(!inc.remainingDiscreteGraph().empty()); // // Test if the optimal discrete mode assignment is (1, 1, 1). 
// DiscreteValues optimal_assignment = From 746ca7856df7848e677bf19751a4ea8b0f5a9e59 Mon Sep 17 00:00:00 2001 From: sjxue Date: Thu, 18 Aug 2022 17:50:20 -0400 Subject: [PATCH 64/74] Address review comments --- gtsam/hybrid/HybridLookupDAG.cpp | 7 ++-- python/gtsam/tests/test_HybridFactorGraph.py | 31 +++++++++++++++ python/gtsam/tests/test_HybridValues.py | 41 ++++++++++++++++++++ 3 files changed, 75 insertions(+), 4 deletions(-) create mode 100644 python/gtsam/tests/test_HybridValues.py diff --git a/gtsam/hybrid/HybridLookupDAG.cpp b/gtsam/hybrid/HybridLookupDAG.cpp index 7acff081bf..a322a81776 100644 --- a/gtsam/hybrid/HybridLookupDAG.cpp +++ b/gtsam/hybrid/HybridLookupDAG.cpp @@ -55,8 +55,7 @@ void HybridLookupTable::argmaxInPlace(HybridValues* values) const { } } -// /* ************************************************************************** -// */ +/* ************************************************************************** */ HybridLookupDAG HybridLookupDAG::FromBayesNet(const HybridBayesNet& bayesNet) { HybridLookupDAG dag; for (auto&& conditional : bayesNet) { @@ -66,12 +65,12 @@ HybridLookupDAG HybridLookupDAG::FromBayesNet(const HybridBayesNet& bayesNet) { return dag; } +/* ************************************************************************** */ HybridValues HybridLookupDAG::argmax(HybridValues result) const { // Argmax each node in turn in topological sort order (parents first). for (auto lookupTable : boost::adaptors::reverse(*this)) lookupTable->argmaxInPlace(&result); return result; } -/* ************************************************************************** */ -} // namespace gtsam \ No newline at end of file +} // namespace gtsam diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 781cfd9240..44fb175e8f 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -55,6 +55,37 @@ def test_create(self): discrete_conditional = hbn.at(hbn.size() - 1).inner() self.assertIsInstance(discrete_conditional, gtsam.DiscreteConditional) + def test_optimize(self): + """Test contruction of hybrid factor graph.""" + noiseModel = gtsam.noiseModel.Unit.Create(3) + dk = gtsam.DiscreteKeys() + dk.push_back((C(0), 2)) + + jf1 = gtsam.JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), + noiseModel) + jf2 = gtsam.JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), + noiseModel) + + gmf = gtsam.GaussianMixtureFactor.FromFactors([X(0)], dk, [jf1, jf2]) + + hfg = gtsam.HybridGaussianFactorGraph() + hfg.add(jf1) + hfg.add(jf2) + hfg.push_back(gmf) + + dtf = gtsam.DecisionTreeFactor([(C(0), 2)],"0 1") + hfg.add(dtf) + + hbn = hfg.eliminateSequential( + gtsam.Ordering.ColamdConstrainedLastHybridGaussianFactorGraph( + hfg, [C(0)])) + + # print("hbn = ", hbn) + hv = hbn.optimize() + self.assertEqual(hv.atDiscrete(C(0)), 1) + + self.assertEqual(hv.at(X(0)), np.ones((3, 1))) + if __name__ == "__main__": unittest.main() diff --git a/python/gtsam/tests/test_HybridValues.py b/python/gtsam/tests/test_HybridValues.py new file mode 100644 index 0000000000..63e7c8e7dd --- /dev/null +++ b/python/gtsam/tests/test_HybridValues.py @@ -0,0 +1,41 @@ +""" +GTSAM Copyright 2010-2019, Georgia Tech Research Corporation, +Atlanta, Georgia 30332-0415 +All Rights Reserved + +See LICENSE for the license information + +Unit tests for Hybrid Values. 
+Author: Shangjie Xue
+"""
+# pylint: disable=invalid-name, no-name-in-module, no-member

+from __future__ import print_function

+import unittest

+import gtsam
+import numpy as np
+from gtsam.symbol_shorthand import C, X
+from gtsam.utils.test_case import GtsamTestCase


+class TestHybridValues(GtsamTestCase):
+    """Unit tests for HybridValues."""

+    def test_basic(self):
+        """Test construction and basic methods of hybrid values."""

+        hv1 = gtsam.HybridValues()
+        hv1.insert(X(0), np.ones((3,1)))
+        hv1.insert(C(0), 2)

+        hv2 = gtsam.HybridValues()
+        hv2.insert(C(0), 2)
+        hv2.insert(X(0), np.ones((3,1)))

+        self.assertEqual(hv1.atDiscrete(C(0)), 2)
+        self.assertEqual(hv1.at(X(0))[0], np.ones((3,1))[0])

+if __name__ == "__main__":
+    unittest.main()

From c4184e192b4605303cc0b0d51129e470eb4b4ed1 Mon Sep 17 00:00:00 2001
From: sjxue
Date: Thu, 18 Aug 2022 19:35:06 -0400
Subject: [PATCH 65/74] fix error in python unit test

---
 python/gtsam/tests/test_HybridFactorGraph.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py
index 44fb175e8f..576efa82fd 100644
--- a/python/gtsam/tests/test_HybridFactorGraph.py
+++ b/python/gtsam/tests/test_HybridFactorGraph.py
@@ -84,8 +84,5 @@ def test_optimize(self):
         hv = hbn.optimize()
         self.assertEqual(hv.atDiscrete(C(0)), 1)

-        self.assertEqual(hv.at(X(0)), np.ones((3, 1)))
-
-
 if __name__ == "__main__":
     unittest.main()

From 07f0101db7401be00917c1f2c099cd6326e3369c Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sun, 21 Aug 2022 12:48:33 -0400
Subject: [PATCH 66/74] check subset rather than equality for GaussianISAM pruning

---
 gtsam/hybrid/HybridGaussianISAM.cpp | 18 +++++++++++++++++-
 1 file changed, 17 insertions(+), 1 deletion(-)

diff --git a/gtsam/hybrid/HybridGaussianISAM.cpp b/gtsam/hybrid/HybridGaussianISAM.cpp
index afcda8c208..23a95c021d 100644
--- a/gtsam/hybrid/HybridGaussianISAM.cpp
+++ b/gtsam/hybrid/HybridGaussianISAM.cpp
@@ -105,6 +105,22 @@ void HybridGaussianISAM::update(const HybridGaussianFactorGraph& newFactors,
   this->updateInternal(newFactors, &orphans, ordering, function);
 }

+/* ************************************************************************* */
+/**
+ * @brief Check if `b` is a subset of `a`.
+ * Arguments are passed by value since they need to be sorted.
+ *
+ * @param a The candidate superset, as a KeyVector.
+ * @param b The candidate subset, as a KeyVector.
+ * @return True if the keys of b are a subset of a, else false.
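+ *
+ * For example, IsSubset({1, 3, 2}, {2, 3}) returns true, while
+ * IsSubset({1, 2}, {3}) returns false.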
+ */ +bool IsSubset(KeyVector a, KeyVector b) { + std::sort(a.begin(), a.end()); + std::sort(b.begin(), b.end()); + return std::includes(a.begin(), a.end(), b.begin(), b.end()); +} + +/* ************************************************************************* */ void HybridGaussianISAM::prune(const Key& root, const size_t maxNrLeaves) { auto decisionTree = boost::dynamic_pointer_cast( this->clique(root)->conditional()->inner()); @@ -131,7 +147,7 @@ void HybridGaussianISAM::prune(const Key& root, const size_t maxNrLeaves) { parents.push_back(parent); } - if (parents == decisionTree->keys()) { + if (IsSubset(parents, decisionTree->keys())) { auto gaussianMixture = boost::dynamic_pointer_cast( clique.second->conditional()->inner()); From 29c19ee77b5fda682fabdf110baf1a7f320c87b3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 21 Aug 2022 12:49:13 -0400 Subject: [PATCH 67/74] handle HybridConditional and explicitly set Gaussian Factor Graphs to empty --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 31 +++++++++++++++++++--- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 55fa9a908a..af381de041 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -96,8 +96,15 @@ GaussianMixtureFactor::Sum sumFrontals( } } else if (f->isContinuous()) { - deferredFactors.push_back( - boost::dynamic_pointer_cast(f)->inner()); + // Check if f is HybridConditional or HybridGaussianFactor. + if (auto hc = boost::dynamic_pointer_cast(f)) { + auto conditional = + boost::dynamic_pointer_cast(hc->inner()); + deferredFactors.push_back(conditional); + } else if (auto gf = boost::dynamic_pointer_cast(f) + ->inner()) { + deferredFactors.push_back(gf); + } } else if (f->isDiscrete()) { // Don't do anything for discrete-only factors @@ -184,6 +191,19 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // sum out frontals, this is the factor on the separator GaussianMixtureFactor::Sum sum = sumFrontals(factors); + // If a tree leaf contains nullptr, + // convert that leaf to an empty GaussianFactorGraph. + // Needed since the DecisionTree will otherwise create + // a GFG with a single (null) factor. + auto emptyGaussian = [](const GaussianFactorGraph &gfg) { + bool hasNull = + std::any_of(gfg.begin(), gfg.end(), + [](const GaussianFactor::shared_ptr &ptr) { return !ptr; }); + + return hasNull ? GaussianFactorGraph() : gfg; + }; + sum = GaussianMixtureFactor::Sum(sum, emptyGaussian); + using EliminationPair = GaussianFactorGraph::EliminationResult; KeyVector keysOfEliminated; // Not the ordering @@ -195,7 +215,10 @@ hybridElimination(const HybridGaussianFactorGraph &factors, if (graph.empty()) { return {nullptr, nullptr}; } - auto result = EliminatePreferCholesky(graph, frontalKeys); + std::pair, + boost::shared_ptr> + result = EliminatePreferCholesky(graph, frontalKeys); + if (keysOfEliminated.empty()) { keysOfEliminated = result.first->keys(); // Initialize the keysOfEliminated to be the @@ -235,7 +258,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, boost::make_shared(discreteFactor)}; } else { - // Create a resulting DCGaussianMixture on the separator. + // Create a resulting GaussianMixtureFactor on the separator. 
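+    // The mixture keeps one Gaussian factor per discrete assignment, indexed
+    // by the discrete separator keys.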
auto factor = boost::make_shared( KeyVector(continuousSeparator.begin(), continuousSeparator.end()), discreteSeparator, separatorFactors); From f6df641b191ef7b1e4bd8b867c707abdc86a4549 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 21 Aug 2022 12:56:14 -0400 Subject: [PATCH 68/74] remove custom orderings, let it happen automatically --- gtsam/hybrid/tests/testHybridIncremental.cpp | 111 ++++--------------- 1 file changed, 23 insertions(+), 88 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp index 8feab52216..4449aba0b3 100644 --- a/gtsam/hybrid/tests/testHybridIncremental.cpp +++ b/gtsam/hybrid/tests/testHybridIncremental.cpp @@ -61,11 +61,6 @@ TEST(HybridGaussianElimination, IncrementalElimination) { graph1.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2) graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1) - // Create ordering. - Ordering ordering; - ordering += X(1); - ordering += X(2); - // Run update step isam.update(graph1); @@ -85,11 +80,6 @@ TEST(HybridGaussianElimination, IncrementalElimination) { graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3) graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2) - // Create ordering. - Ordering ordering2; - ordering2 += X(2); - ordering2 += X(3); - isam.update(graph2); // Check that after the second update we have @@ -336,12 +326,6 @@ TEST(HybridGaussianElimination, Incremental_approximate) { graph1.push_back(switching.linearizedFactorGraph.at(i)); } - // Create ordering. - Ordering ordering; - for (size_t j = 1; j <= 4; j++) { - ordering += X(j); - } - // Run update with pruning size_t maxComponents = 5; incrementalHybrid.update(graph1); @@ -364,10 +348,6 @@ TEST(HybridGaussianElimination, Incremental_approximate) { graph2.push_back(switching.linearizedFactorGraph.at(4)); graph2.push_back(switching.linearizedFactorGraph.at(8)); - Ordering ordering2; - ordering2 += X(4); - ordering2 += X(5); - // Run update with pruning a second time. incrementalHybrid.update(graph2); incrementalHybrid.prune(M(4), maxComponents); @@ -382,14 +362,11 @@ TEST(HybridGaussianElimination, Incremental_approximate) { } /* ************************************************************************/ -// Test for figuring out the optimal ordering to ensure we get -// a discrete graph after elimination. +// A GTSAM-only test for running inference on a single-legged robot. +// The leg links are represented by the chain X-Y-Z-W, where X is the base and +// W is the foot. +// We use BetweenFactor as constraints between each of the poses. TEST(HybridGaussianISAM, NonTrivial) { - // This is a GTSAM-only test for running inference on a single legged robot. - // The leg links are represented by the chain X-Y-Z-W, where X is the base and - // W is the foot. - // We use BetweenFactor as constraints between each of the poses. - /*************** Run Round 1 ***************/ HybridNonlinearFactorGraph fg; @@ -427,19 +404,11 @@ TEST(HybridGaussianISAM, NonTrivial) { HybridGaussianISAM inc; - // Regular ordering going up the chain. - Ordering ordering; - ordering += W(0); - ordering += Z(0); - ordering += Y(0); - ordering += X(0); - // Update without pruning // The result is a HybridBayesNet with no discrete variables // (equivalent to a GaussianBayesNet). // Factorization is: // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)` - // TODO(Varun) ClusterTree-inst.h L202 segfaults with custom ordering. 
inc.update(gfg); /*************** Run Round 2 ***************/ @@ -478,26 +447,12 @@ TEST(HybridGaussianISAM, NonTrivial) { gfg = fg.linearize(initial); fg = HybridNonlinearFactorGraph(); - // Ordering for k=1. - // This ordering follows the intuition that we eliminate the previous - // timestep, and then the current timestep. - ordering = Ordering(); - ordering += W(0); - ordering += Z(0); - ordering += Y(0); - ordering += X(0); - ordering += W(1); - ordering += Z(1); - ordering += Y(1); - ordering += X(1); - // Update without pruning // The result is a HybridBayesNet with 1 discrete variable M(1). // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, M1) // P(Y1 | X1, M1)P(X1 | M1)P(M1) // The MHS tree is a 1 level tree for time indices (1,) with 2 leaves. - // TODO(Varun) ClusterTree-inst.h L202 segfaults with custom ordering. inc.update(gfg); /*************** Run Round 3 ***************/ @@ -528,17 +483,6 @@ TEST(HybridGaussianISAM, NonTrivial) { initial.insert(Z(2), Pose2(2.0, 2.0, 0.0)); initial.insert(W(2), Pose2(0.0, 3.0, 0.0)); - // Ordering at k=2. Same intuition as before. - ordering = Ordering(); - ordering += W(1); - ordering += Z(1); - ordering += Y(1); - ordering += X(1); - ordering += W(2); - ordering += Z(2); - ordering += Y(2); - ordering += X(2); - gfg = fg.linearize(initial); fg = HybridNonlinearFactorGraph(); @@ -585,40 +529,31 @@ TEST(HybridGaussianISAM, NonTrivial) { gfg = fg.linearize(initial); fg = HybridNonlinearFactorGraph(); - // Ordering at k=3. Same intuition as before. - ordering = Ordering(); - ordering += W(2); - ordering += Z(2); - ordering += Y(2); - ordering += X(2); - ordering += W(3); - ordering += Z(3); - ordering += Y(3); - ordering += X(3); - // Keep pruning! inc.update(gfg); inc.prune(M(3), 3); - inc.print(); // The final discrete graph should not be empty since we have eliminated // all continuous variables. - // EXPECT(!inc.remainingDiscreteGraph().empty()); - - // // Test if the optimal discrete mode assignment is (1, 1, 1). - // DiscreteValues optimal_assignment = - // inc.remainingDiscreteGraph().optimize(); DiscreteValues - // expected_assignment; expected_assignment[M(1)] = 1; - // expected_assignment[M(2)] = 1; - // expected_assignment[M(3)] = 1; - // EXPECT(assert_equal(expected_assignment, optimal_assignment)); - - // // Test if pruning worked correctly by checking that we only have 3 - // leaves in - // // the last node. - // auto lastConditional = boost::dynamic_pointer_cast( - // inc.hybridBayesNet().at(inc.hybridBayesNet().size() - 1)); - // EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); + auto discreteTree = inc[M(3)]->conditional()->asDiscreteConditional(); + EXPECT_LONGS_EQUAL(3, discreteTree->size()); + + // Test if the optimal discrete mode assignment is (1, 1, 1). + DiscreteFactorGraph discreteGraph; + discreteGraph.push_back(discreteTree); + DiscreteValues optimal_assignment = discreteGraph.optimize(); + + DiscreteValues expected_assignment; + expected_assignment[M(1)] = 1; + expected_assignment[M(2)] = 1; + expected_assignment[M(3)] = 1; + + EXPECT(assert_equal(expected_assignment, optimal_assignment)); + + // Test if pruning worked correctly by checking that we only have 3 leaves in + // the last node. 
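+  // asMixture() retrieves the inner GaussianMixture of the clique's
+  // conditional; nrComponents() counts the leaves of its decision tree.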
+ auto lastConditional = inc[X(3)]->conditional()->asMixture(); + EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); } /* ************************************************************************* */ From 6b792c0a4ca19b750a9c259c1dd84468786884ff Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 21 Aug 2022 12:56:59 -0400 Subject: [PATCH 69/74] add note about sumFrontals --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index af381de041..b57d495d8d 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -77,6 +77,19 @@ static GaussianMixtureFactor::Sum &addGaussian( } /* ************************************************************************ */ +/* Function to eliminate variables **under the following assumptions**: + * 1. When the ordering is fully continuous, and the graph only contains + * continuous and hybrid factors + * 2. When the ordering is fully discrete, and the graph only contains discrete + * factors + * + * Any usage outside of this is considered incorrect. + * + * \warning This function is not meant to be used with arbitrary hybrid factor + * graphs. For example, if there exists continuous parents, and one tries to + * eliminate a discrete variable (as specified in the ordering), the result will + * be INCORRECT and there will be NO error raised. + */ GaussianMixtureFactor::Sum sumFrontals( const HybridGaussianFactorGraph &factors) { // sum out frontals, this is the factor on the separator From 893c5f77f827b6c21af6b04b9c2e236010e7298e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 22 Aug 2022 08:29:32 -0400 Subject: [PATCH 70/74] cast to only HybridGaussianFactor --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 39 +++++++++------------- 1 file changed, 16 insertions(+), 23 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index b57d495d8d..c024c1255f 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -77,19 +77,6 @@ static GaussianMixtureFactor::Sum &addGaussian( } /* ************************************************************************ */ -/* Function to eliminate variables **under the following assumptions**: - * 1. When the ordering is fully continuous, and the graph only contains - * continuous and hybrid factors - * 2. When the ordering is fully discrete, and the graph only contains discrete - * factors - * - * Any usage outside of this is considered incorrect. - * - * \warning This function is not meant to be used with arbitrary hybrid factor - * graphs. For example, if there exists continuous parents, and one tries to - * eliminate a discrete variable (as specified in the ordering), the result will - * be INCORRECT and there will be NO error raised. - */ GaussianMixtureFactor::Sum sumFrontals( const HybridGaussianFactorGraph &factors) { // sum out frontals, this is the factor on the separator @@ -109,15 +96,8 @@ GaussianMixtureFactor::Sum sumFrontals( } } else if (f->isContinuous()) { - // Check if f is HybridConditional or HybridGaussianFactor. 
- if (auto hc = boost::dynamic_pointer_cast(f)) { - auto conditional = - boost::dynamic_pointer_cast(hc->inner()); - deferredFactors.push_back(conditional); - } else if (auto gf = boost::dynamic_pointer_cast(f) - ->inner()) { - deferredFactors.push_back(gf); - } + deferredFactors.push_back( + boost::dynamic_pointer_cast(f)->inner()); } else if (f->isDiscrete()) { // Don't do anything for discrete-only factors @@ -278,7 +258,20 @@ hybridElimination(const HybridGaussianFactorGraph &factors, return {boost::make_shared(conditional), factor}; } } -/* ************************************************************************ */ +/* ************************************************************************ + * Function to eliminate variables **under the following assumptions**: + * 1. When the ordering is fully continuous, and the graph only contains + * continuous and hybrid factors + * 2. When the ordering is fully discrete, and the graph only contains discrete + * factors + * + * Any usage outside of this is considered incorrect. + * + * \warning This function is not meant to be used with arbitrary hybrid factor + * graphs. For example, if there exists continuous parents, and one tries to + * eliminate a discrete variable (as specified in the ordering), the result will + * be INCORRECT and there will be NO error raised. + */ std::pair // EliminateHybrid(const HybridGaussianFactorGraph &factors, const Ordering &frontalKeys) { From 8fd6091a986b2661aaf61d935746cba559cd4ecd Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 22 Aug 2022 17:35:07 -0400 Subject: [PATCH 71/74] add new line --- gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index a3debb3a99..c10310b5ed 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -758,4 +758,4 @@ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ \ No newline at end of file +/* ************************************************************************* */ From b07964b2aa8d52efa53379043851e3a439d89e0b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 22 Aug 2022 17:35:19 -0400 Subject: [PATCH 72/74] name file correctly in doc string --- gtsam/linear/GaussianISAM.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/linear/GaussianISAM.h b/gtsam/linear/GaussianISAM.h index b77b262408..98bf5df884 100644 --- a/gtsam/linear/GaussianISAM.h +++ b/gtsam/linear/GaussianISAM.h @@ -10,7 +10,7 @@ * -------------------------------------------------------------------------- */ /** - * @file SymbolicISAM.h + * @file GaussianISAM.h * @date July 29, 2013 * @author Frank Dellaert * @author Richard Roberts From aa52b39d9b584a5fb0f359217261842a5c8bb0b4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 23 Aug 2022 12:17:11 -0400 Subject: [PATCH 73/74] fix docstring --- gtsam/inference/FactorGraph.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/inference/FactorGraph.h b/gtsam/inference/FactorGraph.h index 101134c838..89fd09037c 100644 --- a/gtsam/inference/FactorGraph.h +++ b/gtsam/inference/FactorGraph.h @@ -116,7 +116,7 @@ class FactorGraph { using HasDerivedValueType = typename std::enable_if< std::is_base_of::value>::type; - /// Check if T has a value_type derived from 
FactorType. + /// Check if T has a pointer type derived from FactorType. template using HasDerivedElementType = typename std::enable_if::value>::type; From df64982d7227142370f97c8600a3608840c72aa3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 23 Aug 2022 12:17:42 -0400 Subject: [PATCH 74/74] add extra tests --- gtsam/hybrid/tests/testHybridLookupDAG.cpp | 2 +- .../tests/testHybridNonlinearFactorGraph.cpp | 15 +++++++++++++++ gtsam/hybrid/tests/testHybridValues.cpp | 2 +- 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridLookupDAG.cpp b/gtsam/hybrid/tests/testHybridLookupDAG.cpp index c472aa22f5..0ab012d104 100644 --- a/gtsam/hybrid/tests/testHybridLookupDAG.cpp +++ b/gtsam/hybrid/tests/testHybridLookupDAG.cpp @@ -269,4 +269,4 @@ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ \ No newline at end of file +/* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index c10310b5ed..018b017a9d 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -188,7 +188,22 @@ TEST(HybridFactorGraph, PushBack) { ghfg = HybridGaussianFactorGraph(); ghfg.push_back(dcFactor); + HybridGaussianFactorGraph hgfg2; + hgfg2.push_back(ghfg.begin(), ghfg.end()); + EXPECT_LONGS_EQUAL(ghfg.size(), 1); + + HybridNonlinearFactorGraph hnfg; + NonlinearFactorGraph factors; + auto noise = noiseModel::Isotropic::Sigma(3, 1.0); + factors.emplace_shared>(0, Pose2(0, 0, 0), noise); + factors.emplace_shared>(1, Pose2(1, 0, 0), noise); + factors.emplace_shared>(2, Pose2(2, 0, 0), noise); + // TODO(Varun) This does not currently work. It should work once HybridFactor + // becomes a base class of NonlinearFactor. + // hnfg.push_back(factors.begin(), factors.end()); + + // EXPECT_LONGS_EQUAL(3, hnfg.size()); } /**************************************************************************** diff --git a/gtsam/hybrid/tests/testHybridValues.cpp b/gtsam/hybrid/tests/testHybridValues.cpp index 9581faaa09..6f510601d4 100644 --- a/gtsam/hybrid/tests/testHybridValues.cpp +++ b/gtsam/hybrid/tests/testHybridValues.cpp @@ -55,4 +55,4 @@ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ \ No newline at end of file +/* ************************************************************************* */
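As a quick end-to-end illustration of the HybridValues API introduced in this
series, here is a minimal sketch. It is not part of the patches: it assumes a
GTSAM build with the hybrid module enabled, and the main() harness is
illustrative only.

#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h>

#include <iostream>

using namespace gtsam;
using symbol_shorthand::C;
using symbol_shorthand::X;

int main() {
  HybridValues hv;
  hv.insert(X(0), Vector3(1.0, 2.0, 3.0));  // continuous value
  hv.insert(C(0), 1);                       // discrete mode

  // at() returns a mutable reference, so the stored vector can be edited.
  hv.at(X(0)) *= 2.0;

  std::cout << "mode = " << hv.atDiscrete(C(0)) << "\n";
  hv.print("hv");  // prints both the discrete and continuous parts
  return 0;
}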