From 5d0bc3191a86b9adb337f88122239c0eae62a973 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 4 Jan 2023 21:41:22 -0800 Subject: [PATCH 1/8] Fix some comments --- .../tests/testHybridGaussianFactorGraph.cpp | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index cc45718751..cab867715f 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -764,13 +764,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny22) { // regression EXPECT_DOUBLES_EQUAL(0.018253037966018862, expected_ratio, 1e-6); - // 3. Do sampling + // Test ratios for a number of independent samples: constexpr int num_samples = 100; for (size_t i = 0; i < num_samples; i++) { - // Sample from the bayes net HybridValues sample = bn.sample(&rng); - - // Check that the ratio is constant. EXPECT_DOUBLES_EQUAL(expected_ratio, compute_ratio(&sample), 1e-6); } } @@ -822,7 +819,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { // Create measurements consistent with moving right every time: const VectorValues measurements{ {Z(0), Vector1(0.0)}, {Z(1), Vector1(1.0)}, {Z(2), Vector1(2.0)}}; - const auto fg = bn.toFactorGraph(measurements); + const HybridGaussianFactorGraph fg = bn.toFactorGraph(measurements); // Create ordering that eliminates in time order, then discrete modes: Ordering ordering; @@ -835,11 +832,11 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { ordering.push_back(M(1)); ordering.push_back(M(2)); - // Test elimination result has correct size: - const auto posterior = fg.eliminateSequential(ordering); + // Do elimination: + const HybridBayesNet::shared_ptr posterior = fg.eliminateSequential(ordering); // GTSAM_PRINT(*posterior); - // Test elimination result has correct size: + // Test resulting posterior Bayes net has correct size: EXPECT_LONGS_EQUAL(8, posterior->size()); // TODO(dellaert): below is copy/pasta from above, refactor @@ -861,13 +858,10 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { // regression EXPECT_DOUBLES_EQUAL(0.0094526745785019472, expected_ratio, 1e-6); - // 3. Do sampling + // Test ratios for a number of independent samples: constexpr int num_samples = 100; for (size_t i = 0; i < num_samples; i++) { - // Sample from the bayes net HybridValues sample = bn.sample(&rng); - - // Check that the ratio is constant. 
EXPECT_DOUBLES_EQUAL(expected_ratio, compute_ratio(&sample), 1e-6); } } From 1d0f87255554d1f9ef120ff6f2ef84d10c721391 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 5 Jan 2023 11:52:24 -0800 Subject: [PATCH 2/8] Use const & --- gtsam/hybrid/HybridConditional.cpp | 6 +++--- gtsam/hybrid/HybridConditional.h | 7 ++++--- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 0bfcfec4da..f10a692aff 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -39,7 +39,7 @@ HybridConditional::HybridConditional(const KeyVector &continuousFrontals, /* ************************************************************************ */ HybridConditional::HybridConditional( - boost::shared_ptr continuousConditional) + const boost::shared_ptr &continuousConditional) : HybridConditional(continuousConditional->keys(), {}, continuousConditional->nrFrontals()) { inner_ = continuousConditional; @@ -47,7 +47,7 @@ HybridConditional::HybridConditional( /* ************************************************************************ */ HybridConditional::HybridConditional( - boost::shared_ptr discreteConditional) + const boost::shared_ptr &discreteConditional) : HybridConditional({}, discreteConditional->discreteKeys(), discreteConditional->nrFrontals()) { inner_ = discreteConditional; @@ -55,7 +55,7 @@ HybridConditional::HybridConditional( /* ************************************************************************ */ HybridConditional::HybridConditional( - boost::shared_ptr gaussianMixture) + const boost::shared_ptr &gaussianMixture) : BaseFactor(KeyVector(gaussianMixture->keys().begin(), gaussianMixture->keys().begin() + gaussianMixture->nrContinuous()), diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index 6c2f4a65bd..6199fe7b0b 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -111,7 +111,7 @@ class GTSAM_EXPORT HybridConditional * HybridConditional. */ HybridConditional( - boost::shared_ptr continuousConditional); + const boost::shared_ptr& continuousConditional); /** * @brief Construct a new Hybrid Conditional object @@ -119,7 +119,8 @@ class GTSAM_EXPORT HybridConditional * @param discreteConditional Conditional used to create the * HybridConditional. */ - HybridConditional(boost::shared_ptr discreteConditional); + HybridConditional( + const boost::shared_ptr& discreteConditional); /** * @brief Construct a new Hybrid Conditional object @@ -127,7 +128,7 @@ class GTSAM_EXPORT HybridConditional * @param gaussianMixture Gaussian Mixture Conditional used to create the * HybridConditional. 
*/ - HybridConditional(boost::shared_ptr gaussianMixture); + HybridConditional(const boost::shared_ptr& gaussianMixture); /// @} /// @name Testable From 04c7d2edb2838b04c86682ceeb1b4f3b825067a0 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 5 Jan 2023 11:52:32 -0800 Subject: [PATCH 3/8] Simplified API --- gtsam/hybrid/HybridBayesNet.cpp | 18 +---------- gtsam/hybrid/HybridBayesNet.h | 55 ++++++++------------------------- 2 files changed, 14 insertions(+), 59 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index e01fcbdcf2..4c61085d7f 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -197,8 +197,7 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { prunedGaussianMixture->prune(*decisionTree); // imperative :-( // Type-erase and add to the pruned Bayes Net fragment. - prunedBayesNetFragment.push_back( - boost::make_shared(prunedGaussianMixture)); + prunedBayesNetFragment.push_back(prunedGaussianMixture); } else { // Add the non-GaussianMixture conditional @@ -209,21 +208,6 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { return prunedBayesNetFragment; } -/* ************************************************************************* */ -GaussianMixture::shared_ptr HybridBayesNet::atMixture(size_t i) const { - return at(i)->asMixture(); -} - -/* ************************************************************************* */ -GaussianConditional::shared_ptr HybridBayesNet::atGaussian(size_t i) const { - return at(i)->asGaussian(); -} - -/* ************************************************************************* */ -DiscreteConditional::shared_ptr HybridBayesNet::atDiscrete(size_t i) const { - return at(i)->asDiscrete(); -} - /* ************************************************************************* */ GaussianBayesNet HybridBayesNet::choose( const DiscreteValues &assignment) const { diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index de0a38271c..8d0671c9d6 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -63,55 +63,26 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { /// @{ /// Add HybridConditional to Bayes Net - using Base::add; + using Base::emplace_shared; - /// Add a Gaussian Mixture to the Bayes Net. - void addMixture(const GaussianMixture::shared_ptr &ptr) { - push_back(HybridConditional(ptr)); + /// Add a conditional directly using a pointer. + template + void emplace_back(Conditional *conditional) { + factors_.push_back(boost::make_shared( + boost::shared_ptr(conditional))); } - /// Add a Gaussian conditional to the Bayes Net. - void addGaussian(const GaussianConditional::shared_ptr &ptr) { - push_back(HybridConditional(ptr)); + /// Add a conditional directly using a shared_ptr. + void push_back(boost::shared_ptr conditional) { + factors_.push_back(conditional); } - /// Add a discrete conditional to the Bayes Net. - void addDiscrete(const DiscreteConditional::shared_ptr &ptr) { - push_back(HybridConditional(ptr)); + /// Add a conditional directly using implicit conversion. + void push_back(HybridConditional &&conditional) { + factors_.push_back( + boost::make_shared(std::move(conditional))); } - /// Add a Gaussian Mixture to the Bayes Net. - template - void emplaceMixture(T &&...args) { - push_back(HybridConditional( - boost::make_shared(std::forward(args)...))); - } - - /// Add a Gaussian conditional to the Bayes Net. 
- template - void emplaceGaussian(T &&...args) { - push_back(HybridConditional( - boost::make_shared(std::forward(args)...))); - } - - /// Add a discrete conditional to the Bayes Net. - template - void emplaceDiscrete(T &&...args) { - push_back(HybridConditional( - boost::make_shared(std::forward(args)...))); - } - - using Base::push_back; - - /// Get a specific Gaussian mixture by index `i`. - GaussianMixture::shared_ptr atMixture(size_t i) const; - - /// Get a specific Gaussian conditional by index `i`. - GaussianConditional::shared_ptr atGaussian(size_t i) const; - - /// Get a specific discrete conditional by index `i`. - DiscreteConditional::shared_ptr atDiscrete(size_t i) const; - /** * @brief Get the Gaussian Bayes Net which corresponds to a specific discrete * value assignment. From 53cb6a4e160e0ea7eddda751620e291b4a32ea0d Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 5 Jan 2023 11:52:48 -0800 Subject: [PATCH 4/8] Rvalues --- gtsam/hybrid/GaussianMixture.cpp | 8 ++++---- gtsam/hybrid/GaussianMixture.h | 7 +++---- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 12e88f81d8..a8d3ca502b 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -44,11 +44,11 @@ const GaussianMixture::Conditionals &GaussianMixture::conditionals() const { /* *******************************************************************************/ GaussianMixture::GaussianMixture( - const KeyVector &continuousFrontals, const KeyVector &continuousParents, - const DiscreteKeys &discreteParents, - const std::vector &conditionalsList) + KeyVector &&continuousFrontals, KeyVector &&continuousParents, + DiscreteKeys &&discreteParents, + std::vector &&conditionals) : GaussianMixture(continuousFrontals, continuousParents, discreteParents, - Conditionals(discreteParents, conditionalsList)) {} + Conditionals(discreteParents, conditionals)) {} /* *******************************************************************************/ GaussianFactorGraphTree GaussianMixture::add( diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index ba84b5ade3..8077059a4e 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -112,10 +112,9 @@ class GTSAM_EXPORT GaussianMixture * @param discreteParents Discrete parents variables * @param conditionals List of conditionals */ - GaussianMixture( - const KeyVector &continuousFrontals, const KeyVector &continuousParents, - const DiscreteKeys &discreteParents, - const std::vector &conditionals); + GaussianMixture(KeyVector &&continuousFrontals, KeyVector &&continuousParents, + DiscreteKeys &&discreteParents, + std::vector &&conditionals); /// @} /// @name Testable From 9e9172b17a87a1aa4af3a0435f6c6b393617c931 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 5 Jan 2023 11:52:56 -0800 Subject: [PATCH 5/8] Fix all tests --- gtsam/hybrid/HybridSmoother.cpp | 4 +- gtsam/hybrid/tests/TinyHybridExample.h | 16 +++---- gtsam/hybrid/tests/testHybridBayesNet.cpp | 38 +++++++++-------- gtsam/hybrid/tests/testHybridEstimation.cpp | 2 +- .../tests/testHybridGaussianFactorGraph.cpp | 42 +++++++++---------- 5 files changed, 52 insertions(+), 50 deletions(-) diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp index ef77a24131..35dd5f88bc 100644 --- a/gtsam/hybrid/HybridSmoother.cpp +++ b/gtsam/hybrid/HybridSmoother.cpp @@ -46,7 +46,7 @@ void HybridSmoother::update(HybridGaussianFactorGraph graph, } // Add 
the partial bayes net to the posterior bayes net. - hybridBayesNet_.push_back(*bayesNetFragment); + hybridBayesNet_.add(*bayesNetFragment); } /* ************************************************************************* */ @@ -100,7 +100,7 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph, /* ************************************************************************* */ GaussianMixture::shared_ptr HybridSmoother::gaussianMixture( size_t index) const { - return hybridBayesNet_.atMixture(index); + return hybridBayesNet_.at(index)->asMixture(); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index a427d20429..c34d55bf02 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -43,22 +43,22 @@ inline HybridBayesNet createHybridBayesNet(int num_measurements = 1, // Create Gaussian mixture z_i = x0 + noise for each measurement. for (int i = 0; i < num_measurements; i++) { const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode; - GaussianMixture gm({Z(i)}, {X(0)}, {mode_i}, - {GaussianConditional::sharedMeanAndStddev( - Z(i), I_1x1, X(0), Z_1x1, 0.5), - GaussianConditional::sharedMeanAndStddev( - Z(i), I_1x1, X(0), Z_1x1, 3)}); - bayesNet.emplaceMixture(gm); // copy :-( + bayesNet.emplace_back( + new GaussianMixture({Z(i)}, {X(0)}, {mode_i}, + {GaussianConditional::sharedMeanAndStddev( + Z(i), I_1x1, X(0), Z_1x1, 0.5), + GaussianConditional::sharedMeanAndStddev( + Z(i), I_1x1, X(0), Z_1x1, 3)})); } // Create prior on X(0). - bayesNet.addGaussian( + bayesNet.push_back( GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5)); // Add prior on mode. const size_t nrModes = manyModes ? num_measurements : 1; for (int i = 0; i < nrModes; i++) { - bayesNet.emplaceDiscrete(DiscreteKey{M(i), 2}, "4/6"); + bayesNet.emplace_back(new DiscreteConditional({M(i), 2}, "4/6")); } return bayesNet; } diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index ef552bd92a..7d1b9130d9 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -42,21 +42,21 @@ static const DiscreteKey Asia(asiaKey, 2); // Test creation of a pure discrete Bayes net. TEST(HybridBayesNet, Creation) { HybridBayesNet bayesNet; - bayesNet.emplaceDiscrete(Asia, "99/1"); + bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1")); DiscreteConditional expected(Asia, "99/1"); - CHECK(bayesNet.atDiscrete(0)); - EXPECT(assert_equal(expected, *bayesNet.atDiscrete(0))); + CHECK(bayesNet.at(0)->asDiscrete()); + EXPECT(assert_equal(expected, *bayesNet.at(0)->asDiscrete())); } /* ****************************************************************************/ // Test adding a Bayes net to another one. TEST(HybridBayesNet, Add) { HybridBayesNet bayesNet; - bayesNet.emplaceDiscrete(Asia, "99/1"); + bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1")); HybridBayesNet other; - other.push_back(bayesNet); + other.add(bayesNet); EXPECT(bayesNet.equals(other)); } @@ -64,7 +64,7 @@ TEST(HybridBayesNet, Add) { // Test evaluate for a pure discrete Bayes net P(Asia). 
TEST(HybridBayesNet, EvaluatePureDiscrete) { HybridBayesNet bayesNet; - bayesNet.emplaceDiscrete(Asia, "99/1"); + bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1")); HybridValues values; values.insert(asiaKey, 0); EXPECT_DOUBLES_EQUAL(0.99, bayesNet.evaluate(values), 1e-9); @@ -80,7 +80,7 @@ TEST(HybridBayesNet, Tiny) { /* ****************************************************************************/ // Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia). TEST(HybridBayesNet, evaluateHybrid) { - const auto continuousConditional = GaussianConditional::FromMeanAndStddev( + const auto continuousConditional = GaussianConditional::sharedMeanAndStddev( X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0); const SharedDiagonal model0 = noiseModel::Diagonal::Sigmas(Vector1(2.0)), @@ -93,10 +93,11 @@ TEST(HybridBayesNet, evaluateHybrid) { // Create hybrid Bayes net. HybridBayesNet bayesNet; - bayesNet.emplaceGaussian(continuousConditional); - GaussianMixture gm({X(1)}, {}, {Asia}, {conditional0, conditional1}); - bayesNet.emplaceMixture(gm); // copy :-( - bayesNet.emplaceDiscrete(Asia, "99/1"); + bayesNet.push_back(GaussianConditional::sharedMeanAndStddev( + X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0)); + bayesNet.emplace_back( + new GaussianMixture({X(1)}, {}, {Asia}, {conditional0, conditional1})); + bayesNet.emplace_back(new DiscreteConditional(Asia, "99/1")); // Create values at which to evaluate. HybridValues values; @@ -105,7 +106,7 @@ TEST(HybridBayesNet, evaluateHybrid) { values.insert(X(1), Vector1(1)); const double conditionalProbability = - continuousConditional.evaluate(values.continuous()); + continuousConditional->evaluate(values.continuous()); const double mixtureProbability = conditional0->evaluate(values.continuous()); EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99, bayesNet.evaluate(values), 1e-9); @@ -136,16 +137,16 @@ TEST(HybridBayesNet, Choose) { EXPECT_LONGS_EQUAL(4, gbn.size()); EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( - hybridBayesNet->atMixture(0)))(assignment), + hybridBayesNet->at(0)->asMixture()))(assignment), *gbn.at(0))); EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( - hybridBayesNet->atMixture(1)))(assignment), + hybridBayesNet->at(1)->asMixture()))(assignment), *gbn.at(1))); EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( - hybridBayesNet->atMixture(2)))(assignment), + hybridBayesNet->at(2)->asMixture()))(assignment), *gbn.at(2))); EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( - hybridBayesNet->atMixture(3)))(assignment), + hybridBayesNet->at(3)->asMixture()))(assignment), *gbn.at(3))); } @@ -247,11 +248,12 @@ TEST(HybridBayesNet, Error) { double total_error = 0; for (size_t idx = 0; idx < hybridBayesNet->size(); idx++) { if (hybridBayesNet->at(idx)->isHybrid()) { - double error = hybridBayesNet->atMixture(idx)->error( + double error = hybridBayesNet->at(idx)->asMixture()->error( {delta.continuous(), discrete_values}); total_error += error; } else if (hybridBayesNet->at(idx)->isContinuous()) { - double error = hybridBayesNet->atGaussian(idx)->error(delta.continuous()); + double error = + hybridBayesNet->at(idx)->asGaussian()->error(delta.continuous()); total_error += error; } } diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 4a53a31412..84f686c599 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -310,7 +310,7 @@ TEST(HybridEstimation, Probability) { for (auto discrete_conditional : 
*discreteBayesNet) { bayesNet->add(discrete_conditional); } - auto discreteConditional = discreteBayesNet->atDiscrete(0); + auto discreteConditional = discreteBayesNet->at(0)->asDiscrete(); HybridValues hybrid_values = bayesNet->optimize(); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index cab867715f..422cdf64ec 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -677,11 +677,11 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { X(0), Vector1(14.1421), I_1x1 * 2.82843), conditional1 = boost::make_shared( X(0), Vector1(10.1379), I_1x1 * 2.02759); - GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1}); - expectedBayesNet.emplaceMixture(gm); // copy :-( + expectedBayesNet.emplace_back( + new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1})); // Add prior on mode. - expectedBayesNet.emplaceDiscrete(mode, "74/26"); + expectedBayesNet.emplace_back(new DiscreteConditional(mode, "74/26")); // Test elimination Ordering ordering; @@ -712,11 +712,11 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { X(0), Vector1(17.3205), I_1x1 * 3.4641), conditional1 = boost::make_shared( X(0), Vector1(10.274), I_1x1 * 2.0548); - GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1}); - expectedBayesNet.emplaceMixture(gm); // copy :-( + expectedBayesNet.emplace_back( + new GaussianMixture({X(0)}, {}, {mode}, {conditional0, conditional1})); // Add prior on mode. - expectedBayesNet.emplaceDiscrete(mode, "23/77"); + expectedBayesNet.emplace_back(new DiscreteConditional(mode, "23/77")); // Test elimination Ordering ordering; @@ -784,34 +784,34 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { for (size_t t : {0, 1, 2}) { // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t): const auto noise_mode_t = DiscreteKey{N(t), 2}; - GaussianMixture gm({Z(t)}, {X(t)}, {noise_mode_t}, - {GaussianConditional::sharedMeanAndStddev( - Z(t), I_1x1, X(t), Z_1x1, 0.5), - GaussianConditional::sharedMeanAndStddev( - Z(t), I_1x1, X(t), Z_1x1, 3.0)}); - bn.emplaceMixture(gm); // copy :-( + bn.emplace_back( + new GaussianMixture({Z(t)}, {X(t)}, {noise_mode_t}, + {GaussianConditional::sharedMeanAndStddev( + Z(t), I_1x1, X(t), Z_1x1, 0.5), + GaussianConditional::sharedMeanAndStddev( + Z(t), I_1x1, X(t), Z_1x1, 3.0)})); // Create prior on discrete mode M(t): - bn.emplaceDiscrete(noise_mode_t, "20/80"); + bn.emplace_back(new DiscreteConditional(noise_mode_t, "20/80")); } // Add motion models: for (size_t t : {2, 1}) { // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1): const auto motion_model_t = DiscreteKey{M(t), 2}; - GaussianMixture gm({X(t)}, {X(t - 1)}, {motion_model_t}, - {GaussianConditional::sharedMeanAndStddev( - X(t), I_1x1, X(t - 1), Z_1x1, 0.2), - GaussianConditional::sharedMeanAndStddev( - X(t), I_1x1, X(t - 1), I_1x1, 0.2)}); - bn.emplaceMixture(gm); // copy :-( + bn.emplace_back( + new GaussianMixture({X(t)}, {X(t - 1)}, {motion_model_t}, + {GaussianConditional::sharedMeanAndStddev( + X(t), I_1x1, X(t - 1), Z_1x1, 0.2), + GaussianConditional::sharedMeanAndStddev( + X(t), I_1x1, X(t - 1), I_1x1, 0.2)})); // Create prior on motion model M(t): - bn.emplaceDiscrete(motion_model_t, "40/60"); + bn.emplace_back(new DiscreteConditional(motion_model_t, "40/60")); } // Create Gaussian prior on continuous X(0) using sharedMeanAndStddev: - 
bn.addGaussian(GaussianConditional::sharedMeanAndStddev(X(0), Z_1x1, 0.1)); + bn.push_back(GaussianConditional::sharedMeanAndStddev(X(0), Z_1x1, 0.1)); // Make sure we an sample from the Bayes net: EXPECT_LONGS_EQUAL(6, bn.sample().continuous().size()); From dc34fba9c8f2379f23909824c660efd07d6bbe1e Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 5 Jan 2023 12:27:08 -0800 Subject: [PATCH 6/8] remove unnecessary cast --- gtsam/hybrid/tests/testHybridBayesNet.cpp | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 7d1b9130d9..0f0a85516d 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -136,17 +136,13 @@ TEST(HybridBayesNet, Choose) { EXPECT_LONGS_EQUAL(4, gbn.size()); - EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( - hybridBayesNet->at(0)->asMixture()))(assignment), + EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asMixture())(assignment), *gbn.at(0))); - EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( - hybridBayesNet->at(1)->asMixture()))(assignment), + EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asMixture())(assignment), *gbn.at(1))); - EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( - hybridBayesNet->at(2)->asMixture()))(assignment), + EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asMixture())(assignment), *gbn.at(2))); - EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( - hybridBayesNet->at(3)->asMixture()))(assignment), + EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asMixture())(assignment), *gbn.at(3))); } From 8b8cde42307f02245518b5a312d10363657e3e6a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 5 Jan 2023 12:58:21 -0800 Subject: [PATCH 7/8] Added lvalue version back in --- gtsam/hybrid/GaussianMixture.cpp | 8 ++++++++ gtsam/hybrid/GaussianMixture.h | 13 +++++++++++++ 2 files changed, 21 insertions(+) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index a8d3ca502b..bac1285e12 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -50,6 +50,14 @@ GaussianMixture::GaussianMixture( : GaussianMixture(continuousFrontals, continuousParents, discreteParents, Conditionals(discreteParents, conditionals)) {} +/* *******************************************************************************/ +GaussianMixture::GaussianMixture( + const KeyVector &continuousFrontals, const KeyVector &continuousParents, + const DiscreteKeys &discreteParents, + const std::vector &conditionals) + : GaussianMixture(continuousFrontals, continuousParents, discreteParents, + Conditionals(discreteParents, conditionals)) {} + /* *******************************************************************************/ GaussianFactorGraphTree GaussianMixture::add( const GaussianFactorGraphTree &sum) const { diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index 8077059a4e..79f5f8fa7a 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -116,6 +116,19 @@ class GTSAM_EXPORT GaussianMixture DiscreteKeys &&discreteParents, std::vector &&conditionals); + /** + * @brief Make a Gaussian Mixture from a list of Gaussian conditionals + * + * @param continuousFrontals The continuous frontal variables + * @param continuousParents The continuous parent variables + * @param discreteParents Discrete parents variables + * @param conditionals List of conditionals + */ + GaussianMixture( + const KeyVector 
&continuousFrontals, const KeyVector &continuousParents, + const DiscreteKeys &discreteParents, + const std::vector &conditionals); + /// @} /// @name Testable /// @{ From d49bcce7808560e32e1b2725e54200792184c508 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Thu, 5 Jan 2023 12:58:29 -0800 Subject: [PATCH 8/8] Fix python wrapper --- gtsam/hybrid/hybrid.i | 26 +++----------------- python/gtsam/tests/test_HybridBayesNet.py | 16 ++++++------ python/gtsam/tests/test_HybridFactorGraph.py | 14 +++++------ 3 files changed, 18 insertions(+), 38 deletions(-) diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 87aa3bc149..e877e5ee7c 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -135,29 +135,9 @@ class HybridBayesTree { #include class HybridBayesNet { HybridBayesNet(); - void add(const gtsam::HybridConditional& s); - void addMixture(const gtsam::GaussianMixture* s); - void addGaussian(const gtsam::GaussianConditional* s); - void addDiscrete(const gtsam::DiscreteConditional* s); - - void emplaceMixture(const gtsam::GaussianMixture& s); - void emplaceMixture(const gtsam::KeyVector& continuousFrontals, - const gtsam::KeyVector& continuousParents, - const gtsam::DiscreteKeys& discreteParents, - const std::vector& - conditionalsList); - void emplaceGaussian(const gtsam::GaussianConditional& s); - void emplaceDiscrete(const gtsam::DiscreteConditional& s); - void emplaceDiscrete(const gtsam::DiscreteKey& key, string spec); - void emplaceDiscrete(const gtsam::DiscreteKey& key, - const gtsam::DiscreteKeys& parents, string spec); - void emplaceDiscrete(const gtsam::DiscreteKey& key, - const std::vector& parents, - string spec); - - gtsam::GaussianMixture* atMixture(size_t i) const; - gtsam::GaussianConditional* atGaussian(size_t i) const; - gtsam::DiscreteConditional* atDiscrete(size_t i) const; + void push_back(const gtsam::GaussianMixture* s); + void push_back(const gtsam::GaussianConditional* s); + void push_back(const gtsam::DiscreteConditional* s); bool empty() const; size_t size() const; diff --git a/python/gtsam/tests/test_HybridBayesNet.py b/python/gtsam/tests/test_HybridBayesNet.py index af89a4ba7c..75a2e9f8b6 100644 --- a/python/gtsam/tests/test_HybridBayesNet.py +++ b/python/gtsam/tests/test_HybridBayesNet.py @@ -16,13 +16,13 @@ from gtsam.symbol_shorthand import A, X from gtsam.utils.test_case import GtsamTestCase -import gtsam -from gtsam import (DiscreteKeys, GaussianConditional, GaussianMixture, +from gtsam import (DiscreteKeys, GaussianMixture, DiscreteConditional, GaussianConditional, GaussianMixture, HybridBayesNet, HybridValues, noiseModel) class TestHybridBayesNet(GtsamTestCase): """Unit tests for HybridValues.""" + def test_evaluate(self): """Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).""" asiaKey = A(0) @@ -40,15 +40,15 @@ def test_evaluate(self): # Create the conditionals conditional0 = GaussianConditional(X(1), [5], I_1x1, model0) conditional1 = GaussianConditional(X(1), [2], I_1x1, model1) - dkeys = DiscreteKeys() - dkeys.push_back(Asia) - gm = GaussianMixture([X(1)], [], dkeys, [conditional0, conditional1]) + discrete_keys = DiscreteKeys() + discrete_keys.push_back(Asia) # Create hybrid Bayes net. 
bayesNet = HybridBayesNet() - bayesNet.addGaussian(gc) - bayesNet.addMixture(gm) - bayesNet.emplaceDiscrete(Asia, "99/1") + bayesNet.push_back(gc) + bayesNet.push_back(GaussianMixture( + [X(1)], [], discrete_keys, [conditional0, conditional1])) + bayesNet.push_back(DiscreteConditional(Asia, "99/1")) # Create values at which to evaluate. values = HybridValues() diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index f83b954425..956a6f5720 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -108,16 +108,16 @@ def tiny(num_measurements: int = 1, prior_mean: float = 5.0, I_1x1, X(0), [0], sigma=3) - bayesNet.emplaceMixture([Z(i)], [X(0)], keys, - [conditional0, conditional1]) + bayesNet.push_back(GaussianMixture([Z(i)], [X(0)], keys, + [conditional0, conditional1])) # Create prior on X(0). prior_on_x0 = GaussianConditional.FromMeanAndStddev( X(0), [prior_mean], prior_sigma) - bayesNet.addGaussian(prior_on_x0) + bayesNet.push_back(prior_on_x0) # Add prior on mode. - bayesNet.emplaceDiscrete(mode, "4/6") + bayesNet.push_back(DiscreteConditional(mode, "4/6")) return bayesNet @@ -163,11 +163,11 @@ def factor_graph_from_bayes_net(cls, bayesNet: HybridBayesNet, fg = HybridGaussianFactorGraph() num_measurements = bayesNet.size() - 2 for i in range(num_measurements): - conditional = bayesNet.atMixture(i) + conditional = bayesNet.at(i).asMixture() factor = conditional.likelihood(cls.measurements(sample, [i])) fg.push_back(factor) - fg.push_back(bayesNet.atGaussian(num_measurements)) - fg.push_back(bayesNet.atDiscrete(num_measurements+1)) + fg.push_back(bayesNet.at(num_measurements).asGaussian()) + fg.push_back(bayesNet.at(num_measurements+1).asDiscrete()) return fg @classmethod
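
Taken together, PATCH 3, PATCH 5 and PATCH 8 replace the type-specific HybridBayesNet methods (addMixture/addGaussian/addDiscrete, emplaceMixture/emplaceGaussian/emplaceDiscrete, atMixture/atGaussian/atDiscrete) with generic push_back/emplace_back overloads plus at(i)->asMixture()/asGaussian()/asDiscrete() accessors. As a reviewer reference, below is a minimal C++ sketch of the new calling pattern, adapted from the TinyHybridExample changes in PATCH 5; it is not part of the patch series, and the helper name makeTinyNet as well as the concrete keys and numeric values are illustrative only.

#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;

// Illustrative helper (name invented for this sketch): build a tiny hybrid
// Bayes net with the simplified API.
HybridBayesNet makeTinyNet() {
  const DiscreteKey mode{M(0), 2};
  HybridBayesNet bayesNet;

  // emplace_back takes ownership of a raw conditional pointer:
  bayesNet.emplace_back(new GaussianMixture(
      {Z(0)}, {X(0)}, {mode},
      {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5),
       GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3.0)}));

  // push_back accepts a shared_ptr, converted implicitly to HybridConditional:
  bayesNet.push_back(
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5));
  bayesNet.emplace_back(new DiscreteConditional(mode, "4/6"));

  // The removed atMixture/atGaussian/atDiscrete getters are replaced by
  // at(i) followed by the corresponding as...() cast:
  GaussianMixture::shared_ptr gm = bayesNet.at(0)->asMixture();
  DiscreteConditional::shared_ptr dc = bayesNet.at(2)->asDiscrete();

  return bayesNet;
}

The single pair of push_back/emplace_back overloads suffices because HybridConditional keeps its implicit converting constructors from the individual conditional types (taking const shared_ptr& after PATCH 2), so the Bayes net no longer needs one add/emplace method per conditional type.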