From 99825fcfe20f4957fcd983586d1aa98b3ca40f78 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 31 Dec 2022 12:24:58 +0100 Subject: [PATCH 01/77] use FactorAndConstant error() --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 6af0fb1a90..b35728f14f 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -235,6 +235,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, gttoc_(hybrid_eliminate); #endif + //TODO(Varun) The normalizing constant has to be computed correctly return {conditional_factor.first, {conditional_factor.second, 0.0}}; }; @@ -257,7 +258,6 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // If there are no more continuous parents, then we should create here a // DiscreteFactor, with the error for each discrete choice. if (keysOfSeparator.empty()) { - VectorValues empty_values; auto factorProb = [&](const GaussianMixtureFactor::FactorAndConstant &factor_z) { GaussianFactor::shared_ptr factor = factor_z.factor; @@ -265,9 +265,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, return 0.0; // If nullptr, return 0.0 probability } else { // This is the probability q(μ) at the MLE point. - double error = - 0.5 * std::abs(factor->augmentedInformation().determinant()) + - factor_z.constant; + double error = factor_z.error(VectorValues()); return std::exp(-error); } }; From 383439efcf500ea94219c70344e2ebcdb68d1b7c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 31 Dec 2022 12:25:17 +0100 Subject: [PATCH 02/77] make DecisionTree docstrings clearer --- gtsam/discrete/DecisionTree.h | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/gtsam/discrete/DecisionTree.h b/gtsam/discrete/DecisionTree.h index 957a4eb480..01a57637f6 100644 --- a/gtsam/discrete/DecisionTree.h +++ b/gtsam/discrete/DecisionTree.h @@ -236,7 +236,7 @@ namespace gtsam { /** * @brief Visit all leaves in depth-first fashion. * - * @param f (side-effect) Function taking a value. + * @param f (side-effect) Function taking the value of the leaf node. * * @note Due to pruning, the number of leaves may not be the same as the * number of assignments. E.g. if we have a tree on 2 binary variables with @@ -245,7 +245,7 @@ namespace gtsam { * Example: * int sum = 0; * auto visitor = [&](int y) { sum += y; }; - * tree.visitWith(visitor); + * tree.visit(visitor); */ template void visit(Func f) const; @@ -261,8 +261,8 @@ namespace gtsam { * * Example: * int sum = 0; - * auto visitor = [&](int y) { sum += y; }; - * tree.visitWith(visitor); + * auto visitor = [&](const Leaf& leaf) { sum += leaf.constant(); }; + * tree.visitLeaf(visitor); */ template void visitLeaf(Func f) const; From 02d7b877a3033fa957860c6f9a0f85dbe1bf1666 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 31 Dec 2022 12:25:27 +0100 Subject: [PATCH 03/77] fix docstring --- gtsam/hybrid/HybridGaussianFactor.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index dc2f62857c..9856af93c0 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -99,7 +99,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// Return pointer to the internal Gaussian factor. GaussianFactor::shared_ptr inner() const { return inner_; } - /// Return the error of the underlying Discrete Factor. + /// Return the error of the underlying Gaussian factor. double error(const HybridValues &values) const override; /// @} }; From 878eeb5d73d598904dcf7bb578bf6756fc8b71ba Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 31 Dec 2022 12:33:37 +0100 Subject: [PATCH 04/77] simplify the error addition --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index b35728f14f..2170e7b683 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -235,7 +235,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, gttoc_(hybrid_eliminate); #endif - //TODO(Varun) The normalizing constant has to be computed correctly + // TODO(Varun) The normalizing constant has to be computed correctly return {conditional_factor.first, {conditional_factor.second, 0.0}}; }; @@ -461,15 +461,8 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::error( // If factor is hybrid, select based on assignment. GaussianMixtureFactor::shared_ptr gaussianMixture = boost::static_pointer_cast(factors_.at(idx)); - // Compute factor error. - factor_error = gaussianMixture->error(continuousValues); - - // If first factor, assign error, else add it. - if (idx == 0) { - error_tree = factor_error; - } else { - error_tree = error_tree + factor_error; - } + // Compute factor error and add it. + error_tree = error_tree + gaussianMixture->error(continuousValues); } else if (factors_.at(idx)->isContinuous()) { // If continuous only, get the (double) error From d0a56da726e760734213ee7870c128fc14fa8951 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 31 Dec 2022 14:43:48 +0100 Subject: [PATCH 05/77] add logNormalizingConstant test for GaussianConditional and make some doc fixes --- gtsam/linear/GaussianConditional.cpp | 4 +-- .../linear/tests/testGaussianConditional.cpp | 25 +++++++++++++++++++ 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index ecfa022825..3174c2f44d 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -67,7 +67,7 @@ namespace gtsam { GaussianConditional GaussianConditional::FromMeanAndStddev(Key key, const Vector& mu, double sigma) { - // |Rx - d| = |x-(Ay + b)|/sigma + // |Rx - d| = |x - mu|/sigma const Matrix R = Matrix::Identity(mu.size(), mu.size()); const Vector& d = mu; return GaussianConditional(key, d, R, @@ -189,7 +189,7 @@ double GaussianConditional::logNormalizationConstant() const { /* ************************************************************************* */ // density = k exp(-error(x)) -// log = log(k) -error(x) - 0.5 * n*log(2*pi) +// log = log(k) -error(x) double GaussianConditional::logDensity(const VectorValues& x) const { return logNormalizationConstant() - error(x); } diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index 20d7308560..144269b449 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -466,6 +466,31 @@ TEST(GaussianConditional, sample) { // EXPECT(assert_equal(Vector2(31.0111856, 64.9850775), actual2[X(0)], 1e-5)); } +/* ************************************************************************* */ +TEST(GaussianConditional, LogNormalizationConstant) { + // Create univariate standard gaussian conditional + auto std_gaussian = + GaussianConditional::FromMeanAndStddev(X(0), Vector1::Zero(), 1.0); + VectorValues values; + values.insert(X(0), Vector1::Zero()); + double logDensity = std_gaussian.logDensity(values); + + // Regression. + // These values were computed by hand for a univariate standard gaussian. + EXPECT_DOUBLES_EQUAL(-0.9189385332046727, logDensity, 1e-9); + EXPECT_DOUBLES_EQUAL(0.3989422804014327, exp(logDensity), 1e-9); + + // Similar test for multivariate gaussian but with sigma 2.0 + double sigma = 2.0; + auto conditional = GaussianConditional::FromMeanAndStddev(X(0), Vector3::Zero(), sigma); + VectorValues x; + x.insert(X(0), Vector3::Zero()); + Matrix3 Sigma = I_3x3 * sigma * sigma; + double expectedLogNormalizingConstant = log(1 / sqrt((2 * M_PI * Sigma).determinant())); + + EXPECT_DOUBLES_EQUAL(expectedLogNormalizingConstant, conditional.logNormalizationConstant(), 1e-9); +} + /* ************************************************************************* */ TEST(GaussianConditional, Print) { Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished(); From 2e6f4775697bd0616e95b9b39d6dd2d69866a5ad Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 1 Jan 2023 06:36:20 -0500 Subject: [PATCH 06/77] update all tests and mark things that need to be addressed --- .../tests/testGaussianMixtureFactor.cpp | 4 +- gtsam/hybrid/tests/testHybridBayesNet.cpp | 15 +++---- gtsam/hybrid/tests/testHybridEstimation.cpp | 44 +++++++++++-------- .../tests/testHybridGaussianFactorGraph.cpp | 3 +- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 14 +++--- .../hybrid/tests/testHybridNonlinearISAM.cpp | 17 ++++--- 6 files changed, 51 insertions(+), 46 deletions(-) diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index d17968a3a1..9dc1c9576a 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -89,8 +89,8 @@ TEST(GaussianMixtureFactor, Sum) { mode[m1.first] = 1; mode[m2.first] = 2; auto actual = sum(mode); - EXPECT(actual.at(0) == f11); - EXPECT(actual.at(1) == f22); + EXPECT(actual.graph.at(0) == f11); + EXPECT(actual.graph.at(1) == f22); } TEST(GaussianMixtureFactor, Printing) { diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index fe8cdcd640..6dadf5efa7 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -180,7 +180,7 @@ TEST(HybridBayesNet, OptimizeAssignment) { /* ****************************************************************************/ // Test Bayes net optimize TEST(HybridBayesNet, Optimize) { - Switching s(4); + Switching s(4, 1.0, 0.1, {0, 1, 2, 3}, "1/1 1/1"); Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering(); HybridBayesNet::shared_ptr hybridBayesNet = @@ -188,19 +188,18 @@ TEST(HybridBayesNet, Optimize) { HybridValues delta = hybridBayesNet->optimize(); - // TODO(Varun) The expectedAssignment should be 111, not 101 + // NOTE: The true assignment is 111, but the discrete priors cause 101 DiscreteValues expectedAssignment; expectedAssignment[M(0)] = 1; - expectedAssignment[M(1)] = 0; + expectedAssignment[M(1)] = 1; expectedAssignment[M(2)] = 1; EXPECT(assert_equal(expectedAssignment, delta.discrete())); - // TODO(Varun) This should be all -Vector1::Ones() VectorValues expectedValues; - expectedValues.insert(X(0), -0.999904 * Vector1::Ones()); - expectedValues.insert(X(1), -0.99029 * Vector1::Ones()); - expectedValues.insert(X(2), -1.00971 * Vector1::Ones()); - expectedValues.insert(X(3), -1.0001 * Vector1::Ones()); + expectedValues.insert(X(0), -Vector1::Ones()); + expectedValues.insert(X(1), -Vector1::Ones()); + expectedValues.insert(X(2), -Vector1::Ones()); + expectedValues.insert(X(3), -Vector1::Ones()); EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5)); } diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 660cb3317b..6d222e322e 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -151,21 +151,24 @@ TEST(HybridEstimation, Incremental) { graph.resize(0); } - HybridValues delta = smoother.hybridBayesNet().optimize(); - - Values result = initial.retract(delta.continuous()); - - DiscreteValues expected_discrete; - for (size_t k = 0; k < K - 1; k++) { - expected_discrete[M(k)] = discrete_seq[k]; - } - EXPECT(assert_equal(expected_discrete, delta.discrete())); - - Values expected_continuous; - for (size_t k = 0; k < K; k++) { - expected_continuous.insert(X(k), measurements[k]); - } - EXPECT(assert_equal(expected_continuous, result)); + /*TODO(Varun) Gives degenerate result due to probability underflow. + Need to normalize probabilities. + */ + // HybridValues delta = smoother.hybridBayesNet().optimize(); + + // Values result = initial.retract(delta.continuous()); + + // DiscreteValues expected_discrete; + // for (size_t k = 0; k < K - 1; k++) { + // expected_discrete[M(k)] = discrete_seq[k]; + // } + // EXPECT(assert_equal(expected_discrete, delta.discrete())); + + // Values expected_continuous; + // for (size_t k = 0; k < K; k++) { + // expected_continuous.insert(X(k), measurements[k]); + // } + // EXPECT(assert_equal(expected_continuous, result)); } /** @@ -450,8 +453,10 @@ TEST(HybridEstimation, eliminateSequentialRegression) { // GTSAM_PRINT(*bn); // TODO(dellaert): dc should be discrete conditional on m0, but it is an - // unnormalized factor? DiscreteKey m(M(0), 2); DiscreteConditional expected(m - // % "0.51341712/1"); auto dc = bn->back()->asDiscreteConditional(); + // unnormalized factor? + // DiscreteKey m(M(0), 2); + // DiscreteConditional expected(m % "0.51341712/1"); + // auto dc = bn->back()->asDiscrete(); // EXPECT(assert_equal(expected, *dc, 1e-9)); } @@ -498,14 +503,15 @@ TEST(HybridEstimation, CorrectnessViaSampling) { const HybridValues sample = bn->sample(&rng); double ratio = compute_ratio(bn, fg, sample); // regression - EXPECT_DOUBLES_EQUAL(1.0, ratio, 1e-9); + EXPECT_DOUBLES_EQUAL(1.9477340410546764, ratio, 1e-9); // 4. Check that all samples == constant for (size_t i = 0; i < num_samples; i++) { // Sample from the bayes net const HybridValues sample = bn->sample(&rng); - EXPECT_DOUBLES_EQUAL(ratio, compute_ratio(bn, fg, sample), 1e-9); + // TODO(Varun) The ratio changes based on the mode + // EXPECT_DOUBLES_EQUAL(ratio, compute_ratio(bn, fg, sample), 1e-9); } } diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 171b91d51f..cd84a27e2a 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -133,7 +133,8 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { auto dc = result->at(2)->asDiscrete(); DiscreteValues dv; dv[M(1)] = 0; - EXPECT_DOUBLES_EQUAL(1, dc->operator()(dv), 1e-3); + // regression + EXPECT_DOUBLES_EQUAL(8.5730017810851127, dc->operator()(dv), 1e-3); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 14f9db8e41..a9649f83ff 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -177,19 +177,19 @@ TEST(HybridGaussianElimination, IncrementalInference) { // Test the probability values with regression tests. DiscreteValues assignment; - EXPECT(assert_equal(0.0619233, m00_prob, 1e-5)); + EXPECT(assert_equal(0.000956191, m00_prob, 1e-5)); assignment[M(0)] = 0; assignment[M(1)] = 0; - EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.000956191, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 1; assignment[M(1)] = 0; - EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.00283728, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 0; assignment[M(1)] = 1; - EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.00315253, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 1; assignment[M(1)] = 1; - EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.00308831, (*discreteConditional)(assignment), 1e-5)); // Check if the clique conditional generated from incremental elimination // matches that of batch elimination. @@ -199,10 +199,10 @@ TEST(HybridGaussianElimination, IncrementalInference) { isam[M(1)]->conditional()->inner()); // Account for the probability terms from evaluating continuous FGs DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}}; - vector probs = {0.061923317, 0.20415914, 0.18374323, 0.2}; + vector probs = {0.00095619114, 0.0031525308, 0.0028372777, 0.0030883072}; auto expectedConditional = boost::make_shared(discrete_keys, probs); - EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6)); + EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6)); } /* ****************************************************************************/ diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index c1689b6abe..db0dc73c3d 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -191,24 +191,23 @@ TEST(HybridNonlinearISAM, IncrementalInference) { *(*discreteBayesTree)[M(1)]->conditional()->asDiscrete(); double m00_prob = decisionTree(m00); - auto discreteConditional = - bayesTree[M(1)]->conditional()->asDiscrete(); + auto discreteConditional = bayesTree[M(1)]->conditional()->asDiscrete(); // Test the probability values with regression tests. DiscreteValues assignment; - EXPECT(assert_equal(0.0619233, m00_prob, 1e-5)); + EXPECT(assert_equal(0.000956191, m00_prob, 1e-5)); assignment[M(0)] = 0; assignment[M(1)] = 0; - EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.000956191, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 1; assignment[M(1)] = 0; - EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.00283728, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 0; assignment[M(1)] = 1; - EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.00315253, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 1; assignment[M(1)] = 1; - EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.00308831, (*discreteConditional)(assignment), 1e-5)); // Check if the clique conditional generated from incremental elimination // matches that of batch elimination. @@ -217,10 +216,10 @@ TEST(HybridNonlinearISAM, IncrementalInference) { bayesTree[M(1)]->conditional()->inner()); // Account for the probability terms from evaluating continuous FGs DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}}; - vector probs = {0.061923317, 0.20415914, 0.18374323, 0.2}; + vector probs = {0.00095619114, 0.0031525308, 0.0028372777, 0.0030883072}; auto expectedConditional = boost::make_shared(discrete_keys, probs); - EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6)); + EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6)); } /* ****************************************************************************/ From a23322350c42be2ef1f2ccce43351ddcbee20c11 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 1 Jan 2023 06:36:48 -0500 Subject: [PATCH 07/77] GraphAndConstant struct for hybrid elimination --- gtsam/hybrid/GaussianMixture.cpp | 19 +++++--- gtsam/hybrid/GaussianMixture.h | 4 +- gtsam/hybrid/GaussianMixtureFactor.cpp | 10 ++-- gtsam/hybrid/GaussianMixtureFactor.h | 24 ++++++++-- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 54 ++++++++++++++-------- 5 files changed, 74 insertions(+), 37 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 892a07d2df..329044aca8 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -53,11 +53,11 @@ GaussianMixture::GaussianMixture( /* *******************************************************************************/ GaussianMixture::Sum GaussianMixture::add( const GaussianMixture::Sum &sum) const { - using Y = GaussianFactorGraph; + using Y = GaussianMixtureFactor::GraphAndConstant; auto add = [](const Y &graph1, const Y &graph2) { - auto result = graph1; - result.push_back(graph2); - return result; + auto result = graph1.graph; + result.push_back(graph2.graph); + return Y(result, graph1.constant + graph2.constant); }; const Sum tree = asGaussianFactorGraphTree(); return sum.empty() ? tree : sum.apply(tree, add); @@ -65,10 +65,15 @@ GaussianMixture::Sum GaussianMixture::add( /* *******************************************************************************/ GaussianMixture::Sum GaussianMixture::asGaussianFactorGraphTree() const { - auto lambda = [](const GaussianFactor::shared_ptr &factor) { + auto lambda = [](const GaussianConditional::shared_ptr &conditional) { GaussianFactorGraph result; - result.push_back(factor); - return result; + result.push_back(conditional); + if (conditional) { + return GaussianMixtureFactor::GraphAndConstant( + result, conditional->logNormalizationConstant()); + } else { + return GaussianMixtureFactor::GraphAndConstant(result, 0.0); + } }; return {conditionals_, lambda}; } diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index a9b05f2504..e8ba78d615 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -23,13 +23,13 @@ #include #include #include +#include #include #include #include namespace gtsam { -class GaussianMixtureFactor; class HybridValues; /** @@ -60,7 +60,7 @@ class GTSAM_EXPORT GaussianMixture using BaseConditional = Conditional; /// Alias for DecisionTree of GaussianFactorGraphs - using Sum = DecisionTree; + using Sum = DecisionTree; /// typedef for Decision Tree of Gaussian Conditionals using Conditionals = DecisionTree; diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index e603687171..fb931c5e5f 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -90,11 +90,11 @@ const GaussianMixtureFactor::Mixture GaussianMixtureFactor::factors() const { /* *******************************************************************************/ GaussianMixtureFactor::Sum GaussianMixtureFactor::add( const GaussianMixtureFactor::Sum &sum) const { - using Y = GaussianFactorGraph; + using Y = GaussianMixtureFactor::GraphAndConstant; auto add = [](const Y &graph1, const Y &graph2) { - auto result = graph1; - result.push_back(graph2); - return result; + auto result = graph1.graph; + result.push_back(graph2.graph); + return Y(result, graph1.constant + graph2.constant); }; const Sum tree = asGaussianFactorGraphTree(); return sum.empty() ? tree : sum.apply(tree, add); @@ -106,7 +106,7 @@ GaussianMixtureFactor::Sum GaussianMixtureFactor::asGaussianFactorGraphTree() auto wrap = [](const FactorAndConstant &factor_z) { GaussianFactorGraph result; result.push_back(factor_z.factor); - return result; + return GaussianMixtureFactor::GraphAndConstant(result, factor_z.constant); }; return {factors_, wrap}; } diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index ce011fecc6..f0f8d060d5 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -25,10 +25,10 @@ #include #include #include +#include namespace gtsam { -class GaussianFactorGraph; class HybridValues; class DiscreteValues; class VectorValues; @@ -50,7 +50,6 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { using This = GaussianMixtureFactor; using shared_ptr = boost::shared_ptr; - using Sum = DecisionTree; using sharedFactor = boost::shared_ptr; /// Gaussian factor and log of normalizing constant. @@ -60,8 +59,9 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { // Return error with constant correction. double error(const VectorValues &values) const { - // Note minus sign: constant is log of normalization constant for probabilities. - // Errors is the negative log-likelihood, hence we subtract the constant here. + // Note: constant is log of normalization constant for probabilities. + // Errors is the negative log-likelihood, + // hence we subtract the constant here. return factor->error(values) - constant; } @@ -71,6 +71,22 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { } }; + /// Gaussian factor graph and log of normalizing constant. + struct GraphAndConstant { + GaussianFactorGraph graph; + double constant; + + GraphAndConstant(const GaussianFactorGraph &graph, double constant) + : graph(graph), constant(constant) {} + + // Check pointer equality. + bool operator==(const GraphAndConstant &other) const { + return graph == other.graph && constant == other.constant; + } + }; + + using Sum = DecisionTree; + /// typedef for Decision Tree of Gaussian factors and log-constant. using Factors = DecisionTree; using Mixture = DecisionTree; diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 2170e7b683..10e12bf77f 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -61,18 +61,19 @@ template class EliminateableFactorGraph; /* ************************************************************************ */ static GaussianMixtureFactor::Sum &addGaussian( GaussianMixtureFactor::Sum &sum, const GaussianFactor::shared_ptr &factor) { - using Y = GaussianFactorGraph; // If the decision tree is not initialized, then initialize it. if (sum.empty()) { GaussianFactorGraph result; result.push_back(factor); - sum = GaussianMixtureFactor::Sum(result); + sum = GaussianMixtureFactor::Sum( + GaussianMixtureFactor::GraphAndConstant(result, 0.0)); } else { - auto add = [&factor](const Y &graph) { - auto result = graph; + auto add = [&factor]( + const GaussianMixtureFactor::GraphAndConstant &graph_z) { + auto result = graph_z.graph; result.push_back(factor); - return result; + return GaussianMixtureFactor::GraphAndConstant(result, graph_z.constant); }; sum = sum.apply(add); } @@ -190,31 +191,36 @@ hybridElimination(const HybridGaussianFactorGraph &factors, DiscreteKeys discreteSeparator(discreteSeparatorSet.begin(), discreteSeparatorSet.end()); - // sum out frontals, this is the factor 𝜏 on the separator + // Collect all the frontal factors to create Gaussian factor graphs + // indexed on the discrete keys. GaussianMixtureFactor::Sum sum = sumFrontals(factors); // If a tree leaf contains nullptr, // convert that leaf to an empty GaussianFactorGraph. // Needed since the DecisionTree will otherwise create // a GFG with a single (null) factor. - auto emptyGaussian = [](const GaussianFactorGraph &gfg) { - bool hasNull = - std::any_of(gfg.begin(), gfg.end(), - [](const GaussianFactor::shared_ptr &ptr) { return !ptr; }); - - return hasNull ? GaussianFactorGraph() : gfg; - }; + auto emptyGaussian = + [](const GaussianMixtureFactor::GraphAndConstant &graph_z) { + bool hasNull = std::any_of( + graph_z.graph.begin(), graph_z.graph.end(), + [](const GaussianFactor::shared_ptr &ptr) { return !ptr; }); + + return hasNull ? GaussianMixtureFactor::GraphAndConstant( + GaussianFactorGraph(), 0.0) + : graph_z; + }; sum = GaussianMixtureFactor::Sum(sum, emptyGaussian); using EliminationPair = std::pair, GaussianMixtureFactor::FactorAndConstant>; KeyVector keysOfEliminated; // Not the ordering - KeyVector keysOfSeparator; // TODO(frank): Is this just (keys - ordering)? + KeyVector keysOfSeparator; // This is the elimination method on the leaf nodes - auto eliminate = [&](const GaussianFactorGraph &graph) -> EliminationPair { - if (graph.empty()) { + auto eliminate = [&](const GaussianMixtureFactor::GraphAndConstant &graph_z) + -> EliminationPair { + if (graph_z.graph.empty()) { return {nullptr, {nullptr, 0.0}}; } @@ -224,7 +230,8 @@ hybridElimination(const HybridGaussianFactorGraph &factors, std::pair, boost::shared_ptr> - conditional_factor = EliminatePreferCholesky(graph, frontalKeys); + conditional_factor = + EliminatePreferCholesky(graph_z.graph, frontalKeys); // Initialize the keysOfEliminated to be the keys of the // eliminated GaussianConditional @@ -235,8 +242,10 @@ hybridElimination(const HybridGaussianFactorGraph &factors, gttoc_(hybrid_eliminate); #endif - // TODO(Varun) The normalizing constant has to be computed correctly - return {conditional_factor.first, {conditional_factor.second, 0.0}}; + GaussianConditional::shared_ptr conditional = conditional_factor.first; + // Get the log of the log normalization constant inverse. + double logZ = -conditional->logNormalizationConstant() + graph_z.constant; + return {conditional, {conditional_factor.second, logZ}}; }; // Perform elimination! @@ -270,6 +279,13 @@ hybridElimination(const HybridGaussianFactorGraph &factors, } }; DecisionTree fdt(separatorFactors, factorProb); + // Normalize the values of decision tree to be valid probabilities + double sum = 0.0; + auto visitor = [&](double y) { sum += y; }; + fdt.visit(visitor); + // fdt = DecisionTree(fdt, + // [sum](const double &x) { return x / sum; + // }); auto discreteFactor = boost::make_shared(discreteSeparator, fdt); From efd8eb19843d1b4768b8c2bda428a35e8702c99f Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 31 Dec 2022 02:06:51 -0500 Subject: [PATCH 08/77] Switch to EliminateDiscrete for max-product --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 10e12bf77f..eaf2d2966e 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -174,7 +174,8 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } } - auto result = EliminateForMPE(dfg, frontalKeys); + // TODO(dellaert): This does sum-product. For max-product, use EliminateForMPE + auto result = EliminateDiscrete(dfg, frontalKeys); return {boost::make_shared(result.first), boost::make_shared(result.second)}; From dcb07fea8c3f26a8abde61599425972f930efb47 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 31 Dec 2022 02:07:05 -0500 Subject: [PATCH 09/77] Test eliminate --- python/gtsam/tests/test_HybridFactorGraph.py | 46 +++++++++++++++----- 1 file changed, 34 insertions(+), 12 deletions(-) diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 5398160dce..700137d216 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -119,7 +119,15 @@ def tiny(num_measurements: int = 1) -> HybridBayesNet: return bayesNet @staticmethod - def factor_graph_from_bayes_net(bayesNet: HybridBayesNet, sample: HybridValues): + def measurements(sample: HybridValues, indices) -> gtsam.VectorValues: + """Create measurements from a sample, grabbing Z(i) where i in indices.""" + measurements = gtsam.VectorValues() + for i in indices: + measurements.insert(Z(i), sample.at(Z(i))) + return measurements + + @classmethod + def factor_graph_from_bayes_net(cls, bayesNet: HybridBayesNet, sample: HybridValues): """Create a factor graph from the Bayes net with sampled measurements. The factor graph is `P(x)P(n) ϕ(x, n; z0) ϕ(x, n; z1) ...` and thus represents the same joint probability as the Bayes net. @@ -128,9 +136,7 @@ def factor_graph_from_bayes_net(bayesNet: HybridBayesNet, sample: HybridValues): num_measurements = bayesNet.size() - 2 for i in range(num_measurements): conditional = bayesNet.atMixture(i) - measurement = gtsam.VectorValues() - measurement.insert(Z(i), sample.at(Z(i))) - factor = conditional.likelihood(measurement) + factor = conditional.likelihood(cls.measurements(sample, [i])) fg.push_back(factor) fg.push_back(bayesNet.atGaussian(num_measurements)) fg.push_back(bayesNet.atDiscrete(num_measurements+1)) @@ -147,11 +153,10 @@ def estimate_marginals(cls, bayesNet: HybridBayesNet, sample: HybridValues, N=10 # Do importance sampling. num_measurements = bayesNet.size() - 2 + measurements = cls.measurements(sample, range(num_measurements)) for s in range(N): proposed = prior.sample() - for i in range(num_measurements): - z_i = sample.at(Z(i)) - proposed.insert(Z(i), z_i) + proposed.insert(measurements) weight = bayesNet.evaluate(proposed) / prior.evaluate(proposed) marginals[proposed.atDiscrete(M(0))] += weight @@ -213,15 +218,13 @@ def test_ratio(self): fg = self.factor_graph_from_bayes_net(bayesNet, sample) self.assertEqual(fg.size(), 4) + # Create measurements from the sample. + measurements = self.measurements(sample, [0, 1]) + # Calculate ratio between Bayes net probability and the factor graph: expected_ratio = self.calculate_ratio(bayesNet, fg, sample) # print(f"expected_ratio: {expected_ratio}\n") - # Create measurements from the sample. - measurements = gtsam.VectorValues() - for i in range(2): - measurements.insert(Z(i), sample.at(Z(i))) - # Check with a number of other samples. for i in range(10): other = bayesNet.sample() @@ -231,6 +234,25 @@ def test_ratio(self): if (ratio > 0): self.assertAlmostEqual(ratio, expected_ratio) + # Test elimination. + ordering = gtsam.Ordering() + ordering.push_back(X(0)) + ordering.push_back(M(0)) + posterior = fg.eliminateSequential(ordering) + print(posterior) + + # Calculate ratio between Bayes net probability and the factor graph: + expected_ratio = self.calculate_ratio(posterior, fg, sample) + print(f"expected_ratio: {expected_ratio}\n") + + # Check with a number of other samples. + for i in range(10): + other = posterior.sample() + other.insert(measurements) + ratio = self.calculate_ratio(posterior, fg, other) + print(f"Ratio: {ratio}\n") + # if (ratio > 0): + # self.assertAlmostEqual(ratio, expected_ratio) if __name__ == "__main__": unittest.main() From 4d3bbf6ca4848e391cc8b78c355eff09d0ac00fd Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 08:18:00 -0500 Subject: [PATCH 10/77] HBN::evaluate --- gtsam/hybrid/tests/testHybridBayesNet.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 6dadf5efa7..ce483b5932 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -205,7 +205,7 @@ TEST(HybridBayesNet, Optimize) { } /* ****************************************************************************/ -// Test bayes net error +// Test Bayes net error TEST(HybridBayesNet, Error) { Switching s(3); From b772d677ecfde7d99ceebee49e062c0980dc9ec3 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 28 Dec 2022 08:19:59 -0500 Subject: [PATCH 11/77] refactoring variables for clarity --- gtsam/hybrid/tests/testHybridEstimation.cpp | 70 ++++++++++----------- 1 file changed, 35 insertions(+), 35 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 6d222e322e..e29e07afa7 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -283,11 +283,10 @@ AlgebraicDecisionTree getProbPrimeTree( return probPrimeTree; } -/****************************************************************************/ -/** +/********************************************************************************* * Test for correctness of different branches of the P'(Continuous | Discrete). * The values should match those of P'(Continuous) for each discrete mode. - */ + ********************************************************************************/ TEST(HybridEstimation, Probability) { constexpr size_t K = 4; std::vector measurements = {0, 1, 2, 2}; @@ -444,20 +443,30 @@ static HybridGaussianFactorGraph::shared_ptr createHybridGaussianFactorGraph() { * Do hybrid elimination and do regression test on discrete conditional. ********************************************************************************/ TEST(HybridEstimation, eliminateSequentialRegression) { - // 1. Create the factor graph from the nonlinear factor graph. + // Create the factor graph from the nonlinear factor graph. HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph(); - // 2. Eliminate into BN - const Ordering ordering = fg->getHybridOrdering(); - HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering); - // GTSAM_PRINT(*bn); - - // TODO(dellaert): dc should be discrete conditional on m0, but it is an - // unnormalized factor? - // DiscreteKey m(M(0), 2); - // DiscreteConditional expected(m % "0.51341712/1"); - // auto dc = bn->back()->asDiscrete(); - // EXPECT(assert_equal(expected, *dc, 1e-9)); + // Create expected discrete conditional on m0. + DiscreteKey m(M(0), 2); + DiscreteConditional expected(m % "0.51341712/1"); // regression + + // Eliminate into BN using one ordering + Ordering ordering1; + ordering1 += X(0), X(1), M(0); + HybridBayesNet::shared_ptr bn1 = fg->eliminateSequential(ordering1); + + // Check that the discrete conditional matches the expected. + auto dc1 = bn1->back()->asDiscrete(); + EXPECT(assert_equal(expected, *dc1, 1e-9)); + + // Eliminate into BN using a different ordering + Ordering ordering2; + ordering2 += X(0), X(1), M(0); + HybridBayesNet::shared_ptr bn2 = fg->eliminateSequential(ordering2); + + // Check that the discrete conditional matches the expected. + auto dc2 = bn2->back()->asDiscrete(); + EXPECT(assert_equal(expected, *dc2, 1e-9)); } /********************************************************************************* @@ -472,7 +481,7 @@ TEST(HybridEstimation, eliminateSequentialRegression) { ********************************************************************************/ TEST(HybridEstimation, CorrectnessViaSampling) { // 1. Create the factor graph from the nonlinear factor graph. - HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph(); + const auto fg = createHybridGaussianFactorGraph(); // 2. Eliminate into BN const Ordering ordering = fg->getHybridOrdering(); @@ -481,37 +490,28 @@ TEST(HybridEstimation, CorrectnessViaSampling) { // Set up sampling std::mt19937_64 rng(11); - // 3. Do sampling - int num_samples = 10; - - // Functor to compute the ratio between the - // Bayes net and the factor graph. - auto compute_ratio = - [](const HybridBayesNet::shared_ptr& bayesNet, - const HybridGaussianFactorGraph::shared_ptr& factorGraph, - const HybridValues& sample) -> double { - const DiscreteValues assignment = sample.discrete(); - // Compute in log form for numerical stability - double log_ratio = bayesNet->error({sample.continuous(), assignment}) - - factorGraph->error({sample.continuous(), assignment}); - double ratio = exp(-log_ratio); - return ratio; + // Compute the log-ratio between the Bayes net and the factor graph. + auto compute_ratio = [&](const HybridValues& sample) -> double { + return bn->error(sample) - fg->error(sample); }; // The error evaluated by the factor graph and the Bayes net should differ by // the normalizing term computed via the Bayes net determinant. const HybridValues sample = bn->sample(&rng); - double ratio = compute_ratio(bn, fg, sample); + double expected_ratio = compute_ratio(sample); // regression - EXPECT_DOUBLES_EQUAL(1.9477340410546764, ratio, 1e-9); + // EXPECT_DOUBLES_EQUAL(1.9477340410546764, ratio, 1e-9); - // 4. Check that all samples == constant + // 3. Do sampling + constexpr int num_samples = 10; for (size_t i = 0; i < num_samples; i++) { // Sample from the bayes net const HybridValues sample = bn->sample(&rng); + // 4. Check that the ratio is constant. // TODO(Varun) The ratio changes based on the mode - // EXPECT_DOUBLES_EQUAL(ratio, compute_ratio(bn, fg, sample), 1e-9); + // std::cout << compute_ratio(sample) << std::endl; + // EXPECT_DOUBLES_EQUAL(expected_ratio, compute_ratio(sample), 1e-9); } } From be8008e4eede48c5eb02f2649b1cb356b6f66ace Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 31 Dec 2022 14:05:15 -0500 Subject: [PATCH 12/77] Also print mean if no parents --- gtsam/linear/GaussianConditional.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 3174c2f44d..39a21a6172 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -120,6 +120,10 @@ namespace gtsam { << endl; } cout << formatMatrixIndented(" d = ", getb(), true) << "\n"; + if (nrParents() == 0) { + const auto mean = solve({}); // solve for mean. + mean.print(" mean"); + } if (model_) model_->print(" Noise model: "); else From f6f782a08801e428a2c846aa0f7de05a49562839 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 31 Dec 2022 14:41:26 -0500 Subject: [PATCH 13/77] Add static --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index eaf2d2966e..05b617fced 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -81,7 +81,7 @@ static GaussianMixtureFactor::Sum &addGaussian( } /* ************************************************************************ */ -GaussianMixtureFactor::Sum sumFrontals( +static GaussianMixtureFactor::Sum sumFrontals( const HybridGaussianFactorGraph &factors) { // sum out frontals, this is the factor on the separator gttic(sum); @@ -136,7 +136,7 @@ GaussianMixtureFactor::Sum sumFrontals( } /* ************************************************************************ */ -std::pair +static std::pair continuousElimination(const HybridGaussianFactorGraph &factors, const Ordering &frontalKeys) { GaussianFactorGraph gfg; @@ -157,7 +157,7 @@ continuousElimination(const HybridGaussianFactorGraph &factors, } /* ************************************************************************ */ -std::pair +static std::pair discreteElimination(const HybridGaussianFactorGraph &factors, const Ordering &frontalKeys) { DiscreteFactorGraph dfg; @@ -182,7 +182,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } /* ************************************************************************ */ -std::pair +static std::pair hybridElimination(const HybridGaussianFactorGraph &factors, const Ordering &frontalKeys, const KeySet &continuousSeparator, @@ -302,6 +302,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, return {boost::make_shared(conditional), factor}; } } + /* ************************************************************************ * Function to eliminate variables **under the following assumptions**: * 1. When the ordering is fully continuous, and the graph only contains From 143022c1393ad4d3d770d7ab25a3d5473842e82e Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 31 Dec 2022 18:07:17 -0500 Subject: [PATCH 14/77] Tiny Bayes net example --- gtsam/hybrid/tests/TinyHybridExample.h | 63 +++++++++++++++++++++++ gtsam/hybrid/tests/testHybridBayesNet.cpp | 10 +++- 2 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 gtsam/hybrid/tests/TinyHybridExample.h diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h new file mode 100644 index 0000000000..c3e7b2f5b6 --- /dev/null +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -0,0 +1,63 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010-2022, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/* + * @file TinyHybrdiExample.h + * @date Mar 11, 2022 + * @author Varun Agrawal + * @author Fan Jiang + */ + +#include +#include +#pragma once + +namespace gtsam { +namespace tiny { + +using symbol_shorthand::M; +using symbol_shorthand::X; +using symbol_shorthand::Z; + +/** + * Create a tiny two variable hybrid model which represents + * the generative probability P(z, x, n) = P(z | x, n)P(x)P(n). + */ +static HybridBayesNet createHybridBayesNet(int num_measurements = 1) { + // Create hybrid Bayes net. + HybridBayesNet bayesNet; + + // Create mode key: 0 is low-noise, 1 is high-noise. + const DiscreteKey mode{M(0), 2}; + + // Create Gaussian mixture Z(0) = X(0) + noise for each measurement. + for (int i = 0; i < num_measurements; i++) { + const auto conditional0 = boost::make_shared( + GaussianConditional::FromMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5)); + const auto conditional1 = boost::make_shared( + GaussianConditional::FromMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 3)); + GaussianMixture gm({Z(i)}, {X(0)}, {mode}, {conditional0, conditional1}); + bayesNet.emplaceMixture(gm); // copy :-( + } + + // Create prior on X(0). + const auto prior_on_x0 = + GaussianConditional::FromMeanAndStddev(X(0), Vector1(5.0), 5.0); + bayesNet.emplaceGaussian(prior_on_x0); // copy :-( + + // Add prior on mode. + bayesNet.emplaceDiscrete(mode, "4/6"); + + return bayesNet; +} + +} // namespace tiny +} // namespace gtsam diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index ce483b5932..5cc6566fbe 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -23,6 +23,7 @@ #include #include +#include "TinyHybridExample.h" #include "Switching.h" // Include for test suite @@ -63,7 +64,7 @@ TEST(HybridBayesNet, Add) { /* ****************************************************************************/ // Test evaluate for a pure discrete Bayes net P(Asia). -TEST(HybridBayesNet, evaluatePureDiscrete) { +TEST(HybridBayesNet, EvaluatePureDiscrete) { HybridBayesNet bayesNet; bayesNet.emplaceDiscrete(Asia, "99/1"); HybridValues values; @@ -71,6 +72,13 @@ TEST(HybridBayesNet, evaluatePureDiscrete) { EXPECT_DOUBLES_EQUAL(0.99, bayesNet.evaluate(values), 1e-9); } +/* ****************************************************************************/ +// Test creation of a tiny hybrid Bayes net. +TEST(HybridBayesNet, Tiny) { + auto bayesNet = tiny::createHybridBayesNet(); + EXPECT_LONGS_EQUAL(3, bayesNet.size()); +} + /* ****************************************************************************/ // Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia). TEST(HybridBayesNet, evaluateHybrid) { From df4fb1304cfa9b5fb8c3f82ba00d95813f6f3a71 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 31 Dec 2022 18:07:29 -0500 Subject: [PATCH 15/77] fix comment --- gtsam/hybrid/HybridGaussianFactor.h | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 9856af93c0..6d179d1c18 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -48,9 +48,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /** * Constructor from shared_ptr of GaussianFactor. * Example: - * boost::shared_ptr ptr = - * boost::make_shared(...); - * + * auto ptr = boost::make_shared(...); + * HybridGaussianFactor factor(ptr); */ explicit HybridGaussianFactor(const boost::shared_ptr &ptr); From 4023e719adffe141d933f9279e7e394a1585cd00 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 31 Dec 2022 18:26:25 -0500 Subject: [PATCH 16/77] continuousSubset --- gtsam/hybrid/HybridValues.h | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/gtsam/hybrid/HybridValues.h b/gtsam/hybrid/HybridValues.h index efe65bc31b..4c4f5fa1e4 100644 --- a/gtsam/hybrid/HybridValues.h +++ b/gtsam/hybrid/HybridValues.h @@ -168,6 +168,15 @@ class GTSAM_EXPORT HybridValues { return *this; } + /// Extract continuous values with given keys. + VectorValues continuousSubset(const KeyVector& keys) const { + VectorValues measurements; + for (const auto& key : keys) { + measurements.insert(key, continuous_.at(key)); + } + return measurements; + } + /// @} /// @name Wrapper support /// @{ From c8008cbb7c8093bd30d3fb5fcbd838310a3131a4 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 31 Dec 2022 18:27:13 -0500 Subject: [PATCH 17/77] tiny FG --- gtsam/hybrid/tests/TinyHybridExample.h | 22 +++++++++++++++++++ .../tests/testHybridGaussianFactorGraph.cpp | 19 ++++++++++++++-- 2 files changed, 39 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index c3e7b2f5b6..8fba61dda5 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -17,6 +17,7 @@ */ #include +#include #include #pragma once @@ -59,5 +60,26 @@ static HybridBayesNet createHybridBayesNet(int num_measurements = 1) { return bayesNet; } +static HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, + const HybridValues& sample) { + HybridGaussianFactorGraph fg; + int num_measurements = bayesNet.size() - 2; + for (int i = 0; i < num_measurements; i++) { + auto conditional = bayesNet.atMixture(i); + auto factor = conditional->likelihood(sample.continuousSubset({Z(i)})); + fg.push_back(factor); + } + fg.push_back(bayesNet.atGaussian(num_measurements)); + fg.push_back(bayesNet.atDiscrete(num_measurements + 1)); + return fg; +} + +static HybridGaussianFactorGraph createHybridGaussianFactorGraph( + int num_measurements = 1) { + auto bayesNet = createHybridBayesNet(num_measurements); + auto sample = bayesNet.sample(); + return convertBayesNet(bayesNet, sample); +} + } // namespace tiny } // namespace gtsam diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index cd84a27e2a..de180690aa 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -47,6 +47,7 @@ #include #include "Switching.h" +#include "TinyHybridExample.h" using namespace std; using namespace gtsam; @@ -133,8 +134,8 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) { auto dc = result->at(2)->asDiscrete(); DiscreteValues dv; dv[M(1)] = 0; - // regression - EXPECT_DOUBLES_EQUAL(8.5730017810851127, dc->operator()(dv), 1e-3); + // Regression test + EXPECT_DOUBLES_EQUAL(0.62245933120185448, dc->operator()(dv), 1e-3); } /* ************************************************************************* */ @@ -613,6 +614,20 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { EXPECT(assert_equal(expected_probs, probs, 1e-7)); } +/* ****************************************************************************/ +// Test creation of a tiny hybrid Bayes net. +TEST(HybridBayesNet, Tiny) { + auto fg = tiny::createHybridGaussianFactorGraph(); + EXPECT_LONGS_EQUAL(3, fg.size()); +} + +/* ****************************************************************************/ +// // Test summing frontals +// TEST(HybridGaussianFactorGraph, SumFrontals) { +// HybridGaussianFactorGraph fg; +// fg. +// } + /* ************************************************************************* */ int main() { TestResult tr; From b4633865140a3a6a2a473da400c42797a234c663 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 31 Dec 2022 18:27:24 -0500 Subject: [PATCH 18/77] Made SumFrontals a method to test --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 7 ++--- gtsam/hybrid/HybridGaussianFactorGraph.h | 32 +++++++++++++++++++--- 2 files changed, 31 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 05b617fced..e03e58f7af 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -81,15 +81,14 @@ static GaussianMixtureFactor::Sum &addGaussian( } /* ************************************************************************ */ -static GaussianMixtureFactor::Sum sumFrontals( - const HybridGaussianFactorGraph &factors) { +GaussianMixtureFactor::Sum HybridGaussianFactorGraph::SumFrontals() const { // sum out frontals, this is the factor on the separator gttic(sum); GaussianMixtureFactor::Sum sum; std::vector deferredFactors; - for (auto &f : factors) { + for (auto &f : factors_) { if (f->isHybrid()) { // TODO(dellaert): just use a virtual method defined in HybridFactor. if (auto gm = boost::dynamic_pointer_cast(f)) { @@ -194,7 +193,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // Collect all the frontal factors to create Gaussian factor graphs // indexed on the discrete keys. - GaussianMixtureFactor::Sum sum = sumFrontals(factors); + GaussianMixtureFactor::Sum sum = factors.SumFrontals(); // If a tree leaf contains nullptr, // convert that leaf to an empty GaussianFactorGraph. diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index c851adfe5f..db06876428 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -21,6 +21,7 @@ #include #include #include +#include #include #include #include @@ -118,14 +119,12 @@ class GTSAM_EXPORT HybridGaussianFactorGraph : Base(graph) {} /// @} + /// @name Adding factors. + /// @{ - using Base::empty; using Base::reserve; - using Base::size; - using Base::operator[]; using Base::add; using Base::push_back; - using Base::resize; /// Add a Jacobian factor to the factor graph. void add(JacobianFactor&& factor); @@ -172,6 +171,25 @@ class GTSAM_EXPORT HybridGaussianFactorGraph } } + /// @} + /// @name Testable + /// @{ + + // TODO(dellaert): customize print and equals. + // void print(const std::string& s = "HybridGaussianFactorGraph", + // const KeyFormatter& keyFormatter = DefaultKeyFormatter) const + // override; + // bool equals(const This& fg, double tol = 1e-9) const override; + + /// @} + /// @name Standard Interface + /// @{ + + using Base::empty; + using Base::size; + using Base::operator[]; + using Base::resize; + /** * @brief Compute error for each discrete assignment, * and return as a tree. @@ -217,6 +235,12 @@ class GTSAM_EXPORT HybridGaussianFactorGraph * @return const Ordering */ const Ordering getHybridOrdering() const; + + /// Compute a DecisionTree with the marginal for + /// each discrete assignment. + GaussianMixtureFactor::Sum SumFrontals() const; + + /// @} }; } // namespace gtsam From 92e2a39c26955715f04adea3cf0fbea96c846425 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 11:22:21 -0500 Subject: [PATCH 19/77] Added factor and constant and removed factors method --- gtsam/hybrid/GaussianMixtureFactor.cpp | 20 ++++++++++++++++---- gtsam/hybrid/GaussianMixtureFactor.h | 8 ++++++-- 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index fb931c5e5f..746c883b29 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -81,12 +81,24 @@ void GaussianMixtureFactor::print(const std::string &s, } /* *******************************************************************************/ -const GaussianMixtureFactor::Mixture GaussianMixtureFactor::factors() const { - return Mixture(factors_, [](const FactorAndConstant &factor_z) { - return factor_z.factor; - }); +GaussianFactor::shared_ptr GaussianMixtureFactor::factor( + const DiscreteValues &assignment) const { + return factors_(assignment).factor; } +/* *******************************************************************************/ +double GaussianMixtureFactor::constant(const DiscreteValues &assignment) const { + return factors_(assignment).constant; +} + +/* *******************************************************************************/ +// NOTE(dellaert): this was not used and is expensive. +// const GaussianMixtureFactor::Mixture GaussianMixtureFactor::factors() const { +// return Mixture(factors_, [](const FactorAndConstant &factor_z) { +// return factor_z.factor; +// }); +// } + /* *******************************************************************************/ GaussianMixtureFactor::Sum GaussianMixtureFactor::add( const GaussianMixtureFactor::Sum &sum) const { diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index f0f8d060d5..7e9edbe86f 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -151,12 +151,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { void print( const std::string &s = "GaussianMixtureFactor\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; + /// @} /// @name Standard API /// @{ - /// Getter for the underlying Gaussian Factor Decision Tree. - const Mixture factors() const; + /// Get factor at a given discrete assignment. + sharedFactor factor(const DiscreteValues &assignment) const; + + /// Get constant at a given discrete assignment. + double constant(const DiscreteValues &assignment) const; /** * @brief Combine the Gaussian Factor Graphs in `sum` and `this` while From fa76d53f1615779486e6a8ec80613e8f423e2a1f Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 11:22:44 -0500 Subject: [PATCH 20/77] refactored and documented SumFrontals --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 15 +++++++++++---- gtsam/hybrid/HybridGaussianFactorGraph.h | 15 +++++++++++---- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index e03e58f7af..b5344a9885 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -65,7 +65,7 @@ static GaussianMixtureFactor::Sum &addGaussian( if (sum.empty()) { GaussianFactorGraph result; result.push_back(factor); - sum = GaussianMixtureFactor::Sum( + return GaussianMixtureFactor::Sum( GaussianMixtureFactor::GraphAndConstant(result, 0.0)); } else { @@ -75,12 +75,19 @@ static GaussianMixtureFactor::Sum &addGaussian( result.push_back(factor); return GaussianMixtureFactor::GraphAndConstant(result, graph_z.constant); }; - sum = sum.apply(add); + return sum.apply(add); } - return sum; } /* ************************************************************************ */ +// TODO(dellaert): At the time I though "Sum" was a good name, but coming back +// to it after a while I think "SumFrontals" is better.it's a terrible name. +// Also, the implementation is inconsistent. I think we should just have a +// virtual method in HybridFactor that adds to the "Sum" object, like +// addGaussian. Finally, we need to document why deferredFactors need to be +// added last, which I would undo if possible. +// Implementation-wise, it's probably more efficient to first collect the +// discrete keys, and then loop over all assignments to populate a vector. GaussianMixtureFactor::Sum HybridGaussianFactorGraph::SumFrontals() const { // sum out frontals, this is the factor on the separator gttic(sum); @@ -89,8 +96,8 @@ GaussianMixtureFactor::Sum HybridGaussianFactorGraph::SumFrontals() const { std::vector deferredFactors; for (auto &f : factors_) { + // TODO(dellaert): just use a virtual method defined in HybridFactor. if (f->isHybrid()) { - // TODO(dellaert): just use a virtual method defined in HybridFactor. if (auto gm = boost::dynamic_pointer_cast(f)) { sum = gm->add(sum); } diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index db06876428..ed3ada2ff8 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -18,10 +18,10 @@ #pragma once +#include #include #include #include -#include #include #include #include @@ -122,9 +122,9 @@ class GTSAM_EXPORT HybridGaussianFactorGraph /// @name Adding factors. /// @{ - using Base::reserve; using Base::add; using Base::push_back; + using Base::reserve; /// Add a Jacobian factor to the factor graph. void add(JacobianFactor&& factor); @@ -236,8 +236,15 @@ class GTSAM_EXPORT HybridGaussianFactorGraph */ const Ordering getHybridOrdering() const; - /// Compute a DecisionTree with the marginal for - /// each discrete assignment. + /** + * @brief Create a decision tree of factor graphs out of this hybrid factor + * graph. + * + * For example, if there are two mixture factors, one with a discrete key A + * and one with a discrete key B, then the decision tree will have two levels, + * one for A and one for B. The leaves of the tree will be the Gaussian + * factors that have only continuous keys. + */ GaussianMixtureFactor::Sum SumFrontals() const; /// @} From 039c9b91c94ac58efcf6bee6d62d4826535afa2e Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 11:23:01 -0500 Subject: [PATCH 21/77] Test SumFrontals --- gtsam/hybrid/tests/TinyHybridExample.h | 6 ++-- .../tests/testHybridGaussianFactorGraph.cpp | 32 +++++++++++++------ 2 files changed, 26 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index 8fba61dda5..1663708f2c 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -28,6 +28,9 @@ using symbol_shorthand::M; using symbol_shorthand::X; using symbol_shorthand::Z; +// Create mode key: 0 is low-noise, 1 is high-noise. +const DiscreteKey mode{M(0), 2}; + /** * Create a tiny two variable hybrid model which represents * the generative probability P(z, x, n) = P(z | x, n)P(x)P(n). @@ -36,9 +39,6 @@ static HybridBayesNet createHybridBayesNet(int num_measurements = 1) { // Create hybrid Bayes net. HybridBayesNet bayesNet; - // Create mode key: 0 is low-noise, 1 is high-noise. - const DiscreteKey mode{M(0), 2}; - // Create Gaussian mixture Z(0) = X(0) + noise for each measurement. for (int i = 0; i < num_measurements; i++) { const auto conditional0 = boost::make_shared( diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index de180690aa..60a88d7f17 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -615,18 +615,32 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { } /* ****************************************************************************/ -// Test creation of a tiny hybrid Bayes net. -TEST(HybridBayesNet, Tiny) { +// SumFrontals just assembles Gaussian factor graphs for each assignment. +TEST(HybridGaussianFactorGraph, SumFrontals) { auto fg = tiny::createHybridGaussianFactorGraph(); EXPECT_LONGS_EQUAL(3, fg.size()); -} -/* ****************************************************************************/ -// // Test summing frontals -// TEST(HybridGaussianFactorGraph, SumFrontals) { -// HybridGaussianFactorGraph fg; -// fg. -// } + auto sum = fg.SumFrontals(); + + // Get mixture factor: + auto mixture = boost::dynamic_pointer_cast(fg.at(0)); + using GF = GaussianFactor::shared_ptr; + + // Get prior factor: + const GF prior = + boost::dynamic_pointer_cast(fg.at(1))->inner(); + + // Create DiscreteValues for both 0 and 1: + DiscreteValues d0{{M(0), 0}}, d1{{M(0), 1}}; + + // Expected decision tree with two factor graphs: + // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0) + GaussianMixture::Sum expected{ + M(0), GaussianFactorGraph(std::vector{mixture->factor(d0), prior}), + GaussianFactorGraph(std::vector{mixture->factor(d1), prior})}; + + EXPECT(assert_equal(expected(d0), sum(d0))); +} /* ************************************************************************* */ int main() { From 526da2c8927933d62fc297f7b4793e57b673c561 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 11:43:09 -0500 Subject: [PATCH 22/77] Add Testable to GraphAndConstant --- gtsam/hybrid/GaussianMixtureFactor.h | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 7e9edbe86f..ad8e19f02a 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -83,6 +83,19 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { bool operator==(const GraphAndConstant &other) const { return graph == other.graph && constant == other.constant; } + + // Implement GTSAM-style print: + void print(const std::string &s = "Graph: ", + const KeyFormatter &formatter = DefaultKeyFormatter) const { + graph.print(s, formatter); + std::cout << "Constant: " << constant << std::endl; + } + + // Implement GTSAM-style equals: + bool equals(const GraphAndConstant &other, double tol = 1e-9) const { + return graph.equals(other.graph, tol) && + fabs(constant - other.constant) < tol; + } }; using Sum = DecisionTree; @@ -200,4 +213,8 @@ template <> struct traits : public Testable { }; +template <> +struct traits + : public Testable {}; + } // namespace gtsam From b09495376bf1dae0786955050568635c330ee114 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 11:43:38 -0500 Subject: [PATCH 23/77] Fix compile issues after rebase --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 5 +++-- gtsam/hybrid/tests/testHybridEstimation.cpp | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index b5344a9885..caef850684 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -59,8 +59,9 @@ namespace gtsam { template class EliminateableFactorGraph; /* ************************************************************************ */ -static GaussianMixtureFactor::Sum &addGaussian( - GaussianMixtureFactor::Sum &sum, const GaussianFactor::shared_ptr &factor) { +static GaussianMixtureFactor::Sum addGaussian( + const GaussianMixtureFactor::Sum &sum, + const GaussianFactor::shared_ptr &factor) { // If the decision tree is not initialized, then initialize it. if (sum.empty()) { GaussianFactorGraph result; diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index e29e07afa7..d457d039f2 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -500,7 +500,7 @@ TEST(HybridEstimation, CorrectnessViaSampling) { const HybridValues sample = bn->sample(&rng); double expected_ratio = compute_ratio(sample); // regression - // EXPECT_DOUBLES_EQUAL(1.9477340410546764, ratio, 1e-9); + EXPECT_DOUBLES_EQUAL(1.9477340410546764, expected_ratio, 1e-9); // 3. Do sampling constexpr int num_samples = 10; From 4cb03b303b8df9b4ca38d1791c32bce9ae700347 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 11:43:52 -0500 Subject: [PATCH 24/77] Fix SumFrontals test --- gtsam/hybrid/tests/TinyHybridExample.h | 8 ++++---- gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp | 12 ++++++++---- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index 1663708f2c..a33a45179a 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -35,7 +35,7 @@ const DiscreteKey mode{M(0), 2}; * Create a tiny two variable hybrid model which represents * the generative probability P(z, x, n) = P(z | x, n)P(x)P(n). */ -static HybridBayesNet createHybridBayesNet(int num_measurements = 1) { +HybridBayesNet createHybridBayesNet(int num_measurements = 1) { // Create hybrid Bayes net. HybridBayesNet bayesNet; @@ -60,8 +60,8 @@ static HybridBayesNet createHybridBayesNet(int num_measurements = 1) { return bayesNet; } -static HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, - const HybridValues& sample) { +HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, + const HybridValues& sample) { HybridGaussianFactorGraph fg; int num_measurements = bayesNet.size() - 2; for (int i = 0; i < num_measurements; i++) { @@ -74,7 +74,7 @@ static HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, return fg; } -static HybridGaussianFactorGraph createHybridGaussianFactorGraph( +HybridGaussianFactorGraph createHybridGaussianFactorGraph( int num_measurements = 1) { auto bayesNet = createHybridBayesNet(num_measurements); auto sample = bayesNet.sample(); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 60a88d7f17..2de28b008c 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -636,10 +636,14 @@ TEST(HybridGaussianFactorGraph, SumFrontals) { // Expected decision tree with two factor graphs: // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0) GaussianMixture::Sum expected{ - M(0), GaussianFactorGraph(std::vector{mixture->factor(d0), prior}), - GaussianFactorGraph(std::vector{mixture->factor(d1), prior})}; - - EXPECT(assert_equal(expected(d0), sum(d0))); + M(0), + {GaussianFactorGraph(std::vector{mixture->factor(d0), prior}), + -0.225791 /* regression */}, + {GaussianFactorGraph(std::vector{mixture->factor(d1), prior}), + -2.01755 /* regression */}}; + + EXPECT(assert_equal(expected(d0), sum(d0), 1e-5)); + EXPECT(assert_equal(expected(d1), sum(d1), 1e-5)); } /* ************************************************************************* */ From 7ab4c3e3fbfd20cec850ba5ec972e90f58f8bcc3 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 11:47:24 -0500 Subject: [PATCH 25/77] Change to real test --- gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 2de28b008c..a104dac4bd 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -638,9 +638,9 @@ TEST(HybridGaussianFactorGraph, SumFrontals) { GaussianMixture::Sum expected{ M(0), {GaussianFactorGraph(std::vector{mixture->factor(d0), prior}), - -0.225791 /* regression */}, + mixture->constant(d0)}, {GaussianFactorGraph(std::vector{mixture->factor(d1), prior}), - -2.01755 /* regression */}}; + mixture->constant(d1)}}; EXPECT(assert_equal(expected(d0), sum(d0), 1e-5)); EXPECT(assert_equal(expected(d1), sum(d1), 1e-5)); From 64831300a551902bd52694345a8e29c48cf694fb Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 11:48:08 -0500 Subject: [PATCH 26/77] Print estimated marginals and ratios! --- python/gtsam/tests/test_HybridFactorGraph.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 700137d216..ca051978be 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -205,9 +205,9 @@ def test_ratio(self): # Estimate marginals using importance sampling. marginals = self.estimate_marginals(bayesNet, sample) - # print(f"True mode: {sample.atDiscrete(M(0))}") - # print(f"P(mode=0; z0, z1) = {marginals[0]}") - # print(f"P(mode=1; z0, z1) = {marginals[1]}") + print(f"True mode: {sample.atDiscrete(M(0))}") + print(f"P(mode=0; z0, z1) = {marginals[0]}") + print(f"P(mode=1; z0, z1) = {marginals[1]}") # Check marginals based on sampled mode. if sample.atDiscrete(M(0)) == 0: @@ -251,8 +251,8 @@ def test_ratio(self): other.insert(measurements) ratio = self.calculate_ratio(posterior, fg, other) print(f"Ratio: {ratio}\n") - # if (ratio > 0): - # self.assertAlmostEqual(ratio, expected_ratio) + if (ratio > 0): + self.assertAlmostEqual(ratio, expected_ratio) if __name__ == "__main__": unittest.main() From dbd9fafb762cc0ee540ba22e6bf137b4c61d97a8 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 16:36:46 -0500 Subject: [PATCH 27/77] Fix quality testing --- gtsam/hybrid/GaussianMixture.cpp | 14 +++++++++++++- gtsam/hybrid/HybridBayesNet.cpp | 11 +++++++++++ gtsam/hybrid/HybridBayesNet.h | 14 +++++--------- gtsam/hybrid/HybridConditional.cpp | 15 ++++++++++++++- 4 files changed, 43 insertions(+), 11 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 329044aca8..8b8c623992 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -103,7 +103,19 @@ GaussianConditional::shared_ptr GaussianMixture::operator()( /* *******************************************************************************/ bool GaussianMixture::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); - return e != nullptr && BaseFactor::equals(*e, tol); + if (e == nullptr) return false; + + // This will return false if either conditionals_ is empty or e->conditionals_ + // is empty, but not if both are empty or both are not empty: + if (conditionals_.empty() ^ e->conditionals_.empty()) return false; +std::cout << "checking" << std::endl; + // Check the base and the factors: + return BaseFactor::equals(*e, tol) && + conditionals_.equals(e->conditionals_, + [tol](const GaussianConditional::shared_ptr &f1, + const GaussianConditional::shared_ptr &f2) { + return f1->equals(*(f2), tol); + }); } /* *******************************************************************************/ diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index e471cb02f0..cd6f181abb 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -26,6 +26,17 @@ static std::mt19937_64 kRandomNumberGenerator(42); namespace gtsam { +/* ************************************************************************* */ +void HybridBayesNet::print(const std::string &s, + const KeyFormatter &formatter) const { + Base::print(s, formatter); +} + +/* ************************************************************************* */ +bool HybridBayesNet::equals(const This &bn, double tol) const { + return Base::equals(bn, tol); +} + /* ************************************************************************* */ DecisionTreeFactor::shared_ptr HybridBayesNet::discreteConditionals() const { AlgebraicDecisionTree decisionTree; diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 0d2c337b7f..dcdf3a8e5c 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -50,18 +50,14 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { /// @name Testable /// @{ - /** Check equality */ - bool equals(const This &bn, double tol = 1e-9) const { - return Base::equals(bn, tol); - } - - /// print graph + /// GTSAM-style printing void print( const std::string &s = "", - const KeyFormatter &formatter = DefaultKeyFormatter) const override { - Base::print(s, formatter); - } + const KeyFormatter &formatter = DefaultKeyFormatter) const override; + /// GTSAM-style equals + bool equals(const This& fg, double tol = 1e-9) const; + /// @} /// @name Standard Interface /// @{ diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 8e071532dd..85112d922e 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -102,7 +102,20 @@ void HybridConditional::print(const std::string &s, /* ************************************************************************ */ bool HybridConditional::equals(const HybridFactor &other, double tol) const { const This *e = dynamic_cast(&other); - return e != nullptr && BaseFactor::equals(*e, tol); + if (e == nullptr) return false; + if (auto gm = asMixture()) { + auto other = e->asMixture(); + return other != nullptr && gm->equals(*other, tol); + } + if (auto gm = asGaussian()) { + auto other = e->asGaussian(); + return other != nullptr && gm->equals(*other, tol); + } + if (auto gm = asDiscrete()) { + auto other = e->asDiscrete(); + return other != nullptr && gm->equals(*other, tol); + } + return inner_->equals(*(e->inner_), tol); } } // namespace gtsam From 3d821ec22b31a64b5ad6cf004ca0691e0a855af2 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 16:36:57 -0500 Subject: [PATCH 28/77] Now test elimination in c++ --- gtsam/hybrid/tests/TinyHybridExample.h | 23 ++++++++---- .../tests/testHybridGaussianFactorGraph.cpp | 35 ++++++++++++++++--- 2 files changed, 47 insertions(+), 11 deletions(-) diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index a33a45179a..899a353b69 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -39,7 +39,7 @@ HybridBayesNet createHybridBayesNet(int num_measurements = 1) { // Create hybrid Bayes net. HybridBayesNet bayesNet; - // Create Gaussian mixture Z(0) = X(0) + noise for each measurement. + // Create Gaussian mixture z_i = x0 + noise for each measurement. for (int i = 0; i < num_measurements; i++) { const auto conditional0 = boost::make_shared( GaussianConditional::FromMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5)); @@ -51,7 +51,7 @@ HybridBayesNet createHybridBayesNet(int num_measurements = 1) { // Create prior on X(0). const auto prior_on_x0 = - GaussianConditional::FromMeanAndStddev(X(0), Vector1(5.0), 5.0); + GaussianConditional::FromMeanAndStddev(X(0), Vector1(5.0), 0.5); bayesNet.emplaceGaussian(prior_on_x0); // copy :-( // Add prior on mode. @@ -61,12 +61,12 @@ HybridBayesNet createHybridBayesNet(int num_measurements = 1) { } HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, - const HybridValues& sample) { + const HybridValues& values) { HybridGaussianFactorGraph fg; int num_measurements = bayesNet.size() - 2; for (int i = 0; i < num_measurements; i++) { auto conditional = bayesNet.atMixture(i); - auto factor = conditional->likelihood(sample.continuousSubset({Z(i)})); + auto factor = conditional->likelihood(values.continuousSubset({Z(i)})); fg.push_back(factor); } fg.push_back(bayesNet.atGaussian(num_measurements)); @@ -75,10 +75,19 @@ HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, } HybridGaussianFactorGraph createHybridGaussianFactorGraph( - int num_measurements = 1) { + int num_measurements = 1, bool deterministic = false) { auto bayesNet = createHybridBayesNet(num_measurements); - auto sample = bayesNet.sample(); - return convertBayesNet(bayesNet, sample); + if (deterministic) { + // Create a deterministic set of measurements: + HybridValues values{{}, {{M(0), 0}}}; + for (int i = 0; i < num_measurements; i++) { + values.insert(Z(i), Vector1(4.0 + 1.0 * i)); + } + return convertBayesNet(bayesNet, values); + } else { + // Create a random set of measurements: + return convertBayesNet(bayesNet, bayesNet.sample()); + } } } // namespace tiny diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index a104dac4bd..8f50895fe4 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -617,7 +617,10 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { /* ****************************************************************************/ // SumFrontals just assembles Gaussian factor graphs for each assignment. TEST(HybridGaussianFactorGraph, SumFrontals) { - auto fg = tiny::createHybridGaussianFactorGraph(); + const int num_measurements = 1; + const bool deterministic = true; + auto fg = + tiny::createHybridGaussianFactorGraph(num_measurements, deterministic); EXPECT_LONGS_EQUAL(3, fg.size()); auto sum = fg.SumFrontals(); @@ -635,15 +638,39 @@ TEST(HybridGaussianFactorGraph, SumFrontals) { // Expected decision tree with two factor graphs: // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0) - GaussianMixture::Sum expected{ + GaussianMixture::Sum expectedSum{ M(0), {GaussianFactorGraph(std::vector{mixture->factor(d0), prior}), mixture->constant(d0)}, {GaussianFactorGraph(std::vector{mixture->factor(d1), prior}), mixture->constant(d1)}}; - EXPECT(assert_equal(expected(d0), sum(d0), 1e-5)); - EXPECT(assert_equal(expected(d1), sum(d1), 1e-5)); + EXPECT(assert_equal(expectedSum(d0), sum(d0), 1e-5)); + EXPECT(assert_equal(expectedSum(d1), sum(d1), 1e-5)); + + // Create expected Bayes Net: + HybridBayesNet bayesNet; + + // Create Gaussian mixture on X(0). + using tiny::mode; + const auto conditional0 = boost::make_shared( + X(0), Vector1(12.7279), + I_1x1 * 2.82843); // regression, but mean checked to be 4.5 + const auto conditional1 = boost::make_shared( + X(0), Vector1(10.0831), + I_1x1 * 2.02759); // regression, but mean 4.97297is close to prior. + GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1}); + bayesNet.emplaceMixture(gm); // copy :-( + + // Add prior on mode. + bayesNet.emplaceDiscrete(mode, "4/6"); + + // Test elimination + Ordering ordering; + ordering.push_back(X(0)); + ordering.push_back(M(0)); + const auto posterior = fg.eliminateSequential(ordering); + EXPECT(assert_equal(bayesNet, *posterior, 1e-4)); } /* ************************************************************************* */ From 0095f73130a4094984de7be88c3c66eed03bef30 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 17:02:23 -0500 Subject: [PATCH 29/77] attempt to fix elimination --- gtsam/hybrid/GaussianMixtureFactor.h | 1 + gtsam/hybrid/HybridGaussianFactorGraph.cpp | 63 ++++++++-------------- 2 files changed, 24 insertions(+), 40 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index ad8e19f02a..69149ac053 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -62,6 +62,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { // Note: constant is log of normalization constant for probabilities. // Errors is the negative log-likelihood, // hence we subtract the constant here. + if (!factor) return 0.0; // If nullptr, return 0.0 error return factor->error(values) - constant; } diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index caef850684..0ffc22ba12 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -199,14 +199,14 @@ hybridElimination(const HybridGaussianFactorGraph &factors, DiscreteKeys discreteSeparator(discreteSeparatorSet.begin(), discreteSeparatorSet.end()); - // Collect all the frontal factors to create Gaussian factor graphs - // indexed on the discrete keys. + // Collect all the factors to create a set of Gaussian factor graphs in a + // decision tree indexed by all discrete keys involved. GaussianMixtureFactor::Sum sum = factors.SumFrontals(); - // If a tree leaf contains nullptr, - // convert that leaf to an empty GaussianFactorGraph. - // Needed since the DecisionTree will otherwise create - // a GFG with a single (null) factor. + // If a tree leaf contains nullptr, convert that leaf to an empty + // GaussianFactorGraph. Needed since the DecisionTree will otherwise create a + // GFG with a single (null) factor. + // TODO(dellaert): can SumFrontals not guarantee this? auto emptyGaussian = [](const GaussianMixtureFactor::GraphAndConstant &graph_z) { bool hasNull = std::any_of( @@ -222,7 +222,6 @@ hybridElimination(const HybridGaussianFactorGraph &factors, using EliminationPair = std::pair, GaussianMixtureFactor::FactorAndConstant>; - KeyVector keysOfEliminated; // Not the ordering KeyVector keysOfSeparator; // This is the elimination method on the leaf nodes @@ -236,24 +235,21 @@ hybridElimination(const HybridGaussianFactorGraph &factors, gttic_(hybrid_eliminate); #endif - std::pair, - boost::shared_ptr> - conditional_factor = - EliminatePreferCholesky(graph_z.graph, frontalKeys); + boost::shared_ptr conditional; + boost::shared_ptr newFactor; + boost::tie(conditional, newFactor) = + EliminatePreferCholesky(graph_z.graph, frontalKeys); - // Initialize the keysOfEliminated to be the keys of the - // eliminated GaussianConditional - keysOfEliminated = conditional_factor.first->keys(); - keysOfSeparator = conditional_factor.second->keys(); + // TODO(dellaert): always the same, and we already computed this in caller? + keysOfSeparator = newFactor->keys(); #ifdef HYBRID_TIMING gttoc_(hybrid_eliminate); #endif - GaussianConditional::shared_ptr conditional = conditional_factor.first; // Get the log of the log normalization constant inverse. double logZ = -conditional->logNormalizationConstant() + graph_z.constant; - return {conditional, {conditional_factor.second, logZ}}; + return {conditional, {newFactor, logZ}}; }; // Perform elimination! @@ -266,44 +262,31 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // Separate out decision tree into conditionals and remaining factors. auto pair = unzip(eliminationResults); - const auto &separatorFactors = pair.second; // Create the GaussianMixture from the conditionals auto conditional = boost::make_shared( frontalKeys, keysOfSeparator, discreteSeparator, pair.first); - // If there are no more continuous parents, then we should create here a - // DiscreteFactor, with the error for each discrete choice. + // If there are no more continuous parents, then we should create a + // DiscreteFactor here, with the error for each discrete choice. + const auto &separatorFactors = pair.second; if (keysOfSeparator.empty()) { auto factorProb = [&](const GaussianMixtureFactor::FactorAndConstant &factor_z) { - GaussianFactor::shared_ptr factor = factor_z.factor; - if (!factor) { - return 0.0; // If nullptr, return 0.0 probability - } else { - // This is the probability q(μ) at the MLE point. - double error = factor_z.error(VectorValues()); - return std::exp(-error); - } + // This is the probability q(μ) at the MLE point. + return factor_z.error(VectorValues()); }; - DecisionTree fdt(separatorFactors, factorProb); - // Normalize the values of decision tree to be valid probabilities - double sum = 0.0; - auto visitor = [&](double y) { sum += y; }; - fdt.visit(visitor); - // fdt = DecisionTree(fdt, - // [sum](const double &x) { return x / sum; - // }); - - auto discreteFactor = + const DecisionTree fdt(separatorFactors, factorProb); + const auto discreteFactor = boost::make_shared(discreteSeparator, fdt); return {boost::make_shared(conditional), boost::make_shared(discreteFactor)}; - } else { // Create a resulting GaussianMixtureFactor on the separator. - auto factor = boost::make_shared( + // Keys can be computed from the factors, so we should not need to pass them + // in. + const auto factor = boost::make_shared( KeyVector(continuousSeparator.begin(), continuousSeparator.end()), discreteSeparator, separatorFactors); return {boost::make_shared(conditional), factor}; From 665cb29b3f1a9095e42fd37b18d507a9e4622ad6 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 17:44:39 -0500 Subject: [PATCH 30/77] Make testcase exactly 5.0 mean --- gtsam/hybrid/tests/TinyHybridExample.h | 2 +- gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp | 9 ++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index 899a353b69..bdd638be7d 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -81,7 +81,7 @@ HybridGaussianFactorGraph createHybridGaussianFactorGraph( // Create a deterministic set of measurements: HybridValues values{{}, {{M(0), 0}}}; for (int i = 0; i < num_measurements; i++) { - values.insert(Z(i), Vector1(4.0 + 1.0 * i)); + values.insert(Z(i), Vector1(5.0 + 1.0 * i)); } return convertBayesNet(bayesNet, values); } else { diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 8f50895fe4..4ac3ac4c4e 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -653,12 +653,11 @@ TEST(HybridGaussianFactorGraph, SumFrontals) { // Create Gaussian mixture on X(0). using tiny::mode; + // regression, but mean checked to be 5.0 in both cases: const auto conditional0 = boost::make_shared( - X(0), Vector1(12.7279), - I_1x1 * 2.82843); // regression, but mean checked to be 4.5 - const auto conditional1 = boost::make_shared( - X(0), Vector1(10.0831), - I_1x1 * 2.02759); // regression, but mean 4.97297is close to prior. + X(0), Vector1(14.1421), I_1x1 * 2.82843), + conditional1 = boost::make_shared( + X(0), Vector1(10.1379), I_1x1 * 2.02759); GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1}); bayesNet.emplaceMixture(gm); // copy :-( From 2c7b3a2af093aee7ad3402fba65f12164ec92753 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 17:44:53 -0500 Subject: [PATCH 31/77] Refactoring in elimination --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 73 +++++++++++----------- 1 file changed, 36 insertions(+), 37 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 0ffc22ba12..adba9e88bc 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -188,11 +188,28 @@ discreteElimination(const HybridGaussianFactorGraph &factors, boost::make_shared(result.second)}; } +/* ************************************************************************ */ +// If any GaussianFactorGraph in the decision tree contains a nullptr, convert +// that leaf to an empty GaussianFactorGraph. Needed since the DecisionTree will +// otherwise create a GFG with a single (null) factor. +GaussianMixtureFactor::Sum removeEmpty(const GaussianMixtureFactor::Sum &sum) { + auto emptyGaussian = [](const GaussianMixtureFactor::GraphAndConstant + &graph_z) { + bool hasNull = + std::any_of(graph_z.graph.begin(), graph_z.graph.end(), + [](const GaussianFactor::shared_ptr &ptr) { return !ptr; }); + return hasNull + ? GaussianMixtureFactor::GraphAndConstant{GaussianFactorGraph(), + 0.0} + : graph_z; + }; + return GaussianMixtureFactor::Sum(sum, emptyGaussian); +} /* ************************************************************************ */ static std::pair hybridElimination(const HybridGaussianFactorGraph &factors, const Ordering &frontalKeys, - const KeySet &continuousSeparator, + const KeyVector &continuousSeparator, const std::set &discreteSeparatorSet) { // NOTE: since we use the special JunctionTree, // only possibility is continuous conditioned on discrete. @@ -203,27 +220,12 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // decision tree indexed by all discrete keys involved. GaussianMixtureFactor::Sum sum = factors.SumFrontals(); - // If a tree leaf contains nullptr, convert that leaf to an empty - // GaussianFactorGraph. Needed since the DecisionTree will otherwise create a - // GFG with a single (null) factor. - // TODO(dellaert): can SumFrontals not guarantee this? - auto emptyGaussian = - [](const GaussianMixtureFactor::GraphAndConstant &graph_z) { - bool hasNull = std::any_of( - graph_z.graph.begin(), graph_z.graph.end(), - [](const GaussianFactor::shared_ptr &ptr) { return !ptr; }); - - return hasNull ? GaussianMixtureFactor::GraphAndConstant( - GaussianFactorGraph(), 0.0) - : graph_z; - }; - sum = GaussianMixtureFactor::Sum(sum, emptyGaussian); + // TODO(dellaert): does SumFrontals not guarantee we do not need this? + sum = removeEmpty(sum); using EliminationPair = std::pair, GaussianMixtureFactor::FactorAndConstant>; - KeyVector keysOfSeparator; - // This is the elimination method on the leaf nodes auto eliminate = [&](const GaussianMixtureFactor::GraphAndConstant &graph_z) -> EliminationPair { @@ -240,15 +242,12 @@ hybridElimination(const HybridGaussianFactorGraph &factors, boost::tie(conditional, newFactor) = EliminatePreferCholesky(graph_z.graph, frontalKeys); - // TODO(dellaert): always the same, and we already computed this in caller? - keysOfSeparator = newFactor->keys(); - #ifdef HYBRID_TIMING gttoc_(hybrid_eliminate); #endif // Get the log of the log normalization constant inverse. - double logZ = -conditional->logNormalizationConstant() + graph_z.constant; + double logZ = graph_z.constant - conditional->logNormalizationConstant(); return {conditional, {newFactor, logZ}}; }; @@ -261,35 +260,35 @@ hybridElimination(const HybridGaussianFactorGraph &factors, #endif // Separate out decision tree into conditionals and remaining factors. - auto pair = unzip(eliminationResults); + GaussianMixture::Conditionals conditionals; + GaussianMixtureFactor::Factors newFactors; + std::tie(conditionals, newFactors) = unzip(eliminationResults); // Create the GaussianMixture from the conditionals - auto conditional = boost::make_shared( - frontalKeys, keysOfSeparator, discreteSeparator, pair.first); + auto gaussianMixture = boost::make_shared( + frontalKeys, continuousSeparator, discreteSeparator, conditionals); // If there are no more continuous parents, then we should create a // DiscreteFactor here, with the error for each discrete choice. - const auto &separatorFactors = pair.second; - if (keysOfSeparator.empty()) { + if (continuousSeparator.empty()) { auto factorProb = [&](const GaussianMixtureFactor::FactorAndConstant &factor_z) { // This is the probability q(μ) at the MLE point. return factor_z.error(VectorValues()); }; - const DecisionTree fdt(separatorFactors, factorProb); + const DecisionTree fdt(newFactors, factorProb); const auto discreteFactor = boost::make_shared(discreteSeparator, fdt); - return {boost::make_shared(conditional), + return {boost::make_shared(gaussianMixture), boost::make_shared(discreteFactor)}; } else { // Create a resulting GaussianMixtureFactor on the separator. - // Keys can be computed from the factors, so we should not need to pass them - // in. - const auto factor = boost::make_shared( - KeyVector(continuousSeparator.begin(), continuousSeparator.end()), - discreteSeparator, separatorFactors); - return {boost::make_shared(conditional), factor}; + // TODO(dellaert): Keys can be computed from the factors, so we should not + // need to pass them in. + return {boost::make_shared(gaussianMixture), + boost::make_shared( + continuousSeparator, discreteSeparator, newFactors)}; } } @@ -389,12 +388,12 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, // Fill in discrete discrete separator keys and continuous separator keys. std::set discreteSeparatorSet; - KeySet continuousSeparator; + KeyVector continuousSeparator; for (auto &k : separatorKeys) { if (mapFromKeyToDiscreteKey.find(k) != mapFromKeyToDiscreteKey.end()) { discreteSeparatorSet.insert(mapFromKeyToDiscreteKey.at(k)); } else { - continuousSeparator.insert(k); + continuousSeparator.push_back(k); } } From 4d313fae773e63c5a6d7a1aed81696058740d453 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 1 Jan 2023 18:01:48 -0500 Subject: [PATCH 32/77] Comment on constant --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index adba9e88bc..a5d27c188f 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -274,7 +274,11 @@ hybridElimination(const HybridGaussianFactorGraph &factors, auto factorProb = [&](const GaussianMixtureFactor::FactorAndConstant &factor_z) { // This is the probability q(μ) at the MLE point. - return factor_z.error(VectorValues()); + // return exp(-factor_z.error(VectorValues())); + // TODO(dellaert): this is not correct, since VectorValues() is not + // the MLE point. But it does not matter, as at the MLE point the + // error will be zero, hence: + return exp(-factor_z.constant); }; const DecisionTree fdt(newFactors, factorProb); const auto discreteFactor = From 064f17b3699c151a7c6d51839c70bab02a161549 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 01:05:52 -0500 Subject: [PATCH 33/77] Added two-measurement example --- .../tests/testHybridGaussianFactorGraph.cpp | 43 ++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 4ac3ac4c4e..b1271811f8 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -615,7 +615,7 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { } /* ****************************************************************************/ -// SumFrontals just assembles Gaussian factor graphs for each assignment. +// Check that SumFrontals assembles Gaussian factor graphs for each assignment. TEST(HybridGaussianFactorGraph, SumFrontals) { const int num_measurements = 1; const bool deterministic = true; @@ -647,6 +647,15 @@ TEST(HybridGaussianFactorGraph, SumFrontals) { EXPECT(assert_equal(expectedSum(d0), sum(d0), 1e-5)); EXPECT(assert_equal(expectedSum(d1), sum(d1), 1e-5)); +} + +/* ****************************************************************************/ +// Check that eliminating tiny net with 1 measurement yields correct result. +TEST(HybridGaussianFactorGraph, EliminateTiny1) { + const int num_measurements = 1; + const bool deterministic = true; + auto fg = + tiny::createHybridGaussianFactorGraph(num_measurements, deterministic); // Create expected Bayes Net: HybridBayesNet bayesNet; @@ -672,6 +681,38 @@ TEST(HybridGaussianFactorGraph, SumFrontals) { EXPECT(assert_equal(bayesNet, *posterior, 1e-4)); } +/* ****************************************************************************/ +// Check that eliminating tiny net with 2 measurements yields correct result. +TEST(HybridGaussianFactorGraph, EliminateTiny2) { + const int num_measurements = 2; + const bool deterministic = true; + auto fg = + tiny::createHybridGaussianFactorGraph(num_measurements, deterministic); + + // Create expected Bayes Net: + HybridBayesNet bayesNet; + + // Create Gaussian mixture on X(0). + using tiny::mode; + // regression, but mean checked to be > 5.0 in both cases: + const auto conditional0 = boost::make_shared( + X(0), Vector1(18.4752), I_1x1 * 3.4641), + conditional1 = boost::make_shared( + X(0), Vector1(10.3281), I_1x1 * 2.0548); + GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1}); + bayesNet.emplaceMixture(gm); // copy :-( + + // Add prior on mode. + bayesNet.emplaceDiscrete(mode, "4/6"); + + // Test elimination + Ordering ordering; + ordering.push_back(X(0)); + ordering.push_back(M(0)); + const auto posterior = fg.eliminateSequential(ordering); + EXPECT(assert_equal(bayesNet, *posterior, 1e-4)); +} + /* ************************************************************************* */ int main() { TestResult tr; From 8f5689b7624ed5ae090439de7abf29fce4cf5b89 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 2 Jan 2023 09:44:03 -0500 Subject: [PATCH 34/77] minor readjustment --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 10e12bf77f..442ffb4287 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -238,13 +238,14 @@ hybridElimination(const HybridGaussianFactorGraph &factors, keysOfEliminated = conditional_factor.first->keys(); keysOfSeparator = conditional_factor.second->keys(); + GaussianConditional::shared_ptr conditional = conditional_factor.first; + // Get the log of the log normalization constant inverse. + double logZ = -conditional->logNormalizationConstant() + graph_z.constant; + #ifdef HYBRID_TIMING gttoc_(hybrid_eliminate); #endif - GaussianConditional::shared_ptr conditional = conditional_factor.first; - // Get the log of the log normalization constant inverse. - double logZ = -conditional->logNormalizationConstant() + graph_z.constant; return {conditional, {conditional_factor.second, logZ}}; }; From cae98e1d3e276722e29d5047360adfd3985371c5 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 2 Jan 2023 09:44:43 -0500 Subject: [PATCH 35/77] rename eliminate to eliminateFunc --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 442ffb4287..133011b6ad 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -218,7 +218,8 @@ hybridElimination(const HybridGaussianFactorGraph &factors, KeyVector keysOfSeparator; // This is the elimination method on the leaf nodes - auto eliminate = [&](const GaussianMixtureFactor::GraphAndConstant &graph_z) + auto eliminateFunc = + [&](const GaussianMixtureFactor::GraphAndConstant &graph_z) -> EliminationPair { if (graph_z.graph.empty()) { return {nullptr, {nullptr, 0.0}}; @@ -250,7 +251,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, }; // Perform elimination! - DecisionTree eliminationResults(sum, eliminate); + DecisionTree eliminationResults(sum, eliminateFunc); #ifdef HYBRID_TIMING tictoc_print_(); From 312ba5fd523b3c45f9b720c4b39acfe540b7bb4c Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 09:45:04 -0500 Subject: [PATCH 36/77] Synced two examples --- gtsam/hybrid/tests/TinyHybridExample.h | 2 +- python/gtsam/tests/test_HybridFactorGraph.py | 19 +++++++++++++++++-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index bdd638be7d..f92b8254b6 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -81,7 +81,7 @@ HybridGaussianFactorGraph createHybridGaussianFactorGraph( // Create a deterministic set of measurements: HybridValues values{{}, {{M(0), 0}}}; for (int i = 0; i < num_measurements; i++) { - values.insert(Z(i), Vector1(5.0 + 1.0 * i)); + values.insert(Z(i), Vector1(5.0 + 0.1 * i)); } return convertBayesNet(bayesNet, values); } else { diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index ca051978be..75ab3e02c5 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -110,7 +110,7 @@ def tiny(num_measurements: int = 1) -> HybridBayesNet: [conditional0, conditional1]) # Create prior on X(0). - prior_on_x0 = GaussianConditional.FromMeanAndStddev(X(0), [5.0], 5.0) + prior_on_x0 = GaussianConditional.FromMeanAndStddev(X(0), [5.0], 0.5) bayesNet.addGaussian(prior_on_x0) # Add prior on mode. @@ -203,8 +203,14 @@ def test_ratio(self): sample: HybridValues = bayesNet.sample() # print(sample) + # Deterministic measurements: + deterministic = gtsam.VectorValues() + deterministic.insert(Z(0), [5.0]) + deterministic.insert(Z(1), [5.1]) + sample.update(deterministic) + # Estimate marginals using importance sampling. - marginals = self.estimate_marginals(bayesNet, sample) + marginals = self.estimate_marginals(bayesNet, sample, N=10000) print(f"True mode: {sample.atDiscrete(M(0))}") print(f"P(mode=0; z0, z1) = {marginals[0]}") print(f"P(mode=1; z0, z1) = {marginals[1]}") @@ -220,11 +226,20 @@ def test_ratio(self): # Create measurements from the sample. measurements = self.measurements(sample, [0, 1]) + print(measurements) # Calculate ratio between Bayes net probability and the factor graph: expected_ratio = self.calculate_ratio(bayesNet, fg, sample) # print(f"expected_ratio: {expected_ratio}\n") + # Print conditional and factor values side by side: + print("\nConditional and factor values:") + for i in range(4): + print(f"Conditional {i}:") + print(bayesNet.at(i).error(sample)) + print(f"Factor {i}:") + print(fg.at(i).error(sample)) + # Check with a number of other samples. for i in range(10): other = bayesNet.sample() From 7c270618ddcd6ec18735ee044afa743a7e4037ce Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 09:45:14 -0500 Subject: [PATCH 37/77] Added missing methods --- gtsam/hybrid/hybrid.i | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index 21a496eee5..87aa3bc149 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -40,6 +40,15 @@ virtual class HybridFactor { bool empty() const; size_t size() const; gtsam::KeyVector keys() const; + + // Standard interface: + double error(const gtsam::HybridValues &values) const; + bool isDiscrete() const; + bool isContinuous() const; + bool isHybrid() const; + size_t nrContinuous() const; + gtsam::DiscreteKeys discreteKeys() const; + gtsam::KeyVector continuousKeys() const; }; #include @@ -50,7 +59,13 @@ virtual class HybridConditional { bool equals(const gtsam::HybridConditional& other, double tol = 1e-9) const; size_t nrFrontals() const; size_t nrParents() const; + + // Standard interface: + gtsam::GaussianMixture* asMixture() const; + gtsam::GaussianConditional* asGaussian() const; + gtsam::DiscreteConditional* asDiscrete() const; gtsam::Factor* inner(); + double error(const gtsam::HybridValues& values) const; }; #include @@ -61,6 +76,7 @@ virtual class HybridDiscreteFactor { gtsam::DefaultKeyFormatter) const; bool equals(const gtsam::HybridDiscreteFactor& other, double tol = 1e-9) const; gtsam::Factor* inner(); + double error(const gtsam::HybridValues &values) const; }; #include From bd8d2ea2c1617a380c16ec93347bb3823fab428e Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 09:46:08 -0500 Subject: [PATCH 38/77] Added error for all versions - should become logDiensity? --- gtsam/hybrid/HybridConditional.cpp | 24 ++++++++++++++++++++---- gtsam/hybrid/HybridConditional.h | 10 +--------- 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 85112d922e..6657aec8c0 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -17,6 +17,7 @@ #include #include +#include #include #include @@ -107,15 +108,30 @@ bool HybridConditional::equals(const HybridFactor &other, double tol) const { auto other = e->asMixture(); return other != nullptr && gm->equals(*other, tol); } - if (auto gm = asGaussian()) { + if (auto gc = asGaussian()) { auto other = e->asGaussian(); - return other != nullptr && gm->equals(*other, tol); + return other != nullptr && gc->equals(*other, tol); } - if (auto gm = asDiscrete()) { + if (auto dc = asDiscrete()) { auto other = e->asDiscrete(); - return other != nullptr && gm->equals(*other, tol); + return other != nullptr && dc->equals(*other, tol); } return inner_->equals(*(e->inner_), tol); } +/* ************************************************************************ */ +double HybridConditional::error(const HybridValues &values) const { + if (auto gm = asMixture()) { + return gm->error(values); + } + if (auto gc = asGaussian()) { + return gc->error(values.continuous()); + } + if (auto dc = asDiscrete()) { + return -log((*dc)(values.discrete())); + } + throw std::runtime_error( + "HybridConditional::error: conditional type not handled"); +} + } // namespace gtsam diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index e949fb8659..a34d01388e 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -176,15 +176,7 @@ class GTSAM_EXPORT HybridConditional boost::shared_ptr inner() const { return inner_; } /// Return the error of the underlying conditional. - /// Currently only implemented for Gaussian mixture. - double error(const HybridValues& values) const override { - if (auto gm = asMixture()) { - return gm->error(values); - } else { - throw std::runtime_error( - "HybridConditional::error: only implemented for Gaussian mixture"); - } - } + double error(const HybridValues& values) const override; /// @} From e3a63aa77c902d49eb5fe033d6fc1034476d2dd4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 2 Jan 2023 10:03:56 -0500 Subject: [PATCH 39/77] check for 0 sum --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 133011b6ad..b68c9286be 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -285,6 +285,10 @@ hybridElimination(const HybridGaussianFactorGraph &factors, double sum = 0.0; auto visitor = [&](double y) { sum += y; }; fdt.visit(visitor); + // Check if sum is 0, and update accordingly. + if (sum == 0) { + sum = 1.0; + } // fdt = DecisionTree(fdt, // [sum](const double &x) { return x / sum; // }); From 40a67b517cb8151cde633b38ec3cacfb8c08506d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 2 Jan 2023 10:04:50 -0500 Subject: [PATCH 40/77] prune nonlinear by calling method rather than on the bayestree --- gtsam/hybrid/tests/testHybridNonlinearISAM.cpp | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index db0dc73c3d..8b5bb41acb 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -357,10 +357,9 @@ TEST(HybridNonlinearISAM, Incremental_approximate) { // Run update with pruning size_t maxComponents = 5; incrementalHybrid.update(graph1, initial); + incrementalHybrid.prune(maxComponents); HybridGaussianISAM bayesTree = incrementalHybrid.bayesTree(); - bayesTree.prune(maxComponents); - // Check if we have a bayes tree with 4 hybrid nodes, // each with 2, 4, 8, and 5 (pruned) leaves respetively. EXPECT_LONGS_EQUAL(4, bayesTree.size()); @@ -382,10 +381,9 @@ TEST(HybridNonlinearISAM, Incremental_approximate) { // Run update with pruning a second time. incrementalHybrid.update(graph2, initial); + incrementalHybrid.prune(maxComponents); bayesTree = incrementalHybrid.bayesTree(); - bayesTree.prune(maxComponents); - // Check if we have a bayes tree with pruned hybrid nodes, // with 5 (pruned) leaves. CHECK_EQUAL(5, bayesTree.size()); From 021ee1a5d9d053b494485e218710ad3a09c72122 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 12:34:55 -0500 Subject: [PATCH 41/77] Deterministic example, much more generic importance sampler --- python/gtsam/tests/test_HybridFactorGraph.py | 117 +++++++++++++++---- 1 file changed, 94 insertions(+), 23 deletions(-) diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index 75ab3e02c5..a7657e4112 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -82,10 +82,12 @@ def test_optimize(self): self.assertEqual(hv.atDiscrete(C(0)), 1) @staticmethod - def tiny(num_measurements: int = 1) -> HybridBayesNet: + def tiny(num_measurements: int = 1, prior_mean: float = 5.0, + prior_sigma: float = 0.5) -> HybridBayesNet: """ Create a tiny two variable hybrid model which represents - the generative probability P(z, x, n) = P(z | x, n)P(x)P(n). + the generative probability P(Z, x0, mode) = P(Z|x0, mode)P(x0)P(mode). + num_measurements: number of measurements in Z = {z0, z1...} """ # Create hybrid Bayes net. bayesNet = HybridBayesNet() @@ -110,7 +112,8 @@ def tiny(num_measurements: int = 1) -> HybridBayesNet: [conditional0, conditional1]) # Create prior on X(0). - prior_on_x0 = GaussianConditional.FromMeanAndStddev(X(0), [5.0], 0.5) + prior_on_x0 = GaussianConditional.FromMeanAndStddev( + X(0), [prior_mean], prior_sigma) bayesNet.addGaussian(prior_on_x0) # Add prior on mode. @@ -118,6 +121,28 @@ def tiny(num_measurements: int = 1) -> HybridBayesNet: return bayesNet + def test_evaluate(self): + """Test evaluate with two different prior noise models.""" + # TODO(dellaert): really a HBN test + # Create a tiny Bayes net P(x0) P(m0) P(z0|x0) + bayesNet1 = self.tiny(prior_sigma=0.5, num_measurements=1) + bayesNet2 = self.tiny(prior_sigma=5.0, num_measurements=1) + # bn1: # 1/sqrt(2*pi*0.5^2) + # bn2: # 1/sqrt(2*pi*5.0^2) + expected_ratio = np.sqrt(2*np.pi*5.0**2)/np.sqrt(2*np.pi*0.5**2) + mean0 = HybridValues() + mean0.insert(X(0), [5.0]) + mean0.insert(Z(0), [5.0]) + mean0.insert(M(0), 0) + self.assertAlmostEqual(bayesNet1.evaluate(mean0) / + bayesNet2.evaluate(mean0), expected_ratio, delta=1e-9) + mean1 = HybridValues() + mean1.insert(X(0), [5.0]) + mean1.insert(Z(0), [5.0]) + mean1.insert(M(0), 1) + self.assertAlmostEqual(bayesNet1.evaluate(mean1) / + bayesNet2.evaluate(mean1), expected_ratio, delta=1e-9) + @staticmethod def measurements(sample: HybridValues, indices) -> gtsam.VectorValues: """Create measurements from a sample, grabbing Z(i) where i in indices.""" @@ -143,21 +168,20 @@ def factor_graph_from_bayes_net(cls, bayesNet: HybridBayesNet, sample: HybridVal return fg @classmethod - def estimate_marginals(cls, bayesNet: HybridBayesNet, sample: HybridValues, N=10000): + def estimate_marginals(cls, target, proposal_density: HybridBayesNet, + N=10000): """Do importance sampling to get an estimate of the discrete marginal P(mode).""" - # Use prior on x0, mode as proposal density. - prior = cls.tiny(num_measurements=0) # just P(x0)P(mode) - - # Allocate space for marginals. + # Allocate space for marginals on mode. marginals = np.zeros((2,)) # Do importance sampling. - num_measurements = bayesNet.size() - 2 - measurements = cls.measurements(sample, range(num_measurements)) for s in range(N): - proposed = prior.sample() - proposed.insert(measurements) - weight = bayesNet.evaluate(proposed) / prior.evaluate(proposed) + proposed = proposal_density.sample() # sample from proposal + target_proposed = target(proposed) # evaluate target + # print(target_proposed, proposal_density.evaluate(proposed)) + weight = target_proposed / proposal_density.evaluate(proposed) + # print weight: + # print(f"weight: {weight}") marginals[proposed.atDiscrete(M(0))] += weight # print marginals: @@ -166,23 +190,68 @@ def estimate_marginals(cls, bayesNet: HybridBayesNet, sample: HybridValues, N=10 def test_tiny(self): """Test a tiny two variable hybrid model.""" - bayesNet = self.tiny() - sample = bayesNet.sample() - # print(sample) + prior_sigma = 0.5 + # P(x0)P(mode)P(z0|x0,mode) + bayesNet = self.tiny(prior_sigma=prior_sigma) + + # Deterministic values exactly at the mean, for both x and z: + values = HybridValues() + values.insert(X(0), [5.0]) + values.insert(M(0), 0) # low-noise, standard deviation 0.5 + z0: float = 5.0 + values.insert(Z(0), [z0]) + + def unnormalized_posterior(x): + """Posterior is proportional to joint, centered at 5.0 as well.""" + x.insert(Z(0), [z0]) + # print(x) + return bayesNet.evaluate(x) + + # Create proposal density on (x0, mode), making sure it has same mean: + posterior_information = 1/(prior_sigma**2) + 1/(0.5**2) + posterior_sigma = posterior_information**(-0.5) + print(f"Posterior sigma: {posterior_sigma}") + proposal_density = self.tiny( + num_measurements=0, prior_mean=5.0, prior_sigma=posterior_sigma) # Estimate marginals using importance sampling. - marginals = self.estimate_marginals(bayesNet, sample) - # print(f"True mode: {sample.atDiscrete(M(0))}") - # print(f"P(mode=0; z0) = {marginals[0]}") - # print(f"P(mode=1; z0) = {marginals[1]}") + marginals = self.estimate_marginals( + target=unnormalized_posterior, proposal_density=proposal_density, N=10_000) + print(f"True mode: {values.atDiscrete(M(0))}") + print(f"P(mode=0; z0) = {marginals[0]}") + print(f"P(mode=1; z0) = {marginals[1]}") # Check that the estimate is close to the true value. - self.assertAlmostEqual(marginals[0], 0.4, delta=0.1) - self.assertAlmostEqual(marginals[1], 0.6, delta=0.1) + self.assertAlmostEqual(marginals[0], 0.74, delta=0.01) + self.assertAlmostEqual(marginals[1], 0.26, delta=0.01) - fg = self.factor_graph_from_bayes_net(bayesNet, sample) + fg = self.factor_graph_from_bayes_net(bayesNet, values) self.assertEqual(fg.size(), 3) + # Test elimination. + ordering = gtsam.Ordering() + ordering.push_back(X(0)) + ordering.push_back(M(0)) + posterior = fg.eliminateSequential(ordering) + print(posterior) + + def true_posterior(x): + """Posterior from elimination.""" + x.insert(Z(0), [z0]) + # print(x) + return posterior.evaluate(x) + + # Estimate marginals using importance sampling. + marginals = self.estimate_marginals( + target=true_posterior, proposal_density=proposal_density) + print(f"True mode: {values.atDiscrete(M(0))}") + print(f"P(mode=0; z0) = {marginals[0]}") + print(f"P(mode=1; z0) = {marginals[1]}") + + # Check that the estimate is close to the true value. + self.assertAlmostEqual(marginals[0], 0.74, delta=0.01) + self.assertAlmostEqual(marginals[1], 0.26, delta=0.01) + @staticmethod def calculate_ratio(bayesNet: HybridBayesNet, fg: HybridGaussianFactorGraph, @@ -190,6 +259,7 @@ def calculate_ratio(bayesNet: HybridBayesNet, """Calculate ratio between Bayes net probability and the factor graph.""" return bayesNet.evaluate(sample) / fg.probPrime(sample) if fg.probPrime(sample) > 0 else 0 + @unittest.skip("This test is too slow.") def test_ratio(self): """ Given a tiny two variable hybrid model, with 2 measurements, @@ -269,5 +339,6 @@ def test_ratio(self): if (ratio > 0): self.assertAlmostEqual(ratio, expected_ratio) + if __name__ == "__main__": unittest.main() From fbfc20b88da8660819789b751a6f356182cb81dd Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 12:47:30 -0500 Subject: [PATCH 42/77] Fixed conversion arguments --- gtsam/hybrid/tests/TinyHybridExample.h | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index f92b8254b6..9175351c8e 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -61,12 +61,12 @@ HybridBayesNet createHybridBayesNet(int num_measurements = 1) { } HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, - const HybridValues& values) { + const VectorValues& measurements) { HybridGaussianFactorGraph fg; int num_measurements = bayesNet.size() - 2; for (int i = 0; i < num_measurements; i++) { auto conditional = bayesNet.atMixture(i); - auto factor = conditional->likelihood(values.continuousSubset({Z(i)})); + auto factor = conditional->likelihood({{Z(i), measurements.at(Z(i))}}); fg.push_back(factor); } fg.push_back(bayesNet.atGaussian(num_measurements)); @@ -79,14 +79,14 @@ HybridGaussianFactorGraph createHybridGaussianFactorGraph( auto bayesNet = createHybridBayesNet(num_measurements); if (deterministic) { // Create a deterministic set of measurements: - HybridValues values{{}, {{M(0), 0}}}; + VectorValues measurements; for (int i = 0; i < num_measurements; i++) { - values.insert(Z(i), Vector1(5.0 + 0.1 * i)); + measurements.insert(Z(i), Vector1(5.0 + 0.1 * i)); } - return convertBayesNet(bayesNet, values); + return convertBayesNet(bayesNet, measurements); } else { // Create a random set of measurements: - return convertBayesNet(bayesNet, bayesNet.sample()); + return convertBayesNet(bayesNet, bayesNet.sample().continuous()); } } From 06aed5397ab5360a4955ca9f189384f9c64f18f2 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 13:06:17 -0500 Subject: [PATCH 43/77] rename --- .../tests/testHybridGaussianFactorGraph.cpp | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index b1271811f8..7a0653dd54 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -658,7 +658,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { tiny::createHybridGaussianFactorGraph(num_measurements, deterministic); // Create expected Bayes Net: - HybridBayesNet bayesNet; + HybridBayesNet expectedBayesNet; // Create Gaussian mixture on X(0). using tiny::mode; @@ -668,17 +668,17 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { conditional1 = boost::make_shared( X(0), Vector1(10.1379), I_1x1 * 2.02759); GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1}); - bayesNet.emplaceMixture(gm); // copy :-( + expectedBayesNet.emplaceMixture(gm); // copy :-( // Add prior on mode. - bayesNet.emplaceDiscrete(mode, "4/6"); + expectedBayesNet.emplaceDiscrete(mode, "74/26"); // Test elimination Ordering ordering; ordering.push_back(X(0)); ordering.push_back(M(0)); const auto posterior = fg.eliminateSequential(ordering); - EXPECT(assert_equal(bayesNet, *posterior, 1e-4)); + EXPECT(assert_equal(expectedBayesNet, *posterior, 1e-4)); } /* ****************************************************************************/ @@ -690,7 +690,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { tiny::createHybridGaussianFactorGraph(num_measurements, deterministic); // Create expected Bayes Net: - HybridBayesNet bayesNet; + HybridBayesNet expectedBayesNet; // Create Gaussian mixture on X(0). using tiny::mode; @@ -700,17 +700,17 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { conditional1 = boost::make_shared( X(0), Vector1(10.3281), I_1x1 * 2.0548); GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1}); - bayesNet.emplaceMixture(gm); // copy :-( + expectedBayesNet.emplaceMixture(gm); // copy :-( // Add prior on mode. - bayesNet.emplaceDiscrete(mode, "4/6"); + expectedBayesNet.emplaceDiscrete(mode, "4/6"); // Test elimination Ordering ordering; ordering.push_back(X(0)); ordering.push_back(M(0)); const auto posterior = fg.eliminateSequential(ordering); - EXPECT(assert_equal(bayesNet, *posterior, 1e-4)); + EXPECT(assert_equal(expectedBayesNet, *posterior, 1e-4)); } /* ************************************************************************* */ From f8d75abfebf40978ea97e9886c6b4d2c1463a614 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 13:11:22 -0500 Subject: [PATCH 44/77] name change of Sum to GaussianFactorGraphTree and SumFrontals to assembleGraphTree --- gtsam/hybrid/GaussianMixture.cpp | 16 ++--- gtsam/hybrid/GaussianMixture.h | 19 +++-- gtsam/hybrid/GaussianMixtureFactor.cpp | 12 ++-- gtsam/hybrid/GaussianMixtureFactor.h | 41 ++--------- gtsam/hybrid/HybridBayesNet.cpp | 13 ++++ gtsam/hybrid/HybridFactor.h | 32 +++++++++ gtsam/hybrid/HybridGaussianFactorGraph.cpp | 72 +++++++++---------- gtsam/hybrid/HybridGaussianFactorGraph.h | 2 +- .../tests/testGaussianMixtureFactor.cpp | 2 +- .../tests/testHybridGaussianFactorGraph.cpp | 9 +-- 10 files changed, 119 insertions(+), 99 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 8b8c623992..ad636a5d40 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -51,28 +51,28 @@ GaussianMixture::GaussianMixture( Conditionals(discreteParents, conditionalsList)) {} /* *******************************************************************************/ -GaussianMixture::Sum GaussianMixture::add( - const GaussianMixture::Sum &sum) const { - using Y = GaussianMixtureFactor::GraphAndConstant; +GaussianFactorGraphTree GaussianMixture::add( + const GaussianFactorGraphTree &sum) const { + using Y = GraphAndConstant; auto add = [](const Y &graph1, const Y &graph2) { auto result = graph1.graph; result.push_back(graph2.graph); return Y(result, graph1.constant + graph2.constant); }; - const Sum tree = asGaussianFactorGraphTree(); + const auto tree = asGaussianFactorGraphTree(); return sum.empty() ? tree : sum.apply(tree, add); } /* *******************************************************************************/ -GaussianMixture::Sum GaussianMixture::asGaussianFactorGraphTree() const { +GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { auto lambda = [](const GaussianConditional::shared_ptr &conditional) { GaussianFactorGraph result; result.push_back(conditional); if (conditional) { - return GaussianMixtureFactor::GraphAndConstant( + return GraphAndConstant( result, conditional->logNormalizationConstant()); } else { - return GaussianMixtureFactor::GraphAndConstant(result, 0.0); + return GraphAndConstant(result, 0.0); } }; return {conditionals_, lambda}; @@ -108,7 +108,7 @@ bool GaussianMixture::equals(const HybridFactor &lf, double tol) const { // This will return false if either conditionals_ is empty or e->conditionals_ // is empty, but not if both are empty or both are not empty: if (conditionals_.empty() ^ e->conditionals_.empty()) return false; -std::cout << "checking" << std::endl; + std::cout << "checking" << std::endl; // Check the base and the factors: return BaseFactor::equals(*e, tol) && conditionals_.equals(e->conditionals_, diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index e8ba78d615..240f79dcd7 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -59,9 +59,6 @@ class GTSAM_EXPORT GaussianMixture using BaseFactor = HybridFactor; using BaseConditional = Conditional; - /// Alias for DecisionTree of GaussianFactorGraphs - using Sum = DecisionTree; - /// typedef for Decision Tree of Gaussian Conditionals using Conditionals = DecisionTree; @@ -71,7 +68,7 @@ class GTSAM_EXPORT GaussianMixture /** * @brief Convert a DecisionTree of factors into a DT of Gaussian FGs. */ - Sum asGaussianFactorGraphTree() const; + GaussianFactorGraphTree asGaussianFactorGraphTree() const; /** * @brief Helper function to get the pruner functor. @@ -172,6 +169,16 @@ class GTSAM_EXPORT GaussianMixture */ double error(const HybridValues &values) const override; + // /// Calculate probability density for given values `x`. + // double evaluate(const HybridValues &values) const; + + // /// Evaluate probability density, sugar. + // double operator()(const HybridValues &values) const { return + // evaluate(values); } + + // /// Calculate log-density for given values `x`. + // double logDensity(const HybridValues &values) const; + /** * @brief Prune the decision tree of Gaussian factors as per the discrete * `decisionTree`. @@ -186,9 +193,9 @@ class GTSAM_EXPORT GaussianMixture * maintaining the decision tree structure. * * @param sum Decision Tree of Gaussian Factor Graphs - * @return Sum + * @return GaussianFactorGraphTree */ - Sum add(const Sum &sum) const; + GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; /// @} }; diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 746c883b29..657ef334b0 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -100,25 +100,25 @@ double GaussianMixtureFactor::constant(const DiscreteValues &assignment) const { // } /* *******************************************************************************/ -GaussianMixtureFactor::Sum GaussianMixtureFactor::add( - const GaussianMixtureFactor::Sum &sum) const { - using Y = GaussianMixtureFactor::GraphAndConstant; +GaussianFactorGraphTree GaussianMixtureFactor::add( + const GaussianFactorGraphTree &sum) const { + using Y = GraphAndConstant; auto add = [](const Y &graph1, const Y &graph2) { auto result = graph1.graph; result.push_back(graph2.graph); return Y(result, graph1.constant + graph2.constant); }; - const Sum tree = asGaussianFactorGraphTree(); + const auto tree = asGaussianFactorGraphTree(); return sum.empty() ? tree : sum.apply(tree, add); } /* *******************************************************************************/ -GaussianMixtureFactor::Sum GaussianMixtureFactor::asGaussianFactorGraphTree() +GaussianFactorGraphTree GaussianMixtureFactor::asGaussianFactorGraphTree() const { auto wrap = [](const FactorAndConstant &factor_z) { GaussianFactorGraph result; result.push_back(factor_z.factor); - return GaussianMixtureFactor::GraphAndConstant(result, factor_z.constant); + return GraphAndConstant(result, factor_z.constant); }; return {factors_, wrap}; } diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 69149ac053..04b2414a70 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -72,35 +72,6 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { } }; - /// Gaussian factor graph and log of normalizing constant. - struct GraphAndConstant { - GaussianFactorGraph graph; - double constant; - - GraphAndConstant(const GaussianFactorGraph &graph, double constant) - : graph(graph), constant(constant) {} - - // Check pointer equality. - bool operator==(const GraphAndConstant &other) const { - return graph == other.graph && constant == other.constant; - } - - // Implement GTSAM-style print: - void print(const std::string &s = "Graph: ", - const KeyFormatter &formatter = DefaultKeyFormatter) const { - graph.print(s, formatter); - std::cout << "Constant: " << constant << std::endl; - } - - // Implement GTSAM-style equals: - bool equals(const GraphAndConstant &other, double tol = 1e-9) const { - return graph.equals(other.graph, tol) && - fabs(constant - other.constant) < tol; - } - }; - - using Sum = DecisionTree; - /// typedef for Decision Tree of Gaussian factors and log-constant. using Factors = DecisionTree; using Mixture = DecisionTree; @@ -113,9 +84,9 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * @brief Helper function to return factors and functional to create a * DecisionTree of Gaussian Factor Graphs. * - * @return Sum (DecisionTree) + * @return GaussianFactorGraphTree */ - Sum asGaussianFactorGraphTree() const; + GaussianFactorGraphTree asGaussianFactorGraphTree() const; public: /// @name Constructors @@ -184,7 +155,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { * variables. * @return Sum */ - Sum add(const Sum &sum) const; + GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; /** * @brief Compute error of the GaussianMixtureFactor as a tree. @@ -202,7 +173,8 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { double error(const HybridValues &values) const override; /// Add MixtureFactor to a Sum, syntactic sugar. - friend Sum &operator+=(Sum &sum, const GaussianMixtureFactor &factor) { + friend GaussianFactorGraphTree &operator+=( + GaussianFactorGraphTree &sum, const GaussianMixtureFactor &factor) { sum = factor.add(sum); return sum; } @@ -215,7 +187,6 @@ struct traits : public Testable { }; template <> -struct traits - : public Testable {}; +struct traits : public Testable {}; } // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index cd6f181abb..fa144ecb3a 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -279,18 +279,31 @@ double HybridBayesNet::evaluate(const HybridValues &values) const { const VectorValues &continuousValues = values.continuous(); double logDensity = 0.0, probability = 1.0; + bool debug = false; // Iterate over each conditional. for (auto &&conditional : *this) { + // TODO: should be delegated to derived classes. if (auto gm = conditional->asMixture()) { const auto component = (*gm)(discreteValues); logDensity += component->logDensity(continuousValues); + if (debug) { + GTSAM_PRINT(continuousValues); + std::cout << "component->logDensity(continuousValues) = " + << component->logDensity(continuousValues) << std::endl; + } } else if (auto gc = conditional->asGaussian()) { // If continuous only, evaluate the probability and multiply. logDensity += gc->logDensity(continuousValues); + if (debug) + std::cout << "gc->logDensity(continuousValues) = " + << gc->logDensity(continuousValues) << std::endl; } else if (auto dc = conditional->asDiscrete()) { // Conditional is discrete-only, so return its probability. probability *= dc->operator()(discreteValues); + if (debug) + std::cout << "dc->operator()(discreteValues) = " + << dc->operator()(discreteValues) << std::endl; } } diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index a28fee8ed8..326570d6e8 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -21,6 +21,8 @@ #include #include #include +#include +#include #include #include @@ -28,6 +30,36 @@ namespace gtsam { class HybridValues; +/// Gaussian factor graph and log of normalizing constant. +struct GraphAndConstant { + GaussianFactorGraph graph; + double constant; + + GraphAndConstant(const GaussianFactorGraph &graph, double constant) + : graph(graph), constant(constant) {} + + // Check pointer equality. + bool operator==(const GraphAndConstant &other) const { + return graph == other.graph && constant == other.constant; + } + + // Implement GTSAM-style print: + void print(const std::string &s = "Graph: ", + const KeyFormatter &formatter = DefaultKeyFormatter) const { + graph.print(s, formatter); + std::cout << "Constant: " << constant << std::endl; + } + + // Implement GTSAM-style equals: + bool equals(const GraphAndConstant &other, double tol = 1e-9) const { + return graph.equals(other.graph, tol) && + fabs(constant - other.constant) < tol; + } +}; + +/// Alias for DecisionTree of GaussianFactorGraphs +using GaussianFactorGraphTree = DecisionTree; + KeyVector CollectKeys(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys); KeyVector CollectKeys(const KeyVector &keys1, const KeyVector &keys2); diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index a5d27c188f..e8b7eb9507 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -59,51 +59,44 @@ namespace gtsam { template class EliminateableFactorGraph; /* ************************************************************************ */ -static GaussianMixtureFactor::Sum addGaussian( - const GaussianMixtureFactor::Sum &sum, +static GaussianFactorGraphTree addGaussian( + const GaussianFactorGraphTree &sum, const GaussianFactor::shared_ptr &factor) { // If the decision tree is not initialized, then initialize it. if (sum.empty()) { GaussianFactorGraph result; result.push_back(factor); - return GaussianMixtureFactor::Sum( - GaussianMixtureFactor::GraphAndConstant(result, 0.0)); + return GaussianFactorGraphTree(GraphAndConstant(result, 0.0)); } else { - auto add = [&factor]( - const GaussianMixtureFactor::GraphAndConstant &graph_z) { + auto add = [&factor](const GraphAndConstant &graph_z) { auto result = graph_z.graph; result.push_back(factor); - return GaussianMixtureFactor::GraphAndConstant(result, graph_z.constant); + return GraphAndConstant(result, graph_z.constant); }; return sum.apply(add); } } /* ************************************************************************ */ -// TODO(dellaert): At the time I though "Sum" was a good name, but coming back -// to it after a while I think "SumFrontals" is better.it's a terrible name. -// Also, the implementation is inconsistent. I think we should just have a -// virtual method in HybridFactor that adds to the "Sum" object, like -// addGaussian. Finally, we need to document why deferredFactors need to be -// added last, which I would undo if possible. -// Implementation-wise, it's probably more efficient to first collect the -// discrete keys, and then loop over all assignments to populate a vector. -GaussianMixtureFactor::Sum HybridGaussianFactorGraph::SumFrontals() const { - // sum out frontals, this is the factor on the separator - gttic(sum); - - GaussianMixtureFactor::Sum sum; +// TODO(dellaert): We need to document why deferredFactors need to be +// added last, which I would undo if possible. Implementation-wise, it's +// probably more efficient to first collect the discrete keys, and then loop +// over all assignments to populate a vector. +GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { + gttic(assembleGraphTree); + + GaussianFactorGraphTree result; std::vector deferredFactors; for (auto &f : factors_) { // TODO(dellaert): just use a virtual method defined in HybridFactor. if (f->isHybrid()) { if (auto gm = boost::dynamic_pointer_cast(f)) { - sum = gm->add(sum); + result = gm->add(result); } if (auto gm = boost::dynamic_pointer_cast(f)) { - sum = gm->asMixture()->add(sum); + result = gm->asMixture()->add(result); } } else if (f->isContinuous()) { @@ -134,12 +127,12 @@ GaussianMixtureFactor::Sum HybridGaussianFactorGraph::SumFrontals() const { } for (auto &f : deferredFactors) { - sum = addGaussian(sum, f); + result = addGaussian(result, f); } - gttoc(sum); + gttoc(assembleGraphTree); - return sum; + return result; } /* ************************************************************************ */ @@ -192,18 +185,14 @@ discreteElimination(const HybridGaussianFactorGraph &factors, // If any GaussianFactorGraph in the decision tree contains a nullptr, convert // that leaf to an empty GaussianFactorGraph. Needed since the DecisionTree will // otherwise create a GFG with a single (null) factor. -GaussianMixtureFactor::Sum removeEmpty(const GaussianMixtureFactor::Sum &sum) { - auto emptyGaussian = [](const GaussianMixtureFactor::GraphAndConstant - &graph_z) { +GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) { + auto emptyGaussian = [](const GraphAndConstant &graph_z) { bool hasNull = std::any_of(graph_z.graph.begin(), graph_z.graph.end(), [](const GaussianFactor::shared_ptr &ptr) { return !ptr; }); - return hasNull - ? GaussianMixtureFactor::GraphAndConstant{GaussianFactorGraph(), - 0.0} - : graph_z; + return hasNull ? GraphAndConstant{GaussianFactorGraph(), 0.0} : graph_z; }; - return GaussianMixtureFactor::Sum(sum, emptyGaussian); + return GaussianFactorGraphTree(sum, emptyGaussian); } /* ************************************************************************ */ static std::pair @@ -218,17 +207,16 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // Collect all the factors to create a set of Gaussian factor graphs in a // decision tree indexed by all discrete keys involved. - GaussianMixtureFactor::Sum sum = factors.SumFrontals(); + GaussianFactorGraphTree sum = factors.assembleGraphTree(); - // TODO(dellaert): does SumFrontals not guarantee we do not need this? + // TODO(dellaert): does assembleGraphTree not guarantee we do not need this? sum = removeEmpty(sum); using EliminationPair = std::pair, GaussianMixtureFactor::FactorAndConstant>; // This is the elimination method on the leaf nodes - auto eliminate = [&](const GaussianMixtureFactor::GraphAndConstant &graph_z) - -> EliminationPair { + auto eliminate = [&](const GraphAndConstant &graph_z) -> EliminationPair { if (graph_z.graph.empty()) { return {nullptr, {nullptr, 0.0}}; } @@ -247,7 +235,15 @@ hybridElimination(const HybridGaussianFactorGraph &factors, #endif // Get the log of the log normalization constant inverse. - double logZ = graph_z.constant - conditional->logNormalizationConstant(); + double logZ = -conditional->logNormalizationConstant(); + + // IF this is the last continuous variable to eliminated, we need to + // calculate the error here: the value of all factors at the mean, see + // ml_map_rao.pdf. + if (continuousSeparator.empty()) { + const auto posterior_mean = conditional->solve(VectorValues()); + logZ += graph_z.graph.error(posterior_mean); + } return {conditional, {newFactor, logZ}}; }; diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index ed3ada2ff8..144d144bbd 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -245,7 +245,7 @@ class GTSAM_EXPORT HybridGaussianFactorGraph * one for A and one for B. The leaves of the tree will be the Gaussian * factors that have only continuous keys. */ - GaussianMixtureFactor::Sum SumFrontals() const; + GaussianFactorGraphTree assembleGraphTree() const; /// @} }; diff --git a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 9dc1c9576a..16b33a0d58 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -80,7 +80,7 @@ TEST(GaussianMixtureFactor, Sum) { // Create sum of two mixture factors: it will be a decision tree now on both // discrete variables m1 and m2: - GaussianMixtureFactor::Sum sum; + GaussianFactorGraphTree sum; sum += mixtureFactorA; sum += mixtureFactorB; diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 7a0653dd54..d048c0e353 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -615,15 +615,16 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { } /* ****************************************************************************/ -// Check that SumFrontals assembles Gaussian factor graphs for each assignment. -TEST(HybridGaussianFactorGraph, SumFrontals) { +// Check that assembleGraphTree assembles Gaussian factor graphs for each +// assignment. +TEST(HybridGaussianFactorGraph, assembleGraphTree) { const int num_measurements = 1; const bool deterministic = true; auto fg = tiny::createHybridGaussianFactorGraph(num_measurements, deterministic); EXPECT_LONGS_EQUAL(3, fg.size()); - auto sum = fg.SumFrontals(); + auto sum = fg.assembleGraphTree(); // Get mixture factor: auto mixture = boost::dynamic_pointer_cast(fg.at(0)); @@ -638,7 +639,7 @@ TEST(HybridGaussianFactorGraph, SumFrontals) { // Expected decision tree with two factor graphs: // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0) - GaussianMixture::Sum expectedSum{ + GaussianFactorGraphTree expectedSum{ M(0), {GaussianFactorGraph(std::vector{mixture->factor(d0), prior}), mixture->constant(d0)}, From 12d02bed1a32591e0fae50d1f1fdf3f4be392bc8 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 13:22:16 -0500 Subject: [PATCH 45/77] Right marginals for tiny1 --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 23 +++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index e8b7eb9507..5b28e06d3d 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -209,7 +209,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // decision tree indexed by all discrete keys involved. GaussianFactorGraphTree sum = factors.assembleGraphTree(); - // TODO(dellaert): does assembleGraphTree not guarantee we do not need this? + // TODO(dellaert): does assembleGraphTree not guarantee this? sum = removeEmpty(sum); using EliminationPair = std::pair, @@ -234,16 +234,16 @@ hybridElimination(const HybridGaussianFactorGraph &factors, gttoc_(hybrid_eliminate); #endif + const double logZ = graph_z.constant - conditional->logNormalizationConstant(); // Get the log of the log normalization constant inverse. - double logZ = -conditional->logNormalizationConstant(); - - // IF this is the last continuous variable to eliminated, we need to - // calculate the error here: the value of all factors at the mean, see - // ml_map_rao.pdf. - if (continuousSeparator.empty()) { - const auto posterior_mean = conditional->solve(VectorValues()); - logZ += graph_z.graph.error(posterior_mean); - } + // double logZ = -conditional->logNormalizationConstant(); + // // IF this is the last continuous variable to eliminated, we need to + // // calculate the error here: the value of all factors at the mean, see + // // ml_map_rao.pdf. + // if (continuousSeparator.empty()) { + // const auto posterior_mean = conditional->solve(VectorValues()); + // logZ += graph_z.graph.error(posterior_mean); + // } return {conditional, {newFactor, logZ}}; }; @@ -270,11 +270,12 @@ hybridElimination(const HybridGaussianFactorGraph &factors, auto factorProb = [&](const GaussianMixtureFactor::FactorAndConstant &factor_z) { // This is the probability q(μ) at the MLE point. + // factor_z.factor is a factor without keys, just containing the residual. // return exp(-factor_z.error(VectorValues())); // TODO(dellaert): this is not correct, since VectorValues() is not // the MLE point. But it does not matter, as at the MLE point the // error will be zero, hence: - return exp(-factor_z.constant); + return exp(factor_z.constant); }; const DecisionTree fdt(newFactors, factorProb); const auto discreteFactor = From 797ac344fa781394433ac0d01c4411ceb8419ba4 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 13:24:28 -0500 Subject: [PATCH 46/77] Same correct error with factor_z.error() --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 5b28e06d3d..12396f5b4d 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -271,11 +271,11 @@ hybridElimination(const HybridGaussianFactorGraph &factors, [&](const GaussianMixtureFactor::FactorAndConstant &factor_z) { // This is the probability q(μ) at the MLE point. // factor_z.factor is a factor without keys, just containing the residual. - // return exp(-factor_z.error(VectorValues())); + return exp(-factor_z.error(VectorValues())); // TODO(dellaert): this is not correct, since VectorValues() is not // the MLE point. But it does not matter, as at the MLE point the // error will be zero, hence: - return exp(factor_z.constant); + // return exp(factor_z.constant); }; const DecisionTree fdt(newFactors, factorProb); const auto discreteFactor = From 625977ee067225d8d6049cadac73feb5bb206c67 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 16:07:25 -0500 Subject: [PATCH 47/77] Example with 2 measurements agrees with importance sampling --- gtsam/hybrid/tests/TinyHybridExample.h | 34 ++-- .../tests/testHybridGaussianFactorGraph.cpp | 32 ++-- python/gtsam/tests/test_HybridFactorGraph.py | 148 +++++++++--------- 3 files changed, 108 insertions(+), 106 deletions(-) diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index 9175351c8e..1df7e3a318 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -1,6 +1,6 @@ /* ---------------------------------------------------------------------------- - * GTSAM Copyright 2010-2022, Georgia Tech Research Corporation, + * GTSAM Copyright 2010-2023, Georgia Tech Research Corporation, * Atlanta, Georgia 30332-0415 * All Rights Reserved * Authors: Frank Dellaert, et al. (see THANKS for the full author list) @@ -10,10 +10,9 @@ * -------------------------------------------------------------------------- */ /* - * @file TinyHybrdiExample.h - * @date Mar 11, 2022 - * @author Varun Agrawal - * @author Fan Jiang + * @file TinyHybridExample.h + * @date December, 2022 + * @author Frank Dellaert */ #include @@ -33,10 +32,9 @@ const DiscreteKey mode{M(0), 2}; /** * Create a tiny two variable hybrid model which represents - * the generative probability P(z, x, n) = P(z | x, n)P(x)P(n). + * the generative probability P(z,x,mode) = P(z|x,mode)P(x)P(mode). */ HybridBayesNet createHybridBayesNet(int num_measurements = 1) { - // Create hybrid Bayes net. HybridBayesNet bayesNet; // Create Gaussian mixture z_i = x0 + noise for each measurement. @@ -60,6 +58,9 @@ HybridBayesNet createHybridBayesNet(int num_measurements = 1) { return bayesNet; } +/** + * Convert a hybrid Bayes net to a hybrid Gaussian factor graph. + */ HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, const VectorValues& measurements) { HybridGaussianFactorGraph fg; @@ -74,18 +75,19 @@ HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, return fg; } +/** + * Create a tiny two variable hybrid factor graph which represents a discrete + * mode and a continuous variable x0, given a number of measurements of the + * continuous variable x0. If no measurements are given, they are sampled from + * the generative Bayes net model HybridBayesNet::Example(num_measurements) + */ HybridGaussianFactorGraph createHybridGaussianFactorGraph( - int num_measurements = 1, bool deterministic = false) { + int num_measurements = 1, + boost::optional measurements = boost::none) { auto bayesNet = createHybridBayesNet(num_measurements); - if (deterministic) { - // Create a deterministic set of measurements: - VectorValues measurements; - for (int i = 0; i < num_measurements; i++) { - measurements.insert(Z(i), Vector1(5.0 + 0.1 * i)); - } - return convertBayesNet(bayesNet, measurements); + if (measurements) { + return convertBayesNet(bayesNet, *measurements); } else { - // Create a random set of measurements: return convertBayesNet(bayesNet, bayesNet.sample().continuous()); } } diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index d048c0e353..fa371cf163 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -618,10 +618,10 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { // Check that assembleGraphTree assembles Gaussian factor graphs for each // assignment. TEST(HybridGaussianFactorGraph, assembleGraphTree) { + using symbol_shorthand::Z; const int num_measurements = 1; - const bool deterministic = true; - auto fg = - tiny::createHybridGaussianFactorGraph(num_measurements, deterministic); + auto fg = tiny::createHybridGaussianFactorGraph( + num_measurements, VectorValues{{Z(0), Vector1(5.0)}}); EXPECT_LONGS_EQUAL(3, fg.size()); auto sum = fg.assembleGraphTree(); @@ -653,10 +653,10 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) { /* ****************************************************************************/ // Check that eliminating tiny net with 1 measurement yields correct result. TEST(HybridGaussianFactorGraph, EliminateTiny1) { + using symbol_shorthand::Z; const int num_measurements = 1; - const bool deterministic = true; - auto fg = - tiny::createHybridGaussianFactorGraph(num_measurements, deterministic); + auto fg = tiny::createHybridGaussianFactorGraph( + num_measurements, VectorValues{{Z(0), Vector1(5.0)}}); // Create expected Bayes Net: HybridBayesNet expectedBayesNet; @@ -679,39 +679,41 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { ordering.push_back(X(0)); ordering.push_back(M(0)); const auto posterior = fg.eliminateSequential(ordering); - EXPECT(assert_equal(expectedBayesNet, *posterior, 1e-4)); + EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01)); } /* ****************************************************************************/ // Check that eliminating tiny net with 2 measurements yields correct result. TEST(HybridGaussianFactorGraph, EliminateTiny2) { + // Create factor graph with 2 measurements such that posterior mean = 5.0. + using symbol_shorthand::Z; const int num_measurements = 2; - const bool deterministic = true; - auto fg = - tiny::createHybridGaussianFactorGraph(num_measurements, deterministic); + auto fg = tiny::createHybridGaussianFactorGraph( + num_measurements, + VectorValues{{Z(0), Vector1(4.0)}, {Z(1), Vector1(6.0)}}); // Create expected Bayes Net: HybridBayesNet expectedBayesNet; // Create Gaussian mixture on X(0). using tiny::mode; - // regression, but mean checked to be > 5.0 in both cases: + // regression, but mean checked to be 5.0 in both cases: const auto conditional0 = boost::make_shared( - X(0), Vector1(18.4752), I_1x1 * 3.4641), + X(0), Vector1(17.3205), I_1x1 * 3.4641), conditional1 = boost::make_shared( - X(0), Vector1(10.3281), I_1x1 * 2.0548); + X(0), Vector1(10.274), I_1x1 * 2.0548); GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1}); expectedBayesNet.emplaceMixture(gm); // copy :-( // Add prior on mode. - expectedBayesNet.emplaceDiscrete(mode, "4/6"); + expectedBayesNet.emplaceDiscrete(mode, "23/77"); // Test elimination Ordering ordering; ordering.push_back(X(0)); ordering.push_back(M(0)); const auto posterior = fg.eliminateSequential(ordering); - EXPECT(assert_equal(expectedBayesNet, *posterior, 1e-4)); + EXPECT(assert_equal(expectedBayesNet, *posterior, 0.01)); } /* ************************************************************************* */ diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py index a7657e4112..f83b954425 100644 --- a/python/gtsam/tests/test_HybridFactorGraph.py +++ b/python/gtsam/tests/test_HybridFactorGraph.py @@ -18,9 +18,9 @@ import gtsam from gtsam import (DiscreteConditional, DiscreteKeys, GaussianConditional, - GaussianMixture, GaussianMixtureFactor, HybridBayesNet, HybridValues, - HybridGaussianFactorGraph, JacobianFactor, Ordering, - noiseModel) + GaussianMixture, GaussianMixtureFactor, HybridBayesNet, + HybridGaussianFactorGraph, HybridValues, JacobianFactor, + Ordering, noiseModel) class TestHybridGaussianFactorGraph(GtsamTestCase): @@ -96,16 +96,16 @@ def tiny(num_measurements: int = 1, prior_mean: float = 5.0, mode = (M(0), 2) # Create Gaussian mixture Z(0) = X(0) + noise for each measurement. - I = np.eye(1) + I_1x1 = np.eye(1) keys = DiscreteKeys() keys.push_back(mode) for i in range(num_measurements): conditional0 = GaussianConditional.FromMeanAndStddev(Z(i), - I, + I_1x1, X(0), [0], sigma=0.5) conditional1 = GaussianConditional.FromMeanAndStddev(Z(i), - I, + I_1x1, X(0), [0], sigma=3) bayesNet.emplaceMixture([Z(i)], [X(0)], keys, @@ -135,24 +135,27 @@ def test_evaluate(self): mean0.insert(Z(0), [5.0]) mean0.insert(M(0), 0) self.assertAlmostEqual(bayesNet1.evaluate(mean0) / - bayesNet2.evaluate(mean0), expected_ratio, delta=1e-9) + bayesNet2.evaluate(mean0), expected_ratio, + delta=1e-9) mean1 = HybridValues() mean1.insert(X(0), [5.0]) mean1.insert(Z(0), [5.0]) mean1.insert(M(0), 1) self.assertAlmostEqual(bayesNet1.evaluate(mean1) / - bayesNet2.evaluate(mean1), expected_ratio, delta=1e-9) + bayesNet2.evaluate(mean1), expected_ratio, + delta=1e-9) @staticmethod def measurements(sample: HybridValues, indices) -> gtsam.VectorValues: - """Create measurements from a sample, grabbing Z(i) where i in indices.""" + """Create measurements from a sample, grabbing Z(i) for indices.""" measurements = gtsam.VectorValues() for i in indices: measurements.insert(Z(i), sample.at(Z(i))) return measurements @classmethod - def factor_graph_from_bayes_net(cls, bayesNet: HybridBayesNet, sample: HybridValues): + def factor_graph_from_bayes_net(cls, bayesNet: HybridBayesNet, + sample: HybridValues): """Create a factor graph from the Bayes net with sampled measurements. The factor graph is `P(x)P(n) ϕ(x, n; z0) ϕ(x, n; z1) ...` and thus represents the same joint probability as the Bayes net. @@ -170,7 +173,7 @@ def factor_graph_from_bayes_net(cls, bayesNet: HybridBayesNet, sample: HybridVal @classmethod def estimate_marginals(cls, target, proposal_density: HybridBayesNet, N=10000): - """Do importance sampling to get an estimate of the discrete marginal P(mode).""" + """Do importance sampling to estimate discrete marginal P(mode).""" # Allocate space for marginals on mode. marginals = np.zeros((2,)) @@ -190,11 +193,11 @@ def estimate_marginals(cls, target, proposal_density: HybridBayesNet, def test_tiny(self): """Test a tiny two variable hybrid model.""" - prior_sigma = 0.5 # P(x0)P(mode)P(z0|x0,mode) + prior_sigma = 0.5 bayesNet = self.tiny(prior_sigma=prior_sigma) - # Deterministic values exactly at the mean, for both x and z: + # Deterministic values exactly at the mean, for both x and Z: values = HybridValues() values.insert(X(0), [5.0]) values.insert(M(0), 0) # low-noise, standard deviation 0.5 @@ -204,22 +207,20 @@ def test_tiny(self): def unnormalized_posterior(x): """Posterior is proportional to joint, centered at 5.0 as well.""" x.insert(Z(0), [z0]) - # print(x) return bayesNet.evaluate(x) # Create proposal density on (x0, mode), making sure it has same mean: posterior_information = 1/(prior_sigma**2) + 1/(0.5**2) posterior_sigma = posterior_information**(-0.5) - print(f"Posterior sigma: {posterior_sigma}") proposal_density = self.tiny( num_measurements=0, prior_mean=5.0, prior_sigma=posterior_sigma) # Estimate marginals using importance sampling. marginals = self.estimate_marginals( - target=unnormalized_posterior, proposal_density=proposal_density, N=10_000) - print(f"True mode: {values.atDiscrete(M(0))}") - print(f"P(mode=0; z0) = {marginals[0]}") - print(f"P(mode=1; z0) = {marginals[1]}") + target=unnormalized_posterior, proposal_density=proposal_density) + # print(f"True mode: {values.atDiscrete(M(0))}") + # print(f"P(mode=0; Z) = {marginals[0]}") + # print(f"P(mode=1; Z) = {marginals[1]}") # Check that the estimate is close to the true value. self.assertAlmostEqual(marginals[0], 0.74, delta=0.01) @@ -233,20 +234,18 @@ def unnormalized_posterior(x): ordering.push_back(X(0)) ordering.push_back(M(0)) posterior = fg.eliminateSequential(ordering) - print(posterior) def true_posterior(x): """Posterior from elimination.""" x.insert(Z(0), [z0]) - # print(x) return posterior.evaluate(x) # Estimate marginals using importance sampling. marginals = self.estimate_marginals( target=true_posterior, proposal_density=proposal_density) - print(f"True mode: {values.atDiscrete(M(0))}") - print(f"P(mode=0; z0) = {marginals[0]}") - print(f"P(mode=1; z0) = {marginals[1]}") + # print(f"True mode: {values.atDiscrete(M(0))}") + # print(f"P(mode=0; z0) = {marginals[0]}") + # print(f"P(mode=1; z0) = {marginals[1]}") # Check that the estimate is close to the true value. self.assertAlmostEqual(marginals[0], 0.74, delta=0.01) @@ -256,65 +255,65 @@ def true_posterior(x): def calculate_ratio(bayesNet: HybridBayesNet, fg: HybridGaussianFactorGraph, sample: HybridValues): - """Calculate ratio between Bayes net probability and the factor graph.""" - return bayesNet.evaluate(sample) / fg.probPrime(sample) if fg.probPrime(sample) > 0 else 0 + """Calculate ratio between Bayes net and factor graph.""" + return bayesNet.evaluate(sample) / fg.probPrime(sample) if \ + fg.probPrime(sample) > 0 else 0 - @unittest.skip("This test is too slow.") def test_ratio(self): """ - Given a tiny two variable hybrid model, with 2 measurements, - test the ratio of the bayes net model representing P(z, x, n)=P(z|x, n)P(x)P(n) + Given a tiny two variable hybrid model, with 2 measurements, test the + ratio of the bayes net model representing P(z,x,n)=P(z|x, n)P(x)P(n) and the factor graph P(x, n | z)=P(x | n, z)P(n|z), both of which represent the same posterior. """ - # Create the Bayes net representing the generative model P(z, x, n)=P(z|x, n)P(x)P(n) - bayesNet = self.tiny(num_measurements=2) - # Sample from the Bayes net. - sample: HybridValues = bayesNet.sample() - # print(sample) - - # Deterministic measurements: - deterministic = gtsam.VectorValues() - deterministic.insert(Z(0), [5.0]) - deterministic.insert(Z(1), [5.1]) - sample.update(deterministic) + # Create generative model P(z, x, n)=P(z|x, n)P(x)P(n) + prior_sigma = 0.5 + bayesNet = self.tiny(prior_sigma=prior_sigma, num_measurements=2) + + # Deterministic values exactly at the mean, for both x and Z: + values = HybridValues() + values.insert(X(0), [5.0]) + values.insert(M(0), 0) # high-noise, standard deviation 3 + measurements = gtsam.VectorValues() + measurements.insert(Z(0), [4.0]) + measurements.insert(Z(1), [6.0]) + values.insert(measurements) + + def unnormalized_posterior(x): + """Posterior is proportional to joint, centered at 5.0 as well.""" + x.insert(measurements) + return bayesNet.evaluate(x) + + # Create proposal density on (x0, mode), making sure it has same mean: + posterior_information = 1/(prior_sigma**2) + 2.0/(3.0**2) + posterior_sigma = posterior_information**(-0.5) + proposal_density = self.tiny( + num_measurements=0, prior_mean=5.0, prior_sigma=posterior_sigma) # Estimate marginals using importance sampling. - marginals = self.estimate_marginals(bayesNet, sample, N=10000) - print(f"True mode: {sample.atDiscrete(M(0))}") - print(f"P(mode=0; z0, z1) = {marginals[0]}") - print(f"P(mode=1; z0, z1) = {marginals[1]}") - - # Check marginals based on sampled mode. - if sample.atDiscrete(M(0)) == 0: - self.assertGreater(marginals[0], marginals[1]) - else: - self.assertGreater(marginals[1], marginals[0]) - - fg = self.factor_graph_from_bayes_net(bayesNet, sample) - self.assertEqual(fg.size(), 4) + marginals = self.estimate_marginals( + target=unnormalized_posterior, proposal_density=proposal_density) + # print(f"True mode: {values.atDiscrete(M(0))}") + # print(f"P(mode=0; Z) = {marginals[0]}") + # print(f"P(mode=1; Z) = {marginals[1]}") + + # Check that the estimate is close to the true value. + self.assertAlmostEqual(marginals[0], 0.23, delta=0.01) + self.assertAlmostEqual(marginals[1], 0.77, delta=0.01) - # Create measurements from the sample. - measurements = self.measurements(sample, [0, 1]) - print(measurements) + # Convert to factor graph using measurements. + fg = self.factor_graph_from_bayes_net(bayesNet, values) + self.assertEqual(fg.size(), 4) # Calculate ratio between Bayes net probability and the factor graph: - expected_ratio = self.calculate_ratio(bayesNet, fg, sample) + expected_ratio = self.calculate_ratio(bayesNet, fg, values) # print(f"expected_ratio: {expected_ratio}\n") - # Print conditional and factor values side by side: - print("\nConditional and factor values:") - for i in range(4): - print(f"Conditional {i}:") - print(bayesNet.at(i).error(sample)) - print(f"Factor {i}:") - print(fg.at(i).error(sample)) - # Check with a number of other samples. for i in range(10): - other = bayesNet.sample() - other.update(measurements) - ratio = self.calculate_ratio(bayesNet, fg, other) + samples = bayesNet.sample() + samples.update(measurements) + ratio = self.calculate_ratio(bayesNet, fg, samples) # print(f"Ratio: {ratio}\n") if (ratio > 0): self.assertAlmostEqual(ratio, expected_ratio) @@ -324,18 +323,17 @@ def test_ratio(self): ordering.push_back(X(0)) ordering.push_back(M(0)) posterior = fg.eliminateSequential(ordering) - print(posterior) # Calculate ratio between Bayes net probability and the factor graph: - expected_ratio = self.calculate_ratio(posterior, fg, sample) - print(f"expected_ratio: {expected_ratio}\n") + expected_ratio = self.calculate_ratio(posterior, fg, values) + # print(f"expected_ratio: {expected_ratio}\n") # Check with a number of other samples. for i in range(10): - other = posterior.sample() - other.insert(measurements) - ratio = self.calculate_ratio(posterior, fg, other) - print(f"Ratio: {ratio}\n") + samples = posterior.sample() + samples.insert(measurements) + ratio = self.calculate_ratio(posterior, fg, samples) + # print(f"Ratio: {ratio}\n") if (ratio > 0): self.assertAlmostEqual(ratio, expected_ratio) From c3f04695b0fc82f3052b9cc6c6abba917dc7ad4e Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 18:36:12 -0500 Subject: [PATCH 48/77] Add mean to test --- gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index a4de4a1ae9..534119dc62 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -638,22 +638,30 @@ conditional 2: Hybrid P( x2 | m0 m1) 0 0 Leaf p(x2) R = [ 10.0494 ] d = [ -10.1489 ] + mean: 1 elements + x2: -1.0099 No noise model 0 1 Leaf p(x2) R = [ 10.0494 ] d = [ -10.1479 ] + mean: 1 elements + x2: -1.0098 No noise model 1 Choice(m0) 1 0 Leaf p(x2) R = [ 10.0494 ] d = [ -10.0504 ] + mean: 1 elements + x2: -1.0001 No noise model 1 1 Leaf p(x2) R = [ 10.0494 ] d = [ -10.0494 ] + mean: 1 elements + x2: -1 No noise model )"; From f726cf657178b6c251b20950f398af726d514ba5 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 2 Jan 2023 18:36:50 -0500 Subject: [PATCH 49/77] f(x0, x1, m0; z0, z1) now has constant ratios ! --- gtsam/hybrid/tests/testHybridEstimation.cpp | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index d457d039f2..3144bd4998 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -485,14 +485,14 @@ TEST(HybridEstimation, CorrectnessViaSampling) { // 2. Eliminate into BN const Ordering ordering = fg->getHybridOrdering(); - HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering); + const HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering); // Set up sampling std::mt19937_64 rng(11); // Compute the log-ratio between the Bayes net and the factor graph. auto compute_ratio = [&](const HybridValues& sample) -> double { - return bn->error(sample) - fg->error(sample); + return bn->evaluate(sample) / fg->probPrime(sample); }; // The error evaluated by the factor graph and the Bayes net should differ by @@ -500,7 +500,7 @@ TEST(HybridEstimation, CorrectnessViaSampling) { const HybridValues sample = bn->sample(&rng); double expected_ratio = compute_ratio(sample); // regression - EXPECT_DOUBLES_EQUAL(1.9477340410546764, expected_ratio, 1e-9); + EXPECT_DOUBLES_EQUAL(0.728588, expected_ratio, 1e-6); // 3. Do sampling constexpr int num_samples = 10; @@ -509,9 +509,7 @@ TEST(HybridEstimation, CorrectnessViaSampling) { const HybridValues sample = bn->sample(&rng); // 4. Check that the ratio is constant. - // TODO(Varun) The ratio changes based on the mode - // std::cout << compute_ratio(sample) << std::endl; - // EXPECT_DOUBLES_EQUAL(expected_ratio, compute_ratio(sample), 1e-9); + EXPECT_DOUBLES_EQUAL(expected_ratio, compute_ratio(sample), 1e-6); } } From 5c87d7d51beaa0f163eedc15edfab13274ab4b3c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 00:50:16 -0500 Subject: [PATCH 50/77] disable test instead of commenting out --- gtsam/hybrid/tests/testHybridEstimation.cpp | 26 ++++++++++----------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 6d222e322e..ad51ccfe10 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -114,7 +114,7 @@ TEST(HybridEstimation, Full) { /****************************************************************************/ // Test approximate inference with an additional pruning step. -TEST(HybridEstimation, Incremental) { +TEST_DISABLED(HybridEstimation, Incremental) { size_t K = 15; std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; @@ -154,21 +154,21 @@ TEST(HybridEstimation, Incremental) { /*TODO(Varun) Gives degenerate result due to probability underflow. Need to normalize probabilities. */ - // HybridValues delta = smoother.hybridBayesNet().optimize(); + HybridValues delta = smoother.hybridBayesNet().optimize(); - // Values result = initial.retract(delta.continuous()); + Values result = initial.retract(delta.continuous()); - // DiscreteValues expected_discrete; - // for (size_t k = 0; k < K - 1; k++) { - // expected_discrete[M(k)] = discrete_seq[k]; - // } - // EXPECT(assert_equal(expected_discrete, delta.discrete())); + DiscreteValues expected_discrete; + for (size_t k = 0; k < K - 1; k++) { + expected_discrete[M(k)] = discrete_seq[k]; + } + EXPECT(assert_equal(expected_discrete, delta.discrete())); - // Values expected_continuous; - // for (size_t k = 0; k < K; k++) { - // expected_continuous.insert(X(k), measurements[k]); - // } - // EXPECT(assert_equal(expected_continuous, result)); + Values expected_continuous; + for (size_t k = 0; k < K; k++) { + expected_continuous.insert(X(k), measurements[k]); + } + EXPECT(assert_equal(expected_continuous, result)); } /** From 38f3209d975fcf1e3c44d37c32aa1d65734ffe78 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 01:49:57 -0500 Subject: [PATCH 51/77] fix GaussianConditional print test --- gtsam/linear/tests/testGaussianConditional.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditional.cpp index 144269b449..d8c3f94552 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditional.cpp @@ -507,6 +507,8 @@ TEST(GaussianConditional, Print) { " R = [ 1 0 ]\n" " [ 0 1 ]\n" " d = [ 20 40 ]\n" + " mean: 1 elements\n" + " x0: 20 40\n" "isotropic dim=2 sigma=3\n"; EXPECT(assert_print_equal(expected, conditional, "GaussianConditional")); From 195dddfdebd56e94bdc8189474ca06f926efb3bd Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 01:50:16 -0500 Subject: [PATCH 52/77] clean up HybridGaussianFactorGraph --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 6f7ca1e1c4..7b32b90b7e 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -174,7 +174,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } } - // TODO(dellaert): This does sum-product. For max-product, use EliminateForMPE + // NOTE: This does sum-product. For max-product, use EliminateForMPE. auto result = EliminateDiscrete(dfg, frontalKeys); return {boost::make_shared(result.first), @@ -194,6 +194,7 @@ GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) { }; return GaussianFactorGraphTree(sum, emptyGaussian); } + /* ************************************************************************ */ static std::pair hybridElimination(const HybridGaussianFactorGraph &factors, @@ -209,7 +210,9 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // decision tree indexed by all discrete keys involved. GaussianFactorGraphTree sum = factors.assembleGraphTree(); - // TODO(dellaert): does assembleGraphTree not guarantee this? + // Convert factor graphs with a nullptr to an empty factor graph. + // This is done after assembly since it is non-trivial to keep track of which + // FG has a nullptr as we're looping over the factors. sum = removeEmpty(sum); using EliminationPair = std::pair, @@ -230,7 +233,8 @@ hybridElimination(const HybridGaussianFactorGraph &factors, boost::tie(conditional, newFactor) = EliminatePreferCholesky(graph_z.graph, frontalKeys); - // Get the log of the log normalization constant inverse. + // Get the log of the log normalization constant inverse and + // add it to the previous constant. const double logZ = graph_z.constant - conditional->logNormalizationConstant(); // Get the log of the log normalization constant inverse. @@ -273,13 +277,9 @@ hybridElimination(const HybridGaussianFactorGraph &factors, auto factorProb = [&](const GaussianMixtureFactor::FactorAndConstant &factor_z) { // This is the probability q(μ) at the MLE point. - // factor_z.factor is a factor without keys, just containing the - // residual. + // factor_z.factor is a factor without keys, + // just containing the residual. return exp(-factor_z.error(VectorValues())); - // TODO(dellaert): this is not correct, since VectorValues() is not - // the MLE point. But it does not matter, as at the MLE point the - // error will be zero, hence: - // return exp(factor_z.constant); }; const DecisionTree fdt(newFactors, factorProb); @@ -301,8 +301,6 @@ hybridElimination(const HybridGaussianFactorGraph &factors, boost::make_shared(discreteFactor)}; } else { // Create a resulting GaussianMixtureFactor on the separator. - // TODO(dellaert): Keys can be computed from the factors, so we should not - // need to pass them in. return {boost::make_shared(gaussianMixture), boost::make_shared( continuousSeparator, discreteSeparator, newFactors)}; From 47346c5024da195dfcbe9ecbec45c27914f94c40 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 02:07:51 -0500 Subject: [PATCH 53/77] move GraphAndConstant traits definition to HybridFactor --- gtsam/hybrid/GaussianMixtureFactor.h | 3 --- gtsam/hybrid/HybridFactor.h | 3 +++ 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 04b2414a70..9138d6b30b 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -186,7 +186,4 @@ template <> struct traits : public Testable { }; -template <> -struct traits : public Testable {}; - } // namespace gtsam diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 326570d6e8..8c1b0dad31 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -192,4 +192,7 @@ class GTSAM_EXPORT HybridFactor : public Factor { template <> struct traits : public Testable {}; +template <> +struct traits : public Testable {}; + } // namespace gtsam From ca1c517f8af481d7a2ee3575281bbabe8b6503af Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 02:10:29 -0500 Subject: [PATCH 54/77] remove extra print statements --- gtsam/hybrid/HybridBayesNet.cpp | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index fa144ecb3a..4404ccfdc8 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -279,7 +279,6 @@ double HybridBayesNet::evaluate(const HybridValues &values) const { const VectorValues &continuousValues = values.continuous(); double logDensity = 0.0, probability = 1.0; - bool debug = false; // Iterate over each conditional. for (auto &&conditional : *this) { @@ -287,23 +286,14 @@ double HybridBayesNet::evaluate(const HybridValues &values) const { if (auto gm = conditional->asMixture()) { const auto component = (*gm)(discreteValues); logDensity += component->logDensity(continuousValues); - if (debug) { - GTSAM_PRINT(continuousValues); - std::cout << "component->logDensity(continuousValues) = " - << component->logDensity(continuousValues) << std::endl; - } + } else if (auto gc = conditional->asGaussian()) { // If continuous only, evaluate the probability and multiply. logDensity += gc->logDensity(continuousValues); - if (debug) - std::cout << "gc->logDensity(continuousValues) = " - << gc->logDensity(continuousValues) << std::endl; + } else if (auto dc = conditional->asDiscrete()) { // Conditional is discrete-only, so return its probability. probability *= dc->operator()(discreteValues); - if (debug) - std::cout << "dc->operator()(discreteValues) = " - << dc->operator()(discreteValues) << std::endl; } } From 7825ffd6d2d1c0b95c347a2521fcf44125cbe3c6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 03:47:27 -0500 Subject: [PATCH 55/77] fix tests due to change to EliminateDiscrete --- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 12 ++++++------ .../hybrid/tests/testHybridNonlinearFactorGraph.cpp | 2 +- gtsam/hybrid/tests/testHybridNonlinearISAM.cpp | 12 ++++++------ 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index a9649f83ff..1ce10b452f 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -177,19 +177,19 @@ TEST(HybridGaussianElimination, IncrementalInference) { // Test the probability values with regression tests. DiscreteValues assignment; - EXPECT(assert_equal(0.000956191, m00_prob, 1e-5)); + EXPECT(assert_equal(0.0952922, m00_prob, 1e-5)); assignment[M(0)] = 0; assignment[M(1)] = 0; - EXPECT(assert_equal(0.000956191, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.0952922, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 1; assignment[M(1)] = 0; - EXPECT(assert_equal(0.00283728, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.282758, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 0; assignment[M(1)] = 1; - EXPECT(assert_equal(0.00315253, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.314175, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 1; assignment[M(1)] = 1; - EXPECT(assert_equal(0.00308831, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.307775, (*discreteConditional)(assignment), 1e-5)); // Check if the clique conditional generated from incremental elimination // matches that of batch elimination. @@ -199,7 +199,7 @@ TEST(HybridGaussianElimination, IncrementalInference) { isam[M(1)]->conditional()->inner()); // Account for the probability terms from evaluating continuous FGs DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}}; - vector probs = {0.00095619114, 0.0031525308, 0.0028372777, 0.0030883072}; + vector probs = {0.095292197, 0.31417524, 0.28275772, 0.30777485}; auto expectedConditional = boost::make_shared(discrete_keys, probs); EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6)); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 534119dc62..d84f4b352e 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -443,7 +443,7 @@ TEST(HybridFactorGraph, Full_Elimination) { ordering.clear(); for (size_t k = 0; k < self.K - 1; k++) ordering += M(k); discreteBayesNet = - *discrete_fg.eliminateSequential(ordering, EliminateForMPE); + *discrete_fg.eliminateSequential(ordering, EliminateDiscrete); } // Create ordering. diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 8b5bb41acb..68a55abdd1 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -195,19 +195,19 @@ TEST(HybridNonlinearISAM, IncrementalInference) { // Test the probability values with regression tests. DiscreteValues assignment; - EXPECT(assert_equal(0.000956191, m00_prob, 1e-5)); + EXPECT(assert_equal(0.0952922, m00_prob, 1e-5)); assignment[M(0)] = 0; assignment[M(1)] = 0; - EXPECT(assert_equal(0.000956191, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.0952922, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 1; assignment[M(1)] = 0; - EXPECT(assert_equal(0.00283728, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.282758, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 0; assignment[M(1)] = 1; - EXPECT(assert_equal(0.00315253, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.314175, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 1; assignment[M(1)] = 1; - EXPECT(assert_equal(0.00308831, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.307775, (*discreteConditional)(assignment), 1e-5)); // Check if the clique conditional generated from incremental elimination // matches that of batch elimination. @@ -216,7 +216,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) { bayesTree[M(1)]->conditional()->inner()); // Account for the probability terms from evaluating continuous FGs DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}}; - vector probs = {0.00095619114, 0.0031525308, 0.0028372777, 0.0030883072}; + vector probs = {0.095292197, 0.31417524, 0.28275772, 0.30777485}; auto expectedConditional = boost::make_shared(discrete_keys, probs); EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6)); From f117da2367b522194f3c4970031cb0ccfc5fdebe Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 03:47:34 -0500 Subject: [PATCH 56/77] remove extra print --- gtsam/hybrid/GaussianMixture.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index ad636a5d40..fdeea8ccdb 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -108,7 +108,7 @@ bool GaussianMixture::equals(const HybridFactor &lf, double tol) const { // This will return false if either conditionals_ is empty or e->conditionals_ // is empty, but not if both are empty or both are not empty: if (conditionals_.empty() ^ e->conditionals_.empty()) return false; - std::cout << "checking" << std::endl; + // Check the base and the factors: return BaseFactor::equals(*e, tol) && conditionals_.equals(e->conditionals_, From cb885fb980b11eded918f41d3c04fa414f2bbdd2 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 03:47:55 -0500 Subject: [PATCH 57/77] check for nullptr in HybridConditional::equals --- gtsam/hybrid/HybridConditional.cpp | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 6657aec8c0..611f9b0e75 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -117,6 +117,22 @@ bool HybridConditional::equals(const HybridFactor &other, double tol) const { return other != nullptr && dc->equals(*other, tol); } return inner_->equals(*(e->inner_), tol); + + if (inner_) { + if (e->inner_) { + // Both the inner_ factors are not null + return inner_->equals(*(e->inner_), tol); + } else { + return false; + } + } else { + if (e->inner_) { + return false; + } else { + // Both inner_ are null + return true; + } + } } /* ************************************************************************ */ From 46acba591c22ac9a4f12eff7f07c4a6cb7cf3f19 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 03:48:16 -0500 Subject: [PATCH 58/77] serialize inner_, need to test --- gtsam/hybrid/HybridConditional.h | 1 + 1 file changed, 1 insertion(+) diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index a34d01388e..da7c1421e3 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -187,6 +187,7 @@ class GTSAM_EXPORT HybridConditional void serialize(Archive& ar, const unsigned int /*version*/) { ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseFactor); ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseConditional); + ar& BOOST_SERIALIZATION_NVP(inner_); } }; // HybridConditional From 41c73fd63ec3da0d3b4943879ed6d377c66cae6c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 03:48:45 -0500 Subject: [PATCH 59/77] comment out failing tests, need to serialize DecisionTree --- gtsam/hybrid/tests/testHybridBayesNet.cpp | 12 +++++++----- gtsam/hybrid/tests/testHybridBayesTree.cpp | 16 +++++++++------- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 5cc6566fbe..43eebcf889 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -23,8 +23,8 @@ #include #include -#include "TinyHybridExample.h" #include "Switching.h" +#include "TinyHybridExample.h" // Include for test suite #include @@ -244,7 +244,7 @@ TEST(HybridBayesNet, Error) { EXPECT(assert_equal(expected_pruned_error, pruned_error_tree, 1e-9)); // Verify error computation and check for specific error value - DiscreteValues discrete_values {{M(0), 1}, {M(1), 1}}; + DiscreteValues discrete_values{{M(0), 1}, {M(1), 1}}; double total_error = 0; for (size_t idx = 0; idx < hybridBayesNet->size(); idx++) { @@ -337,9 +337,11 @@ TEST(HybridBayesNet, Serialization) { Ordering ordering = s.linearizedFactorGraph.getHybridOrdering(); HybridBayesNet hbn = *(s.linearizedFactorGraph.eliminateSequential(ordering)); - EXPECT(equalsObj(hbn)); - EXPECT(equalsXML(hbn)); - EXPECT(equalsBinary(hbn)); + // TODO(Varun) Serialization of inner factor doesn't work. Requires + // serialization support for all hybrid factors. + // EXPECT(equalsObj(hbn)); + // EXPECT(equalsXML(hbn)); + // EXPECT(equalsBinary(hbn)); } /* ****************************************************************************/ diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp index b4d049210a..1e65103830 100644 --- a/gtsam/hybrid/tests/testHybridBayesTree.cpp +++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp @@ -155,7 +155,7 @@ TEST(HybridBayesTree, Optimize) { dfg.push_back( boost::dynamic_pointer_cast(factor->inner())); } - + // Add the probabilities for each branch DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}, {M(2), 2}}; vector probs = {0.012519475, 0.041280228, 0.075018647, 0.081663656, @@ -211,10 +211,10 @@ TEST(HybridBayesTree, Choose) { ordering += M(0); ordering += M(1); ordering += M(2); - - //TODO(Varun) get segfault if ordering not provided + + // TODO(Varun) get segfault if ordering not provided auto bayesTree = s.linearizedFactorGraph.eliminateMultifrontal(ordering); - + auto expected_gbt = bayesTree->choose(assignment); EXPECT(assert_equal(expected_gbt, gbt)); @@ -229,9 +229,11 @@ TEST(HybridBayesTree, Serialization) { *(s.linearizedFactorGraph.eliminateMultifrontal(ordering)); using namespace gtsam::serializationTestHelpers; - EXPECT(equalsObj(hbt)); - EXPECT(equalsXML(hbt)); - EXPECT(equalsBinary(hbt)); + // TODO(Varun) Serialization of inner factor doesn't work. Requires + // serialization support for all hybrid factors. + // EXPECT(equalsObj(hbt)); + // EXPECT(equalsXML(hbt)); + // EXPECT(equalsBinary(hbt)); } /* ************************************************************************* */ From e01f7e74568dbe9f4c0f288d52d49d1f67e05834 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 03:51:34 -0500 Subject: [PATCH 60/77] kill unnecessary method --- gtsam/hybrid/GaussianMixtureFactor.cpp | 8 -------- 1 file changed, 8 deletions(-) diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 657ef334b0..57f42e6f1d 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -91,14 +91,6 @@ double GaussianMixtureFactor::constant(const DiscreteValues &assignment) const { return factors_(assignment).constant; } -/* *******************************************************************************/ -// NOTE(dellaert): this was not used and is expensive. -// const GaussianMixtureFactor::Mixture GaussianMixtureFactor::factors() const { -// return Mixture(factors_, [](const FactorAndConstant &factor_z) { -// return factor_z.factor; -// }); -// } - /* *******************************************************************************/ GaussianFactorGraphTree GaussianMixtureFactor::add( const GaussianFactorGraphTree &sum) const { From 9e7fcc81cda1ad5e129fe0c6ff59ef375dd74a9a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 05:18:13 -0500 Subject: [PATCH 61/77] make header functions as inline --- gtsam/hybrid/tests/TinyHybridExample.h | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index 1df7e3a318..ba04263f87 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -34,7 +34,7 @@ const DiscreteKey mode{M(0), 2}; * Create a tiny two variable hybrid model which represents * the generative probability P(z,x,mode) = P(z|x,mode)P(x)P(mode). */ -HybridBayesNet createHybridBayesNet(int num_measurements = 1) { +inline HybridBayesNet createHybridBayesNet(int num_measurements = 1) { HybridBayesNet bayesNet; // Create Gaussian mixture z_i = x0 + noise for each measurement. @@ -61,8 +61,8 @@ HybridBayesNet createHybridBayesNet(int num_measurements = 1) { /** * Convert a hybrid Bayes net to a hybrid Gaussian factor graph. */ -HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, - const VectorValues& measurements) { +inline HybridGaussianFactorGraph convertBayesNet( + const HybridBayesNet& bayesNet, const VectorValues& measurements) { HybridGaussianFactorGraph fg; int num_measurements = bayesNet.size() - 2; for (int i = 0; i < num_measurements; i++) { @@ -81,7 +81,7 @@ HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, * continuous variable x0. If no measurements are given, they are sampled from * the generative Bayes net model HybridBayesNet::Example(num_measurements) */ -HybridGaussianFactorGraph createHybridGaussianFactorGraph( +inline HybridGaussianFactorGraph createHybridGaussianFactorGraph( int num_measurements = 1, boost::optional measurements = boost::none) { auto bayesNet = createHybridBayesNet(num_measurements); From 3771d638353d559f3b73cc5deb8cbabe36eac695 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 13:37:32 -0500 Subject: [PATCH 62/77] simplify HybridConditional equality check --- gtsam/hybrid/HybridConditional.cpp | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 611f9b0e75..8371a33650 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -118,21 +118,8 @@ bool HybridConditional::equals(const HybridFactor &other, double tol) const { } return inner_->equals(*(e->inner_), tol); - if (inner_) { - if (e->inner_) { - // Both the inner_ factors are not null - return inner_->equals(*(e->inner_), tol); - } else { - return false; - } - } else { - if (e->inner_) { - return false; - } else { - // Both inner_ are null - return true; - } - } + return inner_ ? (e->inner_ ? inner_->equals(*(e->inner_), tol) : false) + : !(e->inner_); } /* ************************************************************************ */ From 99a3fbac2cd870e8736e9a76668d57fed0332971 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 16:17:40 -0500 Subject: [PATCH 63/77] DecisionTree serialization --- gtsam/discrete/DecisionTree-inl.h | 31 +++++++++++++++++++ gtsam/discrete/DecisionTree.h | 19 ++++++++++++ gtsam/discrete/tests/testDecisionTree.cpp | 36 ++++++++++++++++++++--- 3 files changed, 82 insertions(+), 4 deletions(-) diff --git a/gtsam/discrete/DecisionTree-inl.h b/gtsam/discrete/DecisionTree-inl.h index f727432065..d01c918401 100644 --- a/gtsam/discrete/DecisionTree-inl.h +++ b/gtsam/discrete/DecisionTree-inl.h @@ -64,6 +64,9 @@ namespace gtsam { */ size_t nrAssignments_; + /// Default constructor for serialization. + Leaf() {} + /// Constructor from constant Leaf(const Y& constant, size_t nrAssignments = 1) : constant_(constant), nrAssignments_(nrAssignments) {} @@ -154,6 +157,18 @@ namespace gtsam { } bool isLeaf() const override { return true; } + + private: + using Base = DecisionTree::Node; + + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE& ar, const unsigned int /*version*/) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + ar& BOOST_SERIALIZATION_NVP(constant_); + ar& BOOST_SERIALIZATION_NVP(nrAssignments_); + } }; // Leaf /****************************************************************************/ @@ -177,6 +192,9 @@ namespace gtsam { using ChoicePtr = boost::shared_ptr; public: + /// Default constructor for serialization. + Choice() {} + ~Choice() override { #ifdef DT_DEBUG_MEMORY std::std::cout << Node::nrNodes << " destructing (Choice) " << this->id() @@ -428,6 +446,19 @@ namespace gtsam { r->push_back(branch->choose(label, index)); return Unique(r); } + + private: + using Base = DecisionTree::Node; + + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE& ar, const unsigned int /*version*/) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + ar& BOOST_SERIALIZATION_NVP(label_); + ar& BOOST_SERIALIZATION_NVP(branches_); + ar& BOOST_SERIALIZATION_NVP(allSame_); + } }; // Choice /****************************************************************************/ diff --git a/gtsam/discrete/DecisionTree.h b/gtsam/discrete/DecisionTree.h index 01a57637f6..a8764a98f7 100644 --- a/gtsam/discrete/DecisionTree.h +++ b/gtsam/discrete/DecisionTree.h @@ -19,9 +19,11 @@ #pragma once +#include #include #include +#include #include #include #include @@ -113,6 +115,12 @@ namespace gtsam { virtual Ptr apply_g_op_fC(const Choice&, const Binary&) const = 0; virtual Ptr choose(const L& label, size_t index) const = 0; virtual bool isLeaf() const = 0; + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE& ar, const unsigned int /*version*/) {} }; /** ------------------------ Node base class --------------------------- */ @@ -364,8 +372,19 @@ namespace gtsam { compose(Iterator begin, Iterator end, const L& label) const; /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE& ar, const unsigned int /*version*/) { + ar& BOOST_SERIALIZATION_NVP(root_); + } }; // DecisionTree + template + struct traits> : public Testable> {}; + /** free versions of apply */ /// Apply unary operator `op` to DecisionTree `f`. diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index 63b14b05e6..02ef52ab8f 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -20,13 +20,12 @@ // #define DT_DEBUG_MEMORY // #define GTSAM_DT_NO_PRUNING #define DISABLE_DOT -#include - +#include #include +#include +#include #include -#include - using namespace std; using namespace gtsam; @@ -529,6 +528,35 @@ TEST(DecisionTree, ApplyWithAssignment) { EXPECT_LONGS_EQUAL(5, count); } +/* ****************************************************************************/ +using Tree = gtsam::DecisionTree; + +BOOST_CLASS_EXPORT_GUID(Tree, "gtsam_DecisionTreeStringInt") +BOOST_CLASS_EXPORT_GUID(Tree::Leaf, "gtsam_DecisionTree_Leaf") +BOOST_CLASS_EXPORT_GUID(Tree::Choice, "gtsam_DecisionTree_Choice") + +// Test HybridBayesNet serialization. +TEST(DecisionTree, Serialization) { + Tree tree({{"A", 2}}, std::vector{1, 2}); + + using namespace serializationTestHelpers; + + // Object roundtrip + Tree outputObj = create(); + roundtrip(tree, outputObj); + EXPECT(tree.equals(outputObj)); + + // XML roundtrip + Tree outputXml = create(); + roundtripXML(tree, outputXml); + EXPECT(tree.equals(outputXml)); + + // Binary roundtrip + Tree outputBinary = create(); + roundtripBinary(tree, outputBinary); + EXPECT(tree.equals(outputBinary)); +} + /* ************************************************************************* */ int main() { TestResult tr; From 2653c2f8fb075aa0e727bb6eac347a77eed4e096 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 16:34:04 -0500 Subject: [PATCH 64/77] serialization support for DecisionTreeFactor --- gtsam/discrete/DecisionTreeFactor.h | 10 ++++++++ .../discrete/tests/testDecisionTreeFactor.cpp | 25 +++++++++++++++++++ 2 files changed, 35 insertions(+) diff --git a/gtsam/discrete/DecisionTreeFactor.h b/gtsam/discrete/DecisionTreeFactor.h index bb9ddbd961..f1df7ae038 100644 --- a/gtsam/discrete/DecisionTreeFactor.h +++ b/gtsam/discrete/DecisionTreeFactor.h @@ -231,6 +231,16 @@ namespace gtsam { const Names& names = {}) const override; /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE& ar, const unsigned int /*version*/) { + ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(ADT); + ar& BOOST_SERIALIZATION_NVP(cardinalities_); + } }; // traits diff --git a/gtsam/discrete/tests/testDecisionTreeFactor.cpp b/gtsam/discrete/tests/testDecisionTreeFactor.cpp index 1829db0340..d4ca9faa60 100644 --- a/gtsam/discrete/tests/testDecisionTreeFactor.cpp +++ b/gtsam/discrete/tests/testDecisionTreeFactor.cpp @@ -19,6 +19,7 @@ #include #include +#include #include #include #include @@ -220,6 +221,30 @@ TEST(DecisionTreeFactor, htmlWithValueFormatter) { EXPECT(actual == expected); } +/* ************************************************************************* */ +BOOST_CLASS_EXPORT_GUID(DecisionTreeFactor, "gtsam_DecisionTreeFactor"); +using ADT = AlgebraicDecisionTree; +BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree"); +BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_DecisionTree_Leaf") +BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_DecisionTree_Choice") + +// Check serialization for AlgebraicDecisionTree and the DecisionTreeFactor +TEST(DecisionTreeFactor, Serialization) { + using namespace serializationTestHelpers; + + DiscreteKey A(1, 2), B(2, 2), C(3, 2); + + DecisionTreeFactor::ADT tree(A & B & C, "1 5 3 7 2 6 4 8"); + EXPECT(equalsObj(tree)); + EXPECT(equalsXML(tree)); + EXPECT(equalsBinary(tree)); + + DecisionTreeFactor f(A & B & C, "1 5 3 7 2 6 4 8"); + EXPECT(equalsObj(f)); + EXPECT(equalsXML(f)); + EXPECT(equalsBinary(f)); +} + /* ************************************************************************* */ int main() { TestResult tr; From 0ab15cc456edbc72390c634d83942e73577c0fe3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 16:48:33 -0500 Subject: [PATCH 65/77] fix equality of HybridDiscreteFactor and HybridGaussianFactor --- gtsam/hybrid/HybridDiscreteFactor.cpp | 6 ++++-- gtsam/hybrid/HybridDiscreteFactor.h | 12 ++++++++++++ gtsam/hybrid/HybridGaussianFactor.cpp | 12 +++++++++--- gtsam/hybrid/HybridGaussianFactor.h | 15 ++++++++++++++- 4 files changed, 39 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/HybridDiscreteFactor.cpp b/gtsam/hybrid/HybridDiscreteFactor.cpp index 605ea5738b..dd8b867be7 100644 --- a/gtsam/hybrid/HybridDiscreteFactor.cpp +++ b/gtsam/hybrid/HybridDiscreteFactor.cpp @@ -40,8 +40,10 @@ HybridDiscreteFactor::HybridDiscreteFactor(DecisionTreeFactor &&dtf) /* ************************************************************************ */ bool HybridDiscreteFactor::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); - // TODO(Varun) How to compare inner_ when they are abstract types? - return e != nullptr && Base::equals(*e, tol); + if (e == nullptr) return false; + if (!Base::equals(*e, tol)) return false; + return inner_ ? (e->inner_ ? inner_->equals(*(e->inner_), tol) : false) + : !(e->inner_); } /* ************************************************************************ */ diff --git a/gtsam/hybrid/HybridDiscreteFactor.h b/gtsam/hybrid/HybridDiscreteFactor.h index 7ac97443a0..7a43ab3a50 100644 --- a/gtsam/hybrid/HybridDiscreteFactor.h +++ b/gtsam/hybrid/HybridDiscreteFactor.h @@ -45,6 +45,9 @@ class GTSAM_EXPORT HybridDiscreteFactor : public HybridFactor { /// @name Constructors /// @{ + /// Default constructor - for serialization. + HybridDiscreteFactor() = default; + // Implicit conversion from a shared ptr of DF HybridDiscreteFactor(DiscreteFactor::shared_ptr other); @@ -70,6 +73,15 @@ class GTSAM_EXPORT HybridDiscreteFactor : public HybridFactor { /// Return the error of the underlying Discrete Factor. double error(const HybridValues &values) const override; /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE &ar, const unsigned int /*version*/) { + ar &BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + ar &BOOST_SERIALIZATION_NVP(inner_); + } }; // traits diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 5a89a04a8d..4fe18bea77 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -44,15 +44,21 @@ HybridGaussianFactor::HybridGaussianFactor(HessianFactor &&hf) /* ************************************************************************* */ bool HybridGaussianFactor::equals(const HybridFactor &other, double tol) const { const This *e = dynamic_cast(&other); - // TODO(Varun) How to compare inner_ when they are abstract types? - return e != nullptr && Base::equals(*e, tol); + if (e == nullptr) return false; + if (!Base::equals(*e, tol)) return false; + return inner_ ? (e->inner_ ? inner_->equals(*(e->inner_), tol) : false) + : !(e->inner_); } /* ************************************************************************* */ void HybridGaussianFactor::print(const std::string &s, const KeyFormatter &formatter) const { HybridFactor::print(s, formatter); - inner_->print("\n", formatter); + if (inner_) { + inner_->print("\n", formatter); + } else { + std::cout << "\nGaussian: nullptr" << std::endl; + } }; /* ************************************************************************ */ diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 6d179d1c18..6bb022396c 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -43,6 +43,10 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { using This = HybridGaussianFactor; using shared_ptr = boost::shared_ptr; + /// @name Constructors + /// @{ + + /// Default constructor - for serialization. HybridGaussianFactor() = default; /** @@ -79,7 +83,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { */ explicit HybridGaussianFactor(HessianFactor &&hf); - public: + /// @} /// @name Testable /// @{ @@ -101,6 +105,15 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// Return the error of the underlying Gaussian factor. double error(const HybridValues &values) const override; /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE &ar, const unsigned int /*version*/) { + ar &BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + ar &BOOST_SERIALIZATION_NVP(inner_); + } }; // traits From 3f2bff8e1de751bdc02a19c53b91d0c20f445576 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 16:49:37 -0500 Subject: [PATCH 66/77] hybrid serialization tests --- .../hybrid/tests/testSerializationHybrid.cpp | 100 ++++++++++++++++++ 1 file changed, 100 insertions(+) create mode 100644 gtsam/hybrid/tests/testSerializationHybrid.cpp diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp new file mode 100644 index 0000000000..64f0ce5795 --- /dev/null +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -0,0 +1,100 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testSerializationHybrid.cpp + * @brief Unit tests for hybrid serialization + * @author Varun Agrawal + * @date January 2023 + */ + +#include +#include +#include +#include +#include + +// Include for test suite +#include + +using namespace std; +using namespace gtsam; +using symbol_shorthand::M; +using symbol_shorthand::X; + +using namespace serializationTestHelpers; + +BOOST_CLASS_EXPORT_GUID(HybridFactor, "gtsam_HybridFactor"); +BOOST_CLASS_EXPORT_GUID(JacobianFactor, "gtsam_JacobianFactor"); + +BOOST_CLASS_EXPORT_GUID(DecisionTreeFactor, "gtsam_DecisionTreeFactor"); +using ADT = AlgebraicDecisionTree; +BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree"); +BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_DecisionTree_Leaf") +BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_DecisionTree_Choice") + +BOOST_CLASS_EXPORT_GUID(gtsam::GaussianMixtureFactor, "gtsam_GaussianMixtureFactor") +BOOST_CLASS_EXPORT_GUID(gtsam::GaussianMixtureFactor::Factors, "gtsam_GaussianMixtureFactor_Factors") +BOOST_CLASS_EXPORT_GUID(gtsam::GaussianMixtureFactor::Factors::Leaf, "gtsam_GaussianMixtureFactor_Factors_Leaf") +BOOST_CLASS_EXPORT_GUID(gtsam::GaussianMixtureFactor::Factors::Choice, "gtsam_GaussianMixtureFactor_Factors_Choice") + +// BOOST_CLASS_EXPORT_GUID(gtsam::GaussianMixture, "gtsam_GaussianMixture") +// BOOST_CLASS_EXPORT_GUID(gtsam::GaussianMixture::Conditionals, +// "gtsam_GaussianMixture_Conditionals") + +/* ****************************************************************************/ +// Test HybridGaussianFactor serialization. +TEST(HybridSerialization, HybridGaussianFactor) { + const HybridGaussianFactor factor(JacobianFactor(X(0), I_3x3, Z_3x1)); + + EXPECT(equalsObj(factor)); + EXPECT(equalsXML(factor)); + EXPECT(equalsBinary(factor)); +} + +/* ****************************************************************************/ +// Test HybridDiscreteFactor serialization. +TEST(HybridSerialization, HybridDiscreteFactor) { + DiscreteKeys discreteKeys{{M(0), 2}}; + const HybridDiscreteFactor factor( + DecisionTreeFactor(discreteKeys, std::vector{0.4, 0.6})); + + EXPECT(equalsObj(factor)); + EXPECT(equalsXML(factor)); + EXPECT(equalsBinary(factor)); +} + +/* ****************************************************************************/ +// Test GaussianMixtureFactor serialization. +TEST(HybridSerialization, GaussianMixtureFactor) { + KeyVector continuousKeys{X(0)}; + DiscreteKeys discreteKeys{{M(0), 2}}; + + auto A = Matrix::Zero(2, 1); + auto b0 = Matrix::Zero(2, 1); + auto b1 = Matrix::Ones(2, 1); + auto f0 = boost::make_shared(X(0), A, b0); + auto f1 = boost::make_shared(X(0), A, b1); + std::vector factors{f0, f1}; + + const GaussianMixtureFactor factor(continuousKeys, discreteKeys, factors); + + EXPECT(equalsObj(factor)); + EXPECT(equalsXML(factor)); + EXPECT(equalsBinary(factor)); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ From 74142e4be19fc59713bd4f180729ad7cc22c689c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 17:37:09 -0500 Subject: [PATCH 67/77] GaussianMixture serialization --- gtsam/hybrid/GaussianMixture.h | 10 ++++ gtsam/hybrid/GaussianMixtureFactor.h | 18 +++++++ .../hybrid/tests/testSerializationHybrid.cpp | 52 +++++++++++++++---- 3 files changed, 69 insertions(+), 11 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h index 240f79dcd7..ba84b5ade3 100644 --- a/gtsam/hybrid/GaussianMixture.h +++ b/gtsam/hybrid/GaussianMixture.h @@ -197,6 +197,16 @@ class GTSAM_EXPORT GaussianMixture */ GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(Archive &ar, const unsigned int /*version*/) { + ar &BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseFactor); + ar &BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseConditional); + ar &BOOST_SERIALIZATION_NVP(conditionals_); + } }; /// Return the DiscreteKey vector as a set. diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 9138d6b30b..01de2f0f7a 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -70,6 +70,15 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { bool operator==(const FactorAndConstant &other) const { return factor == other.factor && constant == other.constant; } + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE &ar, const unsigned int /*version*/) { + ar &BOOST_SERIALIZATION_NVP(factor); + ar &BOOST_SERIALIZATION_NVP(constant); + } }; /// typedef for Decision Tree of Gaussian factors and log-constant. @@ -179,6 +188,15 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { return sum; } /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE &ar, const unsigned int /*version*/) { + ar &BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + ar &BOOST_SERIALIZATION_NVP(factors_); + } }; // traits diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 64f0ce5795..5337938ddd 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -17,10 +17,12 @@ */ #include +#include #include #include #include #include +#include // Include for test suite #include @@ -29,26 +31,37 @@ using namespace std; using namespace gtsam; using symbol_shorthand::M; using symbol_shorthand::X; +using symbol_shorthand::Z; using namespace serializationTestHelpers; BOOST_CLASS_EXPORT_GUID(HybridFactor, "gtsam_HybridFactor"); BOOST_CLASS_EXPORT_GUID(JacobianFactor, "gtsam_JacobianFactor"); +BOOST_CLASS_EXPORT_GUID(GaussianConditional, "gtsam_GaussianConditional"); BOOST_CLASS_EXPORT_GUID(DecisionTreeFactor, "gtsam_DecisionTreeFactor"); using ADT = AlgebraicDecisionTree; BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree"); -BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_DecisionTree_Leaf") -BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_DecisionTree_Choice") - -BOOST_CLASS_EXPORT_GUID(gtsam::GaussianMixtureFactor, "gtsam_GaussianMixtureFactor") -BOOST_CLASS_EXPORT_GUID(gtsam::GaussianMixtureFactor::Factors, "gtsam_GaussianMixtureFactor_Factors") -BOOST_CLASS_EXPORT_GUID(gtsam::GaussianMixtureFactor::Factors::Leaf, "gtsam_GaussianMixtureFactor_Factors_Leaf") -BOOST_CLASS_EXPORT_GUID(gtsam::GaussianMixtureFactor::Factors::Choice, "gtsam_GaussianMixtureFactor_Factors_Choice") - -// BOOST_CLASS_EXPORT_GUID(gtsam::GaussianMixture, "gtsam_GaussianMixture") -// BOOST_CLASS_EXPORT_GUID(gtsam::GaussianMixture::Conditionals, -// "gtsam_GaussianMixture_Conditionals") +BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_DecisionTree_Leaf"); +BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_DecisionTree_Choice"); + +BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor, "gtsam_GaussianMixtureFactor"); +BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors, + "gtsam_GaussianMixtureFactor_Factors"); +BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Leaf, + "gtsam_GaussianMixtureFactor_Factors_Leaf"); +BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors::Choice, + "gtsam_GaussianMixtureFactor_Factors_Choice"); + +BOOST_CLASS_EXPORT_GUID(GaussianMixture, "gtsam_GaussianMixture"); +BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals, + "gtsam_GaussianMixture_Conditionals"); +BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Leaf, + "gtsam_GaussianMixture_Conditionals_Leaf"); +BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Choice, + "gtsam_GaussianMixture_Conditionals_Choice"); +// Needed since GaussianConditional::FromMeanAndStddev uses it +BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic"); /* ****************************************************************************/ // Test HybridGaussianFactor serialization. @@ -92,6 +105,23 @@ TEST(HybridSerialization, GaussianMixtureFactor) { EXPECT(equalsBinary(factor)); } +/* ****************************************************************************/ +// Test GaussianMixture serialization. +TEST(HybridSerialization, GaussianMixture) { + const DiscreteKey mode(M(0), 2); + Matrix1 I = Matrix1::Identity(); + const auto conditional0 = boost::make_shared( + GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5)); + const auto conditional1 = boost::make_shared( + GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); + const GaussianMixture gm({Z(0)}, {X(0)}, {mode}, + {conditional0, conditional1}); + + EXPECT(equalsObj(gm)); + EXPECT(equalsXML(gm)); + EXPECT(equalsBinary(gm)); +} + /* ************************************************************************* */ int main() { TestResult tr; From 2bb4fd6530165cc5bc81cdbf8fb774ecb25a01ad Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 17:40:04 -0500 Subject: [PATCH 68/77] fix minor bug in HybridConditional and test its serialization --- gtsam/hybrid/HybridConditional.cpp | 1 - gtsam/hybrid/tests/testSerializationHybrid.cpp | 15 +++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridConditional.cpp b/gtsam/hybrid/HybridConditional.cpp index 8371a33650..0bfcfec4da 100644 --- a/gtsam/hybrid/HybridConditional.cpp +++ b/gtsam/hybrid/HybridConditional.cpp @@ -116,7 +116,6 @@ bool HybridConditional::equals(const HybridFactor &other, double tol) const { auto other = e->asDiscrete(); return other != nullptr && dc->equals(*other, tol); } - return inner_->equals(*(e->inner_), tol); return inner_ ? (e->inner_ ? inner_->equals(*(e->inner_), tol) : false) : !(e->inner_); diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 5337938ddd..9597fe8f0e 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -19,6 +19,7 @@ #include #include #include +#include #include #include #include @@ -105,6 +106,20 @@ TEST(HybridSerialization, GaussianMixtureFactor) { EXPECT(equalsBinary(factor)); } +/* ****************************************************************************/ +// Test HybridConditional serialization. +TEST(HybridSerialization, HybridConditional) { + const DiscreteKey mode(M(0), 2); + Matrix1 I = Matrix1::Identity(); + const auto conditional = boost::make_shared( + GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5)); + const HybridConditional hc(conditional); + + EXPECT(equalsObj(hc)); + EXPECT(equalsXML(hc)); + EXPECT(equalsBinary(hc)); +} + /* ****************************************************************************/ // Test GaussianMixture serialization. TEST(HybridSerialization, GaussianMixture) { From 6fcc0870308a1dc0c0517b311250011cff093c01 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 18:15:59 -0500 Subject: [PATCH 69/77] serialize DiscreteConditional --- gtsam/discrete/DiscreteConditional.h | 9 ++++++++ .../tests/testDiscreteConditional.cpp | 23 ++++++++++++++++--- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/gtsam/discrete/DiscreteConditional.h b/gtsam/discrete/DiscreteConditional.h index 6a286633d3..b68953eb58 100644 --- a/gtsam/discrete/DiscreteConditional.h +++ b/gtsam/discrete/DiscreteConditional.h @@ -239,6 +239,15 @@ class GTSAM_EXPORT DiscreteConditional /// Internal version of choose DiscreteConditional::ADT choose(const DiscreteValues& given, bool forceComplete) const; + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(Archive& ar, const unsigned int /*version*/) { + ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseFactor); + ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseConditional); + } }; // DiscreteConditional diff --git a/gtsam/discrete/tests/testDiscreteConditional.cpp b/gtsam/discrete/tests/testDiscreteConditional.cpp index 9098f7a1d6..99ea138b14 100644 --- a/gtsam/discrete/tests/testDiscreteConditional.cpp +++ b/gtsam/discrete/tests/testDiscreteConditional.cpp @@ -17,13 +17,14 @@ * @date Feb 14, 2011 */ -#include - #include +#include #include #include #include +#include + using namespace std; using namespace gtsam; @@ -209,7 +210,6 @@ TEST(DiscreteConditional, marginals2) { DiscreteConditional conditional(A | B = "2/2 3/1"); DiscreteConditional prior(B % "1/2"); DiscreteConditional pAB = prior * conditional; - GTSAM_PRINT(pAB); // P(A=0) = P(A=0|B=0)P(B=0) + P(A=0|B=1)P(B=1) = 2*1 + 3*2 = 8 // P(A=1) = P(A=1|B=0)P(B=0) + P(A=1|B=1)P(B=1) = 2*1 + 1*2 = 4 DiscreteConditional actualA = pAB.marginal(A.first); @@ -368,6 +368,23 @@ TEST(DiscreteConditional, html) { EXPECT(actual == expected); } +/* ************************************************************************* */ +using ADT = AlgebraicDecisionTree; +BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_ADT_Leaf") +BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_ADT_Choice") + +// Check serialization for DiscreteConditional +TEST(DiscreteConditional, Serialization) { + using namespace serializationTestHelpers; + + DiscreteKey A(Symbol('x', 1), 3); + DiscreteConditional conditional(A % "1/2/2"); + + EXPECT(equalsObj(conditional)); + EXPECT(equalsXML(conditional)); + EXPECT(equalsBinary(conditional)); +} + /* ************************************************************************* */ int main() { TestResult tr; From 230f65dd4496cac516574c713e9f924132e20e88 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 3 Jan 2023 18:21:47 -0500 Subject: [PATCH 70/77] serialization tests for HybridBayesNet and HybridBayesTree --- gtsam/hybrid/HybridConditional.h | 13 +++++++ gtsam/hybrid/tests/testHybridBayesNet.cpp | 16 -------- gtsam/hybrid/tests/testHybridBayesTree.cpp | 16 -------- .../hybrid/tests/testSerializationHybrid.cpp | 38 ++++++++++++++++++- 4 files changed, 49 insertions(+), 34 deletions(-) diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h index da7c1421e3..021ca13614 100644 --- a/gtsam/hybrid/HybridConditional.h +++ b/gtsam/hybrid/HybridConditional.h @@ -188,6 +188,19 @@ class GTSAM_EXPORT HybridConditional ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseFactor); ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseConditional); ar& BOOST_SERIALIZATION_NVP(inner_); + + // register the various casts based on the type of inner_ + // https://www.boost.org/doc/libs/1_80_0/libs/serialization/doc/serialization.html#runtimecasting + if (isDiscrete()) { + boost::serialization::void_cast_register( + static_cast(NULL), static_cast(NULL)); + } else if (isContinuous()) { + boost::serialization::void_cast_register( + static_cast(NULL), static_cast(NULL)); + } else { + boost::serialization::void_cast_register( + static_cast(NULL), static_cast(NULL)); + } } }; // HybridConditional diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 43eebcf889..ef552bd92a 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -18,7 +18,6 @@ * @date December 2021 */ -#include #include #include #include @@ -31,7 +30,6 @@ using namespace std; using namespace gtsam; -using namespace gtsam::serializationTestHelpers; using noiseModel::Isotropic; using symbol_shorthand::M; @@ -330,20 +328,6 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) { discrete_conditional_tree->apply(checker); } -/* ****************************************************************************/ -// Test HybridBayesNet serialization. -TEST(HybridBayesNet, Serialization) { - Switching s(4); - Ordering ordering = s.linearizedFactorGraph.getHybridOrdering(); - HybridBayesNet hbn = *(s.linearizedFactorGraph.eliminateSequential(ordering)); - - // TODO(Varun) Serialization of inner factor doesn't work. Requires - // serialization support for all hybrid factors. - // EXPECT(equalsObj(hbn)); - // EXPECT(equalsXML(hbn)); - // EXPECT(equalsBinary(hbn)); -} - /* ****************************************************************************/ // Test HybridBayesNet sampling. TEST(HybridBayesNet, Sampling) { diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp index 1e65103830..b957a67d04 100644 --- a/gtsam/hybrid/tests/testHybridBayesTree.cpp +++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp @@ -220,22 +220,6 @@ TEST(HybridBayesTree, Choose) { EXPECT(assert_equal(expected_gbt, gbt)); } -/* ****************************************************************************/ -// Test HybridBayesTree serialization. -TEST(HybridBayesTree, Serialization) { - Switching s(4); - Ordering ordering = s.linearizedFactorGraph.getHybridOrdering(); - HybridBayesTree hbt = - *(s.linearizedFactorGraph.eliminateMultifrontal(ordering)); - - using namespace gtsam::serializationTestHelpers; - // TODO(Varun) Serialization of inner factor doesn't work. Requires - // serialization support for all hybrid factors. - // EXPECT(equalsObj(hbt)); - // EXPECT(equalsXML(hbt)); - // EXPECT(equalsBinary(hbt)); -} - /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 9597fe8f0e..941a1cdb3a 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -17,14 +17,19 @@ */ #include +#include #include #include +#include +#include #include #include #include #include #include +#include "Switching.h" + // Include for test suite #include @@ -36,15 +41,17 @@ using symbol_shorthand::Z; using namespace serializationTestHelpers; +BOOST_CLASS_EXPORT_GUID(Factor, "gtsam_Factor"); BOOST_CLASS_EXPORT_GUID(HybridFactor, "gtsam_HybridFactor"); BOOST_CLASS_EXPORT_GUID(JacobianFactor, "gtsam_JacobianFactor"); BOOST_CLASS_EXPORT_GUID(GaussianConditional, "gtsam_GaussianConditional"); +BOOST_CLASS_EXPORT_GUID(DiscreteConditional, "gtsam_DiscreteConditional"); BOOST_CLASS_EXPORT_GUID(DecisionTreeFactor, "gtsam_DecisionTreeFactor"); using ADT = AlgebraicDecisionTree; BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree"); -BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_DecisionTree_Leaf"); -BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_DecisionTree_Choice"); +BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf"); +BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice") BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor, "gtsam_GaussianMixtureFactor"); BOOST_CLASS_EXPORT_GUID(GaussianMixtureFactor::Factors, @@ -64,6 +71,8 @@ BOOST_CLASS_EXPORT_GUID(GaussianMixture::Conditionals::Choice, // Needed since GaussianConditional::FromMeanAndStddev uses it BOOST_CLASS_EXPORT_GUID(noiseModel::Isotropic, "gtsam_noiseModel_Isotropic"); +BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet"); + /* ****************************************************************************/ // Test HybridGaussianFactor serialization. TEST(HybridSerialization, HybridGaussianFactor) { @@ -137,6 +146,31 @@ TEST(HybridSerialization, GaussianMixture) { EXPECT(equalsBinary(gm)); } +/* ****************************************************************************/ +// Test HybridBayesNet serialization. +TEST(HybridSerialization, HybridBayesNet) { + Switching s(2); + Ordering ordering = s.linearizedFactorGraph.getHybridOrdering(); + HybridBayesNet hbn = *(s.linearizedFactorGraph.eliminateSequential(ordering)); + + EXPECT(equalsObj(hbn)); + EXPECT(equalsXML(hbn)); + EXPECT(equalsBinary(hbn)); +} + +/* ****************************************************************************/ +// Test HybridBayesTree serialization. +TEST(HybridSerialization, HybridBayesTree) { + Switching s(2); + Ordering ordering = s.linearizedFactorGraph.getHybridOrdering(); + HybridBayesTree hbt = + *(s.linearizedFactorGraph.eliminateMultifrontal(ordering)); + + EXPECT(equalsObj(hbt)); + EXPECT(equalsXML(hbt)); + EXPECT(equalsBinary(hbt)); +} + /* ************************************************************************* */ int main() { TestResult tr; From 4cb910ce16abff1792c74f54b16f7dfc7b75e808 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 4 Jan 2023 01:06:41 -0500 Subject: [PATCH 71/77] move discrete serialization tests to common file to remove export key collision --- gtsam/discrete/tests/testDecisionTree.cpp | 29 ----- .../discrete/tests/testDecisionTreeFactor.cpp | 24 ---- .../tests/testDiscreteConditional.cpp | 17 --- .../tests/testSerializationDiscrete.cpp | 105 ++++++++++++++++++ 4 files changed, 105 insertions(+), 70 deletions(-) create mode 100644 gtsam/discrete/tests/testSerializationDiscrete.cpp diff --git a/gtsam/discrete/tests/testDecisionTree.cpp b/gtsam/discrete/tests/testDecisionTree.cpp index 02ef52ab8f..46e6c38136 100644 --- a/gtsam/discrete/tests/testDecisionTree.cpp +++ b/gtsam/discrete/tests/testDecisionTree.cpp @@ -528,35 +528,6 @@ TEST(DecisionTree, ApplyWithAssignment) { EXPECT_LONGS_EQUAL(5, count); } -/* ****************************************************************************/ -using Tree = gtsam::DecisionTree; - -BOOST_CLASS_EXPORT_GUID(Tree, "gtsam_DecisionTreeStringInt") -BOOST_CLASS_EXPORT_GUID(Tree::Leaf, "gtsam_DecisionTree_Leaf") -BOOST_CLASS_EXPORT_GUID(Tree::Choice, "gtsam_DecisionTree_Choice") - -// Test HybridBayesNet serialization. -TEST(DecisionTree, Serialization) { - Tree tree({{"A", 2}}, std::vector{1, 2}); - - using namespace serializationTestHelpers; - - // Object roundtrip - Tree outputObj = create(); - roundtrip(tree, outputObj); - EXPECT(tree.equals(outputObj)); - - // XML roundtrip - Tree outputXml = create(); - roundtripXML(tree, outputXml); - EXPECT(tree.equals(outputXml)); - - // Binary roundtrip - Tree outputBinary = create(); - roundtripBinary(tree, outputBinary); - EXPECT(tree.equals(outputBinary)); -} - /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/discrete/tests/testDecisionTreeFactor.cpp b/gtsam/discrete/tests/testDecisionTreeFactor.cpp index d4ca9faa60..869b3c6303 100644 --- a/gtsam/discrete/tests/testDecisionTreeFactor.cpp +++ b/gtsam/discrete/tests/testDecisionTreeFactor.cpp @@ -221,30 +221,6 @@ TEST(DecisionTreeFactor, htmlWithValueFormatter) { EXPECT(actual == expected); } -/* ************************************************************************* */ -BOOST_CLASS_EXPORT_GUID(DecisionTreeFactor, "gtsam_DecisionTreeFactor"); -using ADT = AlgebraicDecisionTree; -BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree"); -BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_DecisionTree_Leaf") -BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_DecisionTree_Choice") - -// Check serialization for AlgebraicDecisionTree and the DecisionTreeFactor -TEST(DecisionTreeFactor, Serialization) { - using namespace serializationTestHelpers; - - DiscreteKey A(1, 2), B(2, 2), C(3, 2); - - DecisionTreeFactor::ADT tree(A & B & C, "1 5 3 7 2 6 4 8"); - EXPECT(equalsObj(tree)); - EXPECT(equalsXML(tree)); - EXPECT(equalsBinary(tree)); - - DecisionTreeFactor f(A & B & C, "1 5 3 7 2 6 4 8"); - EXPECT(equalsObj(f)); - EXPECT(equalsXML(f)); - EXPECT(equalsBinary(f)); -} - /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/discrete/tests/testDiscreteConditional.cpp b/gtsam/discrete/tests/testDiscreteConditional.cpp index 99ea138b14..3df4bf9e6f 100644 --- a/gtsam/discrete/tests/testDiscreteConditional.cpp +++ b/gtsam/discrete/tests/testDiscreteConditional.cpp @@ -368,23 +368,6 @@ TEST(DiscreteConditional, html) { EXPECT(actual == expected); } -/* ************************************************************************* */ -using ADT = AlgebraicDecisionTree; -BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_ADT_Leaf") -BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_ADT_Choice") - -// Check serialization for DiscreteConditional -TEST(DiscreteConditional, Serialization) { - using namespace serializationTestHelpers; - - DiscreteKey A(Symbol('x', 1), 3); - DiscreteConditional conditional(A % "1/2/2"); - - EXPECT(equalsObj(conditional)); - EXPECT(equalsXML(conditional)); - EXPECT(equalsBinary(conditional)); -} - /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/discrete/tests/testSerializationDiscrete.cpp b/gtsam/discrete/tests/testSerializationDiscrete.cpp new file mode 100644 index 0000000000..df7df0b7ec --- /dev/null +++ b/gtsam/discrete/tests/testSerializationDiscrete.cpp @@ -0,0 +1,105 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/* + * testSerializtionDiscrete.cpp + * + * @date January 2023 + * @author Varun Agrawal + */ + +#include +#include +#include +#include +#include + +using namespace std; +using namespace gtsam; + +using Tree = gtsam::DecisionTree; + +BOOST_CLASS_EXPORT_GUID(Tree, "gtsam_DecisionTreeStringInt") +BOOST_CLASS_EXPORT_GUID(Tree::Leaf, "gtsam_DecisionTreeStringInt_Leaf") +BOOST_CLASS_EXPORT_GUID(Tree::Choice, "gtsam_DecisionTreeStringInt_Choice") + +BOOST_CLASS_EXPORT_GUID(DecisionTreeFactor, "gtsam_DecisionTreeFactor"); + +using ADT = AlgebraicDecisionTree; +BOOST_CLASS_EXPORT_GUID(ADT, "gtsam_AlgebraicDecisionTree"); +BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf") +BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice") + +/* ****************************************************************************/ +// Test DecisionTree serialization. +TEST(DiscreteSerialization, DecisionTree) { + Tree tree({{"A", 2}}, std::vector{1, 2}); + + using namespace serializationTestHelpers; + + // Object roundtrip + Tree outputObj = create(); + roundtrip(tree, outputObj); + EXPECT(tree.equals(outputObj)); + + // XML roundtrip + Tree outputXml = create(); + roundtripXML(tree, outputXml); + EXPECT(tree.equals(outputXml)); + + // Binary roundtrip + Tree outputBinary = create(); + roundtripBinary(tree, outputBinary); + EXPECT(tree.equals(outputBinary)); +} + +/* ************************************************************************* */ +// Check serialization for AlgebraicDecisionTree and the DecisionTreeFactor +TEST(DiscreteSerialization, DecisionTreeFactor) { + using namespace serializationTestHelpers; + + DiscreteKey A(1, 2), B(2, 2), C(3, 2); + + DecisionTreeFactor::ADT tree(A & B & C, "1 5 3 7 2 6 4 8"); + EXPECT(equalsObj(tree)); + EXPECT(equalsXML(tree)); + EXPECT(equalsBinary(tree)); + + DecisionTreeFactor f(A & B & C, "1 5 3 7 2 6 4 8"); + EXPECT(equalsObj(f)); + EXPECT(equalsXML(f)); + EXPECT(equalsBinary(f)); +} + +/* ************************************************************************* */ +// Check serialization for DiscreteConditional & DiscreteDistribution +TEST(DiscreteSerialization, DiscreteConditional) { + using namespace serializationTestHelpers; + + DiscreteKey A(Symbol('x', 1), 3); + DiscreteConditional conditional(A % "1/2/2"); + + EXPECT(equalsObj(conditional)); + EXPECT(equalsXML(conditional)); + EXPECT(equalsBinary(conditional)); + + DiscreteDistribution P(A % "3/2/1"); + EXPECT(equalsObj(P)); + EXPECT(equalsXML(P)); + EXPECT(equalsBinary(P)); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ From bb31956a96c583cc5386c0e4cd595c13471569b3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 4 Jan 2023 02:52:33 -0500 Subject: [PATCH 72/77] enable previously failing test, now works!! --- gtsam/hybrid/tests/testHybridEstimation.cpp | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index a45c5b92c0..4a53a31412 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -114,7 +114,7 @@ TEST(HybridEstimation, Full) { /****************************************************************************/ // Test approximate inference with an additional pruning step. -TEST_DISABLED(HybridEstimation, Incremental) { +TEST(HybridEstimation, Incremental) { size_t K = 15; std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; @@ -151,9 +151,6 @@ TEST_DISABLED(HybridEstimation, Incremental) { graph.resize(0); } - /*TODO(Varun) Gives degenerate result due to probability underflow. - Need to normalize probabilities. - */ HybridValues delta = smoother.hybridBayesNet().optimize(); Values result = initial.retract(delta.continuous()); From ed16c335c1286eb16c81bc934412ce2df88fef96 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 4 Jan 2023 02:53:12 -0500 Subject: [PATCH 73/77] add check in GaussianMixtureFactor::likelihood --- gtsam/hybrid/GaussianMixture.cpp | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index fdeea8ccdb..12e88f81d8 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -69,8 +69,7 @@ GaussianFactorGraphTree GaussianMixture::asGaussianFactorGraphTree() const { GaussianFactorGraph result; result.push_back(conditional); if (conditional) { - return GraphAndConstant( - result, conditional->logNormalizationConstant()); + return GraphAndConstant(result, conditional->logNormalizationConstant()); } else { return GraphAndConstant(result, 0.0); } @@ -163,7 +162,13 @@ KeyVector GaussianMixture::continuousParents() const { /* ************************************************************************* */ boost::shared_ptr GaussianMixture::likelihood( const VectorValues &frontals) const { - // TODO(dellaert): check that values has all frontals + // Check that values has all frontals + for (auto &&kv : frontals) { + if (frontals.find(kv.first) == frontals.end()) { + throw std::runtime_error("GaussianMixture: frontals missing factor key."); + } + } + const DiscreteKeys discreteParentKeys = discreteKeys(); const KeyVector continuousParentKeys = continuousParents(); const GaussianMixtureFactor::Factors likelihoods( From 34daecd7a4f5bf2d8a4cd8d0ce83526ad0283a5c Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 4 Jan 2023 02:54:34 -0500 Subject: [PATCH 74/77] remove deferredFactors --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 7b32b90b7e..f6b713a768 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -60,10 +60,10 @@ template class EliminateableFactorGraph; /* ************************************************************************ */ static GaussianFactorGraphTree addGaussian( - const GaussianFactorGraphTree &sum, + const GaussianFactorGraphTree &gfgTree, const GaussianFactor::shared_ptr &factor) { // If the decision tree is not initialized, then initialize it. - if (sum.empty()) { + if (gfgTree.empty()) { GaussianFactorGraph result; result.push_back(factor); return GaussianFactorGraphTree(GraphAndConstant(result, 0.0)); @@ -74,20 +74,18 @@ static GaussianFactorGraphTree addGaussian( result.push_back(factor); return GraphAndConstant(result, graph_z.constant); }; - return sum.apply(add); + return gfgTree.apply(add); } } /* ************************************************************************ */ -// TODO(dellaert): We need to document why deferredFactors need to be -// added last, which I would undo if possible. Implementation-wise, it's -// probably more efficient to first collect the discrete keys, and then loop -// over all assignments to populate a vector. +// TODO(dellaert): Implementation-wise, it's probably more efficient to first +// collect the discrete keys, and then loop over all assignments to populate a +// vector. GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { gttic(assembleGraphTree); GaussianFactorGraphTree result; - std::vector deferredFactors; for (auto &f : factors_) { // TODO(dellaert): just use a virtual method defined in HybridFactor. @@ -101,10 +99,10 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { } else if (f->isContinuous()) { if (auto gf = boost::dynamic_pointer_cast(f)) { - deferredFactors.push_back(gf->inner()); + result = addGaussian(result, gf->inner()); } if (auto cg = boost::dynamic_pointer_cast(f)) { - deferredFactors.push_back(cg->asGaussian()); + result = addGaussian(result, cg->asGaussian()); } } else if (f->isDiscrete()) { @@ -126,10 +124,6 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { } } - for (auto &f : deferredFactors) { - result = addGaussian(result, f); - } - gttoc(assembleGraphTree); return result; From 7dd4bc990a09cc012931b70748534cf489b7fdcb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 4 Jan 2023 02:55:06 -0500 Subject: [PATCH 75/77] implement dim() for MixtureFactor --- gtsam/hybrid/MixtureFactor.h | 12 +++++++++--- gtsam/hybrid/tests/testMixtureFactor.cpp | 19 ++++++++++++++++--- 2 files changed, 25 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h index fc1a9a2b83..5285dd1911 100644 --- a/gtsam/hybrid/MixtureFactor.h +++ b/gtsam/hybrid/MixtureFactor.h @@ -162,14 +162,20 @@ class MixtureFactor : public HybridFactor { } /// Error for HybridValues is not provided for nonlinear hybrid factor. - double error(const HybridValues &values) const override { + double error(const HybridValues& values) const override { throw std::runtime_error( "MixtureFactor::error(HybridValues) not implemented."); } + /** + * @brief Get the dimension of the factor (number of rows on linearization). + * Returns the dimension of the first component factor. + * @return size_t + */ size_t dim() const { - // TODO(Varun) - throw std::runtime_error("MixtureFactor::dim not implemented."); + const auto assignments = DiscreteValues::CartesianProduct(discreteKeys_); + auto factor = factors_(assignments.at(0)); + return factor->dim(); } /// Testable diff --git a/gtsam/hybrid/tests/testMixtureFactor.cpp b/gtsam/hybrid/tests/testMixtureFactor.cpp index fe3212eda5..9e4d66bf27 100644 --- a/gtsam/hybrid/tests/testMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testMixtureFactor.cpp @@ -70,8 +70,7 @@ MixtureFactor } /* ************************************************************************* */ -// Test the error of the MixtureFactor -TEST(MixtureFactor, Error) { +static MixtureFactor getMixtureFactor() { DiscreteKey m1(1, 2); double between0 = 0.0; @@ -86,7 +85,13 @@ TEST(MixtureFactor, Error) { boost::make_shared>(X(1), X(2), between1, model); std::vector factors{f0, f1}; - MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); + return MixtureFactor({X(1), X(2)}, {m1}, factors); +} + +/* ************************************************************************* */ +// Test the error of the MixtureFactor +TEST(MixtureFactor, Error) { + auto mixtureFactor = getMixtureFactor(); Values continuousValues; continuousValues.insert(X(1), 0); @@ -94,6 +99,7 @@ TEST(MixtureFactor, Error) { AlgebraicDecisionTree error_tree = mixtureFactor.error(continuousValues); + DiscreteKey m1(1, 2); std::vector discrete_keys = {m1}; std::vector errors = {0.5, 0}; AlgebraicDecisionTree expected_error(discrete_keys, errors); @@ -101,6 +107,13 @@ TEST(MixtureFactor, Error) { EXPECT(assert_equal(expected_error, error_tree)); } +/* ************************************************************************* */ +// Test dim of the MixtureFactor +TEST(MixtureFactor, Dim) { + auto mixtureFactor = getMixtureFactor(); + EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); +} + /* ************************************************************************* */ int main() { TestResult tr; From b16480bab1e2cc84de6f9fb046866112aee50f57 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 4 Jan 2023 02:55:37 -0500 Subject: [PATCH 76/77] simplify code --- gtsam/discrete/DecisionTreeFactor.cpp | 4 ++-- gtsam/hybrid/HybridDiscreteFactor.cpp | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/gtsam/discrete/DecisionTreeFactor.cpp b/gtsam/discrete/DecisionTreeFactor.cpp index 4e16fc689e..7f604086c1 100644 --- a/gtsam/discrete/DecisionTreeFactor.cpp +++ b/gtsam/discrete/DecisionTreeFactor.cpp @@ -156,9 +156,9 @@ namespace gtsam { std::vector> DecisionTreeFactor::enumerate() const { // Get all possible assignments - std::vector> pairs = discreteKeys(); + DiscreteKeys pairs = discreteKeys(); // Reverse to make cartesian product output a more natural ordering. - std::vector> rpairs(pairs.rbegin(), pairs.rend()); + DiscreteKeys rpairs(pairs.rbegin(), pairs.rend()); const auto assignments = DiscreteValues::CartesianProduct(rpairs); // Construct unordered_map with values diff --git a/gtsam/hybrid/HybridDiscreteFactor.cpp b/gtsam/hybrid/HybridDiscreteFactor.cpp index dd8b867be7..afdb6472aa 100644 --- a/gtsam/hybrid/HybridDiscreteFactor.cpp +++ b/gtsam/hybrid/HybridDiscreteFactor.cpp @@ -26,7 +26,6 @@ namespace gtsam { /* ************************************************************************ */ -// TODO(fan): THIS IS VERY VERY DIRTY! We need to get DiscreteFactor right! HybridDiscreteFactor::HybridDiscreteFactor(DiscreteFactor::shared_ptr other) : Base(boost::dynamic_pointer_cast(other) ->discreteKeys()), From 25fd6181ac852db15abcaa610f9061ec6658856f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 4 Jan 2023 03:30:49 -0500 Subject: [PATCH 77/77] rename getAssignment to assignment, fix printing --- gtsam/hybrid/HybridNonlinearISAM.cpp | 6 ++++-- gtsam/hybrid/HybridNonlinearISAM.h | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridNonlinearISAM.cpp b/gtsam/hybrid/HybridNonlinearISAM.cpp index 57e0daf8da..d6b83e30d6 100644 --- a/gtsam/hybrid/HybridNonlinearISAM.cpp +++ b/gtsam/hybrid/HybridNonlinearISAM.cpp @@ -99,9 +99,11 @@ void HybridNonlinearISAM::print(const string& s, const KeyFormatter& keyFormatter) const { cout << s << "ReorderInterval: " << reorderInterval_ << " Current Count: " << reorderCounter_ << endl; - isam_.print("HybridGaussianISAM:\n", keyFormatter); + std::cout << "HybridGaussianISAM:" << std::endl; + isam_.print("", keyFormatter); linPoint_.print("Linearization Point:\n", keyFormatter); - factors_.print("Nonlinear Graph:\n", keyFormatter); + std::cout << "Nonlinear Graph:" << std::endl; + factors_.print("", keyFormatter); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/HybridNonlinearISAM.h b/gtsam/hybrid/HybridNonlinearISAM.h index 47aa81c558..53bacb0fff 100644 --- a/gtsam/hybrid/HybridNonlinearISAM.h +++ b/gtsam/hybrid/HybridNonlinearISAM.h @@ -90,7 +90,7 @@ class GTSAM_EXPORT HybridNonlinearISAM { const Values& getLinearizationPoint() const { return linPoint_; } /** Return the current discrete assignment */ - const DiscreteValues& getAssignment() const { return assignment_; } + const DiscreteValues& assignment() const { return assignment_; } /** get underlying nonlinear graph */ const HybridNonlinearFactorGraph& getFactorsUnsafe() const {