From 5235637c07b6db9bb0459abebdf4d2ab207e6de9 Mon Sep 17 00:00:00 2001
From: Lev Konstantinovskiy
Date: Mon, 13 Mar 2017 18:05:37 -0300
Subject: [PATCH] Increase epsilon for comparing in KL tests (#1211)

---
 gensim/test/test_similarity_metrics.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/gensim/test/test_similarity_metrics.py b/gensim/test/test_similarity_metrics.py
index 700086d20e..858e388e77 100644
--- a/gensim/test/test_similarity_metrics.py
+++ b/gensim/test/test_similarity_metrics.py
@@ -188,7 +188,7 @@ def test_distributions(self):
         vec_2 = [(1, 0.1), (3, 0.8), (4, 0.1)]
         result = matutils.kullback_leibler(vec_2, vec_1, 8)
         expected = 0.55451775
-        self.assertAlmostEqual(expected, result)
+        self.assertAlmostEqual(expected, result, places=5)
 
         # KL is not symetric; vec1 compared with vec2 will contain log of zeros and return infinity
         vec_1 = [(2, 0.1), (3, 0.4), (4, 0.1), (5, 0.1), (1, 0.1), (7, 0.2)]
@@ -201,14 +201,14 @@ def test_distributions(self):
         vec_2 = csr_matrix([[1, 0.4], [0, 0.2], [2, 0.2]])
         result = matutils.kullback_leibler(vec_1, vec_2, 3)
         expected = 0.0894502
-        self.assertAlmostEqual(expected, result)
+        self.assertAlmostEqual(expected, result, places=5)
 
         # checking ndarray, list as inputs
         vec_1 = np.array([0.6, 0.1, 0.1, 0.2])
         vec_2 = [0.2, 0.2, 0.1, 0.5]
         result = matutils.kullback_leibler(vec_1, vec_2)
         expected = 0.40659450877
-        self.assertAlmostEqual(expected, result)
+        self.assertAlmostEqual(expected, result, places=5)
 
         # testing LDA distribution vectors
         np.random.seed(0)
@@ -217,7 +217,7 @@ def test_distributions(self):
         lda_vec2 = model[[(2, 2), (1, 3)]]
         result = matutils.kullback_leibler(lda_vec1, lda_vec2)
         expected = 4.283407e-12
-        self.assertAlmostEqual(expected, result)
+        self.assertAlmostEqual(expected, result, places=5)
 
 class TestJaccard(unittest.TestCase):
     def test_inputs(self):
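
Not part of the patch, only a note on what the change does: unittest's assertAlmostEqual defaults to places=7, which can be tighter than the rounding noise in the computed KL values (gensim's kullback_leibler may involve single-precision intermediates), so these assertions now compare at places=5. A minimal, self-contained sketch of the difference between the two tolerances; the 2e-7 discrepancy below is illustrative, not a value taken from the gensim tests:

import unittest


class ToleranceSketch(unittest.TestCase):
    def test_places_tolerance(self):
        expected = 0.55451775      # reference value reused from the patched test
        result = expected + 2e-7   # simulated float32-level rounding noise
        # The default tolerance (places=7) rejects a 2e-7 discrepancy...
        with self.assertRaises(AssertionError):
            self.assertAlmostEqual(expected, result)
        # ...while places=5 accepts any discrepancy below 0.5e-5.
        self.assertAlmostEqual(expected, result, places=5)


if __name__ == "__main__":
    unittest.main()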