diff --git a/tests/test_diametrizer.py b/tests/test_diametrizer.py index 87717f6f..de58eccd 100644 --- a/tests/test_diametrizer.py +++ b/tests/test_diametrizer.py @@ -1,6 +1,7 @@ '''Test tns.generate.diametrizer code''' import os +import copy from nose.tools import assert_raises import numpy as np from numpy.testing import assert_array_almost_equal @@ -107,7 +108,7 @@ def test_diametrize_smoothing(): def test_diametrize_from_root(): neu1 = morphio.mut.Morphology(NEU_PATH1) # has to be loaded to start clean np.random.seed(0) # ensure constant random number for sampling - diametrizer.diametrize_from_root(neu1, MODEL) + diametrizer.diametrize_from_root(neu1, model_all=MODEL) assert_array_almost_equal(morphio.Morphology(neu1).diameters, [4. , 3.9 , 3.8 , 3.7 , 3.6 , 3.5 , 3.4 , 3.4 , 1.202082, 1.182082, 1.162082, 1.142082, @@ -116,7 +117,7 @@ def test_diametrize_from_root(): neu2 = morphio.mut.Morphology(NEU_PATH3) # has to be loaded to start clean np.random.seed(0) # ensure constant random number for sampling - diametrizer.diametrize_from_root(neu2, MODEL, neurite_type=SectionType.axon) + diametrizer.diametrize_from_root(neu2, SectionType.axon, model_all=MODEL) assert_array_almost_equal(morphio.Morphology(neu2).diameters, [4., 3.8, 3.6, 3.4, 3.2, 3., 2.8 , 2.8, 2.8, 2.6, 2.4, 2.2, @@ -127,7 +128,7 @@ def test_diametrize_from_root(): def test_diametrize_from_tips(): neu1 = morphio.mut.Morphology(NEU_PATH1) # has to be loaded to start clean np.random.seed(0) # ensure constant random number for sampling - diametrizer.diametrize_from_tips(neu1, MODEL) + diametrizer.diametrize_from_tips(neu1, model_all=MODEL) assert_array_almost_equal(morphio.Morphology(neu1).diameters, [2.52333 , 2.423331, 2.32333 , 2.22333 , 2.12333 , 2.02333 , 1.92333 , 1.92333 , 0.68 , 0.66 , 0.64 , 0.62 , @@ -136,7 +137,7 @@ def test_diametrize_from_tips(): neu2 = morphio.mut.Morphology(NEU_PATH3) # has to be loaded to start clean np.random.seed(0) # ensure constant random number for sampling - diametrizer.diametrize_from_tips(neu2, MODEL, neurite_type=SectionType.axon) + diametrizer.diametrize_from_tips(neu2, model_all=MODEL, neurite_type=SectionType.axon) assert_array_almost_equal(morphio.Morphology(neu2).diameters, [4., 3.8, 3.6, 3.4, 3.2, 3., 2.8 , 2.8, 2.8, 2.6, 2.4, 2.2, @@ -157,6 +158,7 @@ def test_redefine_diameter_section(): def test_build(): + np.random.seed(1) # ensure constant random number for sampling neuron = morphio.mut.Morphology(NEU_PATH1) diametrizer.build(neuron, diam_method="M1") diameters = [i.diameters for i in neuron.sections.values()] @@ -198,7 +200,7 @@ def test_build(): assert_array_almost_equal(diameters[1], diameters[2]) assert_array_almost_equal(diameters[2], [1.9233304, 0.68, 0.66, 0.64, 0.62, 0.6]) - def diam_method(neuron, input_model, tree_type): + def diam_method(neuron, tree_type, **kwargs): diametrizer.diametrize_constant_per_neurite(neuron) neuron = morphio.mut.Morphology(NEU_PATH1) @@ -236,6 +238,34 @@ def diam_method(neuron, input_model, tree_type): assert_array_almost_equal(diameters[3], diameters[4]) assert_array_almost_equal(diameters[4], diameters[5]) + # Test with custom random generator + neuron = morphio.mut.Morphology(NEU_PATH1) + test_model = copy.deepcopy(MODEL) + test_model["basal"]["trunk_taper"] = np.arange(0, 10, 0.3) + diametrizer.build( + neuron, + input_model=test_model, + diam_method="M4", + random_generator=np.random.default_rng(3), + ) + diameters = [i.diameters for i in neuron.sections.values()] + assert_array_almost_equal(diameters[0], [3, 2.9, 2.8, 2.7, 2.6, 2.5, 
2.4]) + assert_array_almost_equal(diameters[1], diameters[2]) + assert_array_almost_equal( + diameters[2], + [2.4, 0.84852815, 0.82852817, 0.8085281 , 0.78852814, 0.76852816] + ) + + neuron = morphio.mut.Morphology(NEU_PATH1) + diametrizer.build(neuron, input_model=MODEL, diam_method="M5") + diameters = [i.diameters for i in neuron.sections.values()] + assert_array_almost_equal( + diameters[0], + [2.5233305, 2.4233305, 2.3233304, 2.2233305, 2.1233304, 2.0233305, 1.923330] + ) + assert_array_almost_equal(diameters[1], diameters[2]) + assert_array_almost_equal(diameters[2], [1.9233304, 0.68, 0.66, 0.64, 0.62, 0.6]) + neuron = morphio.mut.Morphology(NEU_PATH1) assert_raises(KeyError, diametrizer.build, neuron, None, None, "UNKNOWN") diff --git a/tests/test_neuron_functional.py b/tests/test_neuron_functional.py index 56f9a7d4..dd21e366 100644 --- a/tests/test_neuron_functional.py +++ b/tests/test_neuron_functional.py @@ -21,6 +21,12 @@ _path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data') +def build_random_generator(seed=None): + mt = np.random.MT19937() + mt._legacy_seeding(seed) # Use legacy seeding to get the same result as with np.random.seed() + return np.random.RandomState(mt) + + def assert_close_persistent_diagram(actual, expected): # compute distances between points distances = np.min(cdist(np.array(tmd.analysis.sort_ph(expected)), actual), axis=0) @@ -42,11 +48,25 @@ def _load_inputs(distributions, parameters): return distributions, params -def _test_full(feature, distributions, parameters, ref_cell, ref_persistence_diagram, save=False): - - np.random.seed(0) +def _test_full( + feature, + distributions, + parameters, + ref_cell, + ref_persistence_diagram, + save=False, + rng_or_seed=None, +): distributions, params = _load_inputs(join(_path, distributions), join(_path, parameters)) - n = NeuronGrower(input_distributions=distributions, input_parameters=params).grow() + if rng_or_seed is None: + np.random.seed(0) + n = NeuronGrower(input_distributions=distributions, input_parameters=params).grow() + else: + n = NeuronGrower( + input_distributions=distributions, + input_parameters=params, + rng_or_seed=rng_or_seed, + ).grow() with TemporaryDirectory('test_grower') as folder: out_neuron = os.path.join(folder, 'test_output_neuron_.h5') @@ -80,6 +100,25 @@ def test_wrong_filtration(): assert_raises(ValueError, NeuronGrower, parameters, distributions) +def test_seeding(): + '''Test seeding of internal random number generator''' + distributions, parameters = _load_inputs(os.path.join(_path, 'bio_path_distribution.json'), + os.path.join(_path, 'bio_path_params.json')) + ng = NeuronGrower(parameters, distributions, rng_or_seed=0) + assert ng._rng.bit_generator.state == { + "bit_generator": "PCG64", + "state": { + "state": 80186449399738619878794082838194943960, + "inc": 87136372517582989555478159403783844777 + }, + "has_uint32": 0, + "uinteger": 0, + } + + ng = NeuronGrower(parameters, distributions, rng_or_seed=None) + assert ng._rng.bit_generator.state["bit_generator"] == "PCG64" + + def test_grow_trunk_1_basal(): '''Test NeuronGrower._grow_trunk() with only 1 basal (should raise an Exception)''' distributions, parameters = _load_inputs(os.path.join(_path, 'bio_path_distribution.json'), @@ -203,6 +242,16 @@ def test_breaker_of_tmd_algo(): assert_array_almost_equal(n.sections[169].points[-1], np.array([117.20551, -41.12157, 189.57013]), decimal=5) assert_array_almost_equal(n.sections[122].points[-1], np.array([ 77.08879, 115.79825, -0.99393]), decimal=5) + # Test with a 
specific random generator + rng = build_random_generator(3367155) + + N = NeuronGrower(input_distributions=distributions, input_parameters=params, rng_or_seed=rng) + n = N.grow() + + assert_array_equal(N.apical_sections, [33]) + assert_array_almost_equal(n.sections[169].points[-1], np.array([117.20551, -41.12157, 189.57013]), decimal=5) + assert_array_almost_equal(n.sections[122].points[-1], np.array([ 77.08879, 115.79825, -0.99393]), decimal=5) + def test_axon_grower(): '''Test axon grower, which should only grow trunks with 1 section to allow later axon grafting. @@ -214,12 +263,24 @@ def test_axon_grower(): 'axon_trunk_parameters.json', 'test_axon_grower.h5', None) + _test_full('radial_distances', + 'axon_trunk_distribution.json', + 'axon_trunk_parameters.json', + 'test_axon_grower.h5', + None, + rng_or_seed=build_random_generator(0)) _test_full('radial_distances', 'axon_trunk_distribution.json', 'axon_trunk_parameters_absolute.json', 'test_axon_grower_absolute.h5', None) + _test_full('radial_distances', + 'axon_trunk_distribution.json', + 'axon_trunk_parameters_absolute.json', + 'test_axon_grower_absolute.h5', + None, + rng_or_seed=build_random_generator(0)) def test_basic_grower(): @@ -228,6 +289,43 @@ def test_basic_grower(): 'trunk_parameters.json', 'test_trunk_grower.h5', None) + _test_full('radial_distances', + 'bio_trunk_distribution.json', + 'trunk_parameters.json', + 'test_trunk_grower.h5', + None, + rng_or_seed=build_random_generator(0)) + + +def test_basic_grower_with_generator(): + distributions, params = _load_inputs( + join(_path, 'bio_trunk_distribution.json'), + join(_path, 'trunk_parameters.json'), + ) + expected_pts = [ + [-0.7312348484992981, 7.604228973388672, 11.173797607421875], + [-13.377432823181152, -1.2863954305648804, 2.9336819648742676], + [11.861421585083008, -0.049414388835430145, 6.1279988288879395], + [-2.3804218769073486, 12.54181957244873, 1.118072748184204], + ] + + rng = np.random.default_rng(0) + rng_or_seeds = [0, rng] + + for rng_or_seed in rng_or_seeds: + n = NeuronGrower( + input_distributions=distributions, + input_parameters=params, + rng_or_seed=rng_or_seed, + ).grow() + assert len(n.root_sections) == 4 + assert_array_almost_equal( + [i.points[-1].tolist() for i in n.root_sections], + expected_pts, + ) + + assert_raises(TypeError, NeuronGrower, params, distributions, rng_or_seed="NOT A SEED") + def test_path_grower(): '''test tmd_path and tmd_apical_path''' @@ -236,6 +334,13 @@ def test_path_grower(): 'bio_path_params.json', 'path_grower.h5', 'bio_path_persistence_diagram.json') + _test_full('path_distances', + 'bio_distribution.json', + 'bio_path_params.json', + 'path_grower.h5', + 'bio_path_persistence_diagram.json', + rng_or_seed=build_random_generator(0)) + def test_gradient_path_grower(): '''test tmd_path''' @@ -244,6 +349,12 @@ def test_gradient_path_grower(): 'bio_gradient_path_params.json', 'gradient_path_grower.h5', 'gradient_path_persistence_diagram.json') + _test_full('path_distances', + 'bio_distribution.json', + 'bio_gradient_path_params.json', + 'gradient_path_grower.h5', + 'gradient_path_persistence_diagram.json', + rng_or_seed=build_random_generator(0)) def test_bio_rat_l5_tpc(): @@ -252,21 +363,45 @@ def test_bio_rat_l5_tpc(): 'params1.json', 'expected_bio_rat_L5_TPC_B_with_params1.h5', 'expected_bio_rat_L5_TPC_B_with_params1_persistence_diagram.json') + _test_full('path_distances', + 'bio_rat_L5_TPC_B.json', + 'params1.json', + 'expected_bio_rat_L5_TPC_B_with_params1.h5', + 
'expected_bio_rat_L5_TPC_B_with_params1_persistence_diagram.json', + rng_or_seed=build_random_generator(0)) _test_full('path_distances', 'bio_rat_L5_TPC_B.json', 'params2.json', 'expected_bio_rat_L5_TPC_B_with_params2.h5', 'expected_bio_rat_L5_TPC_B_with_params2_persistence_diagram.json') + _test_full('path_distances', + 'bio_rat_L5_TPC_B.json', + 'params2.json', + 'expected_bio_rat_L5_TPC_B_with_params2.h5', + 'expected_bio_rat_L5_TPC_B_with_params2_persistence_diagram.json', + rng_or_seed=build_random_generator(0)) _test_full('path_distances', 'bio_rat_L5_TPC_B.json', 'params3.json', 'expected_bio_rat_L5_TPC_B_with_params3.h5', 'expected_bio_rat_L5_TPC_B_with_params3_persistence_diagram.json') + _test_full('path_distances', + 'bio_rat_L5_TPC_B.json', + 'params3.json', + 'expected_bio_rat_L5_TPC_B_with_params3.h5', + 'expected_bio_rat_L5_TPC_B_with_params3_persistence_diagram.json', + rng_or_seed=build_random_generator(0)) _test_full('path_distances', 'bio_rat_L5_TPC_B.json', 'params4.json', 'expected_bio_rat_L5_TPC_B_with_params4.h5', 'expected_bio_rat_L5_TPC_B_with_params4_persistence_diagram.json') + _test_full('path_distances', + 'bio_rat_L5_TPC_B.json', + 'params4.json', + 'expected_bio_rat_L5_TPC_B_with_params4.h5', + 'expected_bio_rat_L5_TPC_B_with_params4_persistence_diagram.json', + rng_or_seed=build_random_generator(0)) diff --git a/tests/test_sample.py b/tests/test_sample.py index d04f97eb..e7293478 100644 --- a/tests/test_sample.py +++ b/tests/test_sample.py @@ -1,26 +1,120 @@ from tns.morphmath import sample import numpy as np -from numpy.testing import assert_equal +from numpy.testing import assert_equal, assert_raises + def test_Distr(): - params = {'soma': {'size': {'norm': {'mean': 9.024144162609812, 'std': 3.5462697985669935}}}} + # Test distributions + assert_equal(sample.Distr.norm({"mean": 1, "std": 0.5}), ("standard_normal", 1, 0.5)) + assert_equal(sample.Distr.uniform({"min": 1, "max": 1.25}), ("uniform", 1, 0.25)) + assert_equal(sample.Distr.expon({"loc": 1, "lambda": 2}), ("standard_exponential", 1, 0.5)) + + # Setup normal distribution np.random.seed(0) - soma_d = sample.Distr(params['soma']['size']) - val = soma_d.draw_positive() + mt = np.random.MT19937() + mt._legacy_seeding(0) # Use legacy seeding to get the same result as with np.random.seed(0) + rng = np.random.RandomState(mt) + mt2 = np.random.MT19937() + mt2._legacy_seeding(0) + rng_new = np.random.Generator(mt2) - np.random.seed(79) - val_neg = soma_d.draw_positive() + params = {'norm': {'mean': 9.024144162609812, 'std': 3.5462697985669935}} + soma_d = sample.Distr(params) + soma_d_rng = sample.Distr(params, random_generator=rng) + soma_d_new = sample.Distr(params, random_generator=rng_new) - np.random.seed(0) - val1 = sample.soma_size(params) - assert_equal(val, val1) + # Test draw_positive + val = soma_d.draw_positive() + val_rng = soma_d_rng.draw_positive() + val_new = soma_d_new.draw_positive() assert_equal(val, 15.279949720206192) - assert_equal(val_neg, 9.270213756873975) + assert_equal(val_rng, val) + assert_equal(val_new, 10.356455608693715) + + assert_equal(soma_d.draw_positive(), 10.443209585321375) + assert_equal(soma_d_rng.draw_positive(), 10.443209585321375) + assert_equal(soma_d_new.draw_positive(), 14.207311245358344) + + # Test draw + assert_equal(soma_d.draw(), 12.495013116354336) + assert_equal(soma_d_rng.draw(), 12.495013116354336) + assert_equal(soma_d_new.draw(), 18.201541834082416) + + # Setup uniform distribution + params = {'uniform': {'min': 20, 'max': 30}} + soma_d = 
sample.Distr(params) + soma_d_rng = sample.Distr(params, random_generator=rng) + soma_d_new = sample.Distr(params, random_generator=rng_new) + + # Test draw_positive + val = soma_d.draw_positive() + val_rng = soma_d_rng.draw_positive() + soma_d_new.draw_positive() # This call is just to align random states of rng and rng_new + val_new = soma_d_new.draw_positive() + assert_equal(val, 24.236547993389046) + assert_equal(val_rng, val) + assert_equal(val_new, val) + + expected = 26.45894113066656 + assert_equal(soma_d.draw_positive(), expected) + assert_equal(soma_d_rng.draw_positive(), expected) + assert_equal(soma_d_new.draw_positive(), expected) + + # Test draw + expected = 24.375872112626926 + assert_equal(soma_d.draw(), expected) + assert_equal(soma_d_rng.draw(), expected) + assert_equal(soma_d_new.draw(), expected) + # Setup exponential distribution + params = {'expon': {'loc': 10, 'lambda': 5}} + soma_d = sample.Distr(params) + soma_d_rng = sample.Distr(params, random_generator=rng) + soma_d_new = sample.Distr(params, random_generator=rng_new) + + # Test draw_positive + val = soma_d.draw_positive() + val_rng = soma_d_rng.draw_positive() + val_new = soma_d_new.draw_positive() + assert_equal(val, 10.444704882606532) + assert_equal(val_rng, val) + assert_equal(val_new, 10.461004518163628) + + assert_equal(soma_d.draw_positive(), 10.662982436410763) + assert_equal(soma_d_rng.draw_positive(), 10.662982436410763) + assert_equal(soma_d_new.draw_positive(), 10.46145745042702) + + # Test draw + assert_equal(soma_d.draw(), 10.09672042018045) + assert_equal(soma_d_rng.draw(), 10.09672042018045) + assert_equal(soma_d_new.draw(), 10.092614156264565) + + # Setup data distribution params = {'data': {'weights': [0.1, 0.9], 'bins': [1, 2]}} soma_d = sample.Distr(params) assert_equal(soma_d.draw_positive(), 2) - assert_equal(sample.Distr.norm({"mean": 1, "std": 0.5}), {"loc": 1, "scale": 0.5}) - assert_equal(sample.Distr.uniform({"min": 1, "max": 1.25}), {"loc": 1, "scale": 0.25}) - assert_equal(sample.Distr.expon({"loc": 1, "lambda": 2}), {"loc": 1, "scale": 0.5}) + soma_d_rng = sample.Distr(params, random_generator=rng) + assert_equal(soma_d_rng.draw_positive(), 2) + + # Setup negative loc distribution + params = {'uniform': {'min': -10, 'max': -10}} + soma_d = sample.Distr(params) + assert_raises(ValueError, soma_d.draw_positive) + + # Setup negative val distribution + params = {'uniform': {'min': -50, 'max': 10}} + soma_d = sample.Distr(params, random_generator=rng) + assert_equal(soma_d.draw_positive(), 5.535798297559666) + + +def test_soma_size(): + np.random.seed(0) + rng = np.random.default_rng(0) + params = {'soma': {'size': {'norm': {'mean': 9.024144162609812, 'std': 3.5462697985669935}}}} + + val1 = sample.soma_size(params) + assert_equal(val1, 15.279949720206192) + + val1_rng = sample.soma_size(params, random_generator=rng) + assert_equal(val1_rng, 9.470017448440464) diff --git a/tns/astrocyte/grower.py b/tns/astrocyte/grower.py index bb7975cc..c2a01910 100644 --- a/tns/astrocyte/grower.py +++ b/tns/astrocyte/grower.py @@ -21,11 +21,11 @@ L = logging.getLogger(__name__) -def _number_of_trees(tree_type, orientations, distributions): +def _number_of_trees(tree_type, orientations, distributions, random_generator=np.random): """ Sample the number of trees depending on the tree type if no predef orientations """ if orientations is None: - n_trees = sample.n_neurites(distributions["num_trees"]) + n_trees = sample.n_neurites(distributions["num_trees"], random_generator) else: n_trees = 
len(orientations) @@ -75,11 +75,17 @@ class AstrocyteGrower(NeuronGrower): as a morphIO Morphology object. A set of input distributions that store the data consumed by the algorithms and the user-selected parameters are also stored. """ - def __init__(self, input_parameters, input_distributions, context, external_diametrizer=None): + + def __init__(self, input_parameters, input_distributions, + context, external_diametrizer=None, skip_validation=False, + rng_or_seed=np.random): super().__init__( input_parameters, input_distributions, context=SpaceColonizationContext(context), - external_diametrizer=external_diametrizer) + external_diametrizer=external_diametrizer, + skip_validation=skip_validation, + rng_or_seed=rng_or_seed, + ) def validate_params(self): """Astrocyte parameter validation""" @@ -103,7 +109,8 @@ def _add_active_neurite(self, initial_soma_point, parameters, distributions): initial_point=initial_soma_point, parameters=parameters, distributions=distributions, - context=self.context) + context=self.context, + random_generator=self._rng) self.active_neurites.append(obj) @@ -116,8 +123,8 @@ def _orientations_to_points(self, orientations, n_trees, distr): ''' if orientations is None: assert n_trees != 0, "Number of trees should be greater than zero" - trunk_angles = sample.trunk_angles(distr, n_trees) - trunk_z = sample.azimuth_angles(distr, n_trees) + trunk_angles = sample.trunk_angles(distr, n_trees, self._rng) + trunk_z = sample.azimuth_angles(distr, n_trees, self._rng) return self.soma.add_points_from_trunk_angles(trunk_angles, trunk_z) assert len(orientations) >= n_trees, "n_orientations < n_trees" @@ -169,7 +176,7 @@ def _create_process_trunks(self, tree_type, parameters, distributions): trunk_points = self._orientations_to_points( orientations, - _number_of_trees(tree_type, orientations, distributions), + _number_of_trees(tree_type, orientations, distributions, self._rng), distributions) for i, trunk_point in enumerate(trunk_points): diff --git a/tns/astrocyte/section.py b/tns/astrocyte/section.py index 4185a468..b6d68a86 100644 --- a/tns/astrocyte/section.py +++ b/tns/astrocyte/section.py @@ -64,7 +64,7 @@ class SectionSpatialGrower(SectionGrowerPath): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._influence_distance = self.context.influence_distance( - self.step_size_distribution.params['mean']) + self.step_size_distribution.loc) @property def point_cloud(self): diff --git a/tns/astrocyte/space_colonization.py b/tns/astrocyte/space_colonization.py index abee73da..cdd82b69 100644 --- a/tns/astrocyte/space_colonization.py +++ b/tns/astrocyte/space_colonization.py @@ -339,7 +339,7 @@ class SpaceColonization(TMDAlgo): - Seeds point cloud - Space colonization parameters (kill and influence distance) """ - def select_persistence(self, input_data): + def select_persistence(self, input_data, random_generator=np.random): """Selects randomly from the barcodes the max radial of which is greater or equal to the distance from the soma to the domain face. 
@@ -350,12 +350,14 @@ def select_persistence(self, input_data): Barcode: The topology barcode """ if 'distance_to_domain' not in self.params: - return super().select_persistence(input_data) + return super().select_persistence(input_data, random_generator) target_distance = self.params['distance_to_domain'] persistence = sample.ph(barcodes_greater_than_distance( - input_data['persistence_diagram'], target_distance)) + input_data['persistence_diagram'], target_distance), + random_generator, + ) if self.params['barcode_scaling']: persistence = scale_barcode(persistence, target_distance) @@ -384,7 +386,7 @@ class SpaceColonizationTarget(SpaceColonization): """ A target is specified fot this algorithm. The tree grows biased from the target and when it reaches it, it stops being influenced by the point. """ - def select_persistence(self, input_data): + def select_persistence(self, input_data, random_generator=np.random): """Selects randomly from the barcodes the max radial of which is greater or equal to the distance from the soma to the target. @@ -396,7 +398,9 @@ def select_persistence(self, input_data): target_distance = self.params['distance_soma_target'] persistence = sample.ph(barcodes_greater_than_distance( - input_data['persistence_diagram'], target_distance)) + input_data['persistence_diagram'], target_distance), + random_generator, + ) if self.params['barcode_scaling']: persistence = scale_barcode(persistence, target_distance) diff --git a/tns/astrocyte/tree.py b/tns/astrocyte/tree.py index f7e5f784..9c58a6ac 100644 --- a/tns/astrocyte/tree.py +++ b/tns/astrocyte/tree.py @@ -28,7 +28,8 @@ def _initialize_algorithm(self): growth_algo = grow_meth(input_data=self.distr, params=self.params, start_point=self.point, - context=self.context) + context=self.context, + random_generator=self._rng) stop, num_sec = growth_algo.initialize() @@ -62,7 +63,8 @@ def add_section(self, parent, direction, first_point, stop, stop_criteria=copy.deepcopy(stop), step_size_distribution=self.seg_length_distr, pathlength=pathlength, - context=self.context) + context=self.context, + random_generator=self._rng) self.active_sections.append(sec_grower) return sec_grower diff --git a/tns/generate/algorithms/basicgrower.py b/tns/generate/algorithms/basicgrower.py index e53c2d46..ef05e619 100644 --- a/tns/generate/algorithms/basicgrower.py +++ b/tns/generate/algorithms/basicgrower.py @@ -16,7 +16,8 @@ def __init__(self, input_data, params, start_point, - context=None): + context=None, + **_): """ input_data: saves all the data required for the growth params: parameters needed for growth, it should include the bif_method diff --git a/tns/generate/algorithms/tmdgrower.py b/tns/generate/algorithms/tmdgrower.py index 734d31ac..c67767b5 100644 --- a/tns/generate/algorithms/tmdgrower.py +++ b/tns/generate/algorithms/tmdgrower.py @@ -19,7 +19,7 @@ class TMDAlgo(AbstractAlgo): """TreeGrower of TMD basic growth""" - def __init__(self, input_data, params, start_point, context=None): + def __init__(self, input_data, params, start_point, context=None, random_generator=np.random, **_): """ TMD basic grower input_data: saves all the data required for the growth @@ -30,7 +30,7 @@ def __init__(self, input_data, params, start_point, context=None): super(TMDAlgo, self).__init__(input_data, params, start_point, context) self.bif_method = bif_methods[params["branching_method"]] self.params = copy.deepcopy(params) - self.ph_angles = self.select_persistence(input_data) + self.ph_angles = self.select_persistence(input_data, random_generator) 
# Consistency check between parameters - persistence diagram barSZ = np.min(get_lengths(self.ph_angles)) stepSZ = self.params['step_size']['norm']['mean'] @@ -42,12 +42,12 @@ def __init__(self, input_data, params, start_point, context=None): self.apical_point_distance_from_soma = 0.0 self.persistence_length = self.barcode.get_persistence_length() - def select_persistence(self, input_data): + def select_persistence(self, input_data, random_generator=np.random): """Samples one persistence diagram from a list of diagrams and modifies according to input parameters. """ list_of_persistences = input_data["persistence_diagram"] - persistence = sample.ph(list_of_persistences) + persistence = sample.ph(list_of_persistences, random_generator) if self.params.get('modify'): persistence = self.params['modify']['funct'](persistence, diff --git a/tns/generate/diametrizer.py b/tns/generate/diametrizer.py index a8afa028..e8bf0084 100644 --- a/tns/generate/diametrizer.py +++ b/tns/generate/diametrizer.py @@ -4,9 +4,9 @@ from tns.morphio_utils import section_filter, root_section_filter -def sample(data): +def sample(data, random_generator=np.random): """Returns a value according to the input data""" - return np.random.choice(data) + return random_generator.choice(data) def section_lengths(section): @@ -106,14 +106,20 @@ def sec_mean_taper(sec): max_diam=np.max(section.diameters)) -def diametrize_from_root(neuron, model_all, neurite_type=None): +def diametrize_from_root( + neuron, + neurite_type=None, + *, + model_all, + random_generator=np.random, +): # pylint: disable=too-many-locals '''Corrects the diameters of a morphio-neuron according to the model. Starts from the root and moves towards the tips. ''' for r in root_section_filter(neuron, tree_type=neurite_type): model = model_all[TYPE_TO_STR[r.type]] # Selected by the root type. - trunk_diam = sample(model['trunk']) + trunk_diam = sample(model['trunk'], random_generator) min_diam = np.min(model['term']) status = {s.id: False for s in r.iter()} @@ -123,10 +129,10 @@ def diametrize_from_root(neuron, model_all, neurite_type=None): for section in list(active): if section.is_root: - taper = sample(model['trunk_taper']) + taper = sample(model['trunk_taper'], random_generator) init_diam = trunk_diam else: - taper = sample(model['taper']) + taper = sample(model['taper'], random_generator) init_diam = section.diameters[0] taper_section_diam_from_root(section, init_diam, taper=taper, @@ -152,25 +158,29 @@ def diametrize_from_root(neuron, model_all, neurite_type=None): redefine_diameter_section(section, 0, section.parent.diameters[-1]) -def diametrize_from_tips(neuron, model_all, neurite_type=None): +def diametrize_from_tips(neuron, neurite_type=None, *, model_all, random_generator=np.random): '''Corrects the diameters of a morphio-neuron according to the model. Starts from the tips and moves towards the root. ''' for r in root_section_filter(neuron, tree_type=neurite_type): model = model_all[TYPE_TO_STR[r.type]] # Selected by the root type. 
- trunk_diam = sample(model['trunk']) + trunk_diam = sample(model['trunk'], random_generator) min_diam = np.min(model['term']) tips = [s for s in r.iter() if not s.children] status = {s.id: False for s in r.iter()} for tip in tips: - redefine_diameter_section(tip, len(tip.diameters) - 1, sample(model['term'])) + redefine_diameter_section(tip, len(tip.diameters) - 1, sample(model['term'], random_generator)) active = tips while active: for section in list(active): - taper = sample(model['trunk_taper']) if section.is_root else sample(model['taper']) + taper = sample( + model['trunk_taper'], random_generator + ) if section.is_root else sample( + model['taper'], random_generator + ) taper_section_diam_from_tips(section, section.diameters[-1], taper=taper, min_diam=min_diam, max_diam=trunk_diam) @@ -190,14 +200,14 @@ def diametrize_from_tips(neuron, model_all, neurite_type=None): redefine_diameter_section(section, 0, section.parent.diameters[-1]) -def diametrize_constant_per_section(neuron, neurite_type=None): +def diametrize_constant_per_section(neuron, neurite_type=None, **_): '''Corrects the diameters of a morphio-neuron to make them constant per section''' for sec in section_filter(neuron, neurite_type): mean_diam = np.mean(sec.diameters) sec.diameters = mean_diam * np.ones(len(sec.diameters)) -def diametrize_constant_per_neurite(neuron, neurite_type=None): +def diametrize_constant_per_neurite(neuron, neurite_type=None, **_): '''Corrects the diameters of a morphio-neuron to make them constant per neurite''' roots = root_section_filter(neuron, neurite_type) @@ -207,20 +217,23 @@ def diametrize_constant_per_neurite(neuron, neurite_type=None): sec.diameters = mean_diam * np.ones(len(sec.diameters)) -def diametrize_smoothing(neuron, neurite_type=None): +def diametrize_smoothing(neuron, neurite_type=None, **_): '''Corrects the diameters of a morphio-neuron, by smoothing them within each section''' for sec in section_filter(neuron, neurite_type): smooth_section_diam(sec) -def build(neuron, input_model=None, neurite_types=None, diam_method=None): +def build(neuron, + input_model=None, + neurite_types=None, + diam_method=None, + random_generator=np.random): '''Diametrize according to the selected method. if diam_method is a string matching the models below it will use an internal diametrizer. If it a function is provided, it will use the function to diametrize cells. 
This function should have the following arguments: neuron, diameter model, type of neurite (str), and only update the neuron object''' - if neurite_types is None: neurite_types = ['apical', 'basal'] @@ -231,15 +244,12 @@ def build(neuron, input_model=None, neurite_types=None, diam_method=None): 'M4': diametrize_from_root, 'M5': diametrize_from_tips} - for tree_type in neurite_types: - if diam_method in ['M1', 'M2', 'M3']: - methods[diam_method](neuron, STR_TO_TYPES[tree_type]) - else: - methods[diam_method](neuron, input_model, STR_TO_TYPES[tree_type]) - - elif hasattr(diam_method, '__call__'): - for tree_type in neurite_types: - diam_method(neuron, input_model, tree_type) + diam_method = methods[diam_method] - else: + elif not hasattr(diam_method, '__call__'): raise ValueError('Diameter method not understood, we got {}'.format(diam_method)) + + for tree_type in neurite_types: + if isinstance(tree_type, str): + tree_type = STR_TO_TYPES.get(tree_type) + diam_method(neuron, tree_type, model_all=input_model, random_generator=random_generator) diff --git a/tns/generate/grower.py b/tns/generate/grower.py index 80f8bddb..67f3943c 100644 --- a/tns/generate/grower.py +++ b/tns/generate/grower.py @@ -5,12 +5,13 @@ import logging import numpy as np +from numpy.random import BitGenerator, Generator, RandomState, SeedSequence from morphio.mut import Morphology # pylint: disable=import-error +from tns.generate import diametrizer from tns.generate.soma import SomaGrower -from tns.morphmath import sample from tns.generate.tree import TreeGrower -from tns.generate import diametrizer +from tns.morphmath import sample from tns.validator import validate_neuron_params, validate_neuron_distribs L = logging.getLogger(__name__) @@ -26,16 +27,31 @@ class NeuronGrower: """ def __init__(self, input_parameters, input_distributions, - context=None, external_diametrizer=None, skip_validation=False): + context=None, external_diametrizer=None, skip_validation=False, + rng_or_seed=np.random): """TNS NeuronGrower input_parameters: the user-defined parameters input_distributions: distributions extracted from biological data context: an object containing contextual information external_diametrizer: diametrizer function for external diametrizer module skip_validation: if set to False, the parameters and distributions are validated + rng_or_seed: should be a `numpy.random.Generator` or an object that can be used as a seed + for the `numpy.random.default_rng()` function. """ self.neuron = Morphology() self.context = context + if rng_or_seed is None or isinstance( + rng_or_seed, + (int, np.integer, SeedSequence, BitGenerator) + ): + self._rng = np.random.default_rng(rng_or_seed) + elif isinstance(rng_or_seed, (RandomState, Generator)) or rng_or_seed is np.random: + self._rng = rng_or_seed + else: + raise TypeError( + "The 'rng_or_seed' argument must be None, np.random or an instance of one of the " + "following types: [int, SeedSequence, BitGenerator, RandomState, Generator]." + ) self.input_parameters = copy.deepcopy(input_parameters) L.debug('Input Parameters: %s', input_parameters) @@ -69,8 +85,8 @@ def __init__(self, input_parameters, input_distributions, # and initial points on the soma surface will be initialized. 
self.active_neurites = list() self.soma = SomaGrower(initial_point=self.input_parameters["origin"], - radius=sample.soma_size(self.input_distributions), - context=context) + radius=sample.soma_size(self.input_distributions, self._rng), + context=context, random_generator=self._rng) # Create a list to expose apical sections for each apical tree in the neuron, # the user can call NeuronGrower.apical_sections to get section IDs whose the last # point is the apical point of each generated apical tree. @@ -141,8 +157,8 @@ def _diametrize(): if neurite_types is None: neurite_types = self.input_parameters['grow_types'] diametrizer.build(self.neuron, self.input_distributions['diameter'], - neurite_types=neurite_types, - diam_method=diam_method) + neurite_types=neurite_types, diam_method=diam_method, + random_generator=self._rng) self._diametrize = _diametrize def _convert_orientation2points(self, orientation, n_trees, distr, params): @@ -159,8 +175,8 @@ def _convert_orientation2points(self, orientation, n_trees, distr, params): if params.get('trunk_absolute_orientation', False): if len(orientation) == 1: # Pick random absolute angles - trunk_absolute_angles = sample.trunk_absolute_angles(distr, n_trees) - z_angles = sample.azimuth_angles(distr, n_trees) + trunk_absolute_angles = sample.trunk_absolute_angles(distr, n_trees, self._rng) + z_angles = sample.azimuth_angles(distr, n_trees, self._rng) pts = self.soma.add_points_from_trunk_absolute_orientation( orientation, trunk_absolute_angles, z_angles) else: @@ -172,8 +188,8 @@ def _convert_orientation2points(self, orientation, n_trees, distr, params): else: raise ValueError('Not enough orientation points!') elif orientation is None: # Samples from trunk_angles - trunk_angles = sample.trunk_angles(distr, n_trees) - trunk_z = sample.azimuth_angles(distr, n_trees) + trunk_angles = sample.trunk_angles(distr, n_trees, self._rng) + trunk_z = sample.azimuth_angles(distr, n_trees, self._rng) pts = self.soma.add_points_from_trunk_angles(trunk_angles, trunk_z) elif orientation == 'from_space': raise ValueError('Not implemented yet!') @@ -195,7 +211,7 @@ def _grow_trunks(self): distr = self.input_distributions[type_of_tree] # Sample the number of trees depending on the tree type - n_trees = sample.n_neurites(distr["num_trees"]) + n_trees = sample.n_neurites(distr["num_trees"], self._rng) if type_of_tree == 'basal' and n_trees < 2: raise Exception('There should be at least 2 basal dendrites (got {})'.format( n_trees)) @@ -213,7 +229,8 @@ def _grow_trunks(self): initial_point=p, parameters=params, distributions=distr, - context=self.context) + context=self.context, + random_generator=self._rng) self.active_neurites.append(obj) def _grow_soma(self, soma_type='contour'): diff --git a/tns/generate/section.py b/tns/generate/section.py index 718894a8..041fb9d0 100644 --- a/tns/generate/section.py +++ b/tns/generate/section.py @@ -19,7 +19,8 @@ class SectionGrower: ''' # pylint: disable-msg=too-many-arguments def __init__(self, parent, children, first_point, direction, parameters, - process, stop_criteria, step_size_distribution, pathlength, context=None): + process, stop_criteria, step_size_distribution, pathlength, + context=None, random_generator=np.random): '''A section is a list of points in 4D space (x, y, x, r) that are sequentially connected to each other. This process generates a tubular morphology that resembles a random walk. 
@@ -37,6 +38,7 @@ def __init__(self, parent, children, first_point, direction, parameters, self.process = process self.latest_directions = deque(maxlen=MEMORY) self.context = context + self._rng = random_generator self.step_size_distribution = step_size_distribution self.pathlength = 0 if parent is None else pathlength @@ -53,9 +55,11 @@ def next_point(self, current_point): """Returns the next point depending on the growth method and the previous point. """ - direction = self.params.targeting * self.direction + \ - self.params.randomness * get_random_point() + \ + direction = ( + self.params.targeting * self.direction + + self.params.randomness * get_random_point(random_generator=self._rng) + self.params.history * self.history() + ) direction = direction / vectorial_norm(direction) seg_length = self.step_size_distribution.draw_positive() @@ -144,7 +148,7 @@ def _check(self, value, which): # no need to exponentiate, the comparison below automatically resolves to `True` return True # Check if close enough to exp( distance * lamda) - return np.random.random() < np.exp(-x * lamda) + return self._rng.random() < np.exp(-x * lamda) def check_stop(self): '''Probabilities of bifurcating and stopping are proportional diff --git a/tns/generate/soma.py b/tns/generate/soma.py index f47dfb5a..e3b9c8a2 100644 --- a/tns/generate/soma.py +++ b/tns/generate/soma.py @@ -17,7 +17,7 @@ class SomaGrower: """Soma class""" - def __init__(self, initial_point, radius=1.0, context=None): + def __init__(self, initial_point, radius=1.0, context=None, random_generator=np.random): """TNS Soma Object Parameters: @@ -28,6 +28,7 @@ def __init__(self, initial_point, radius=1.0, context=None): self.radius = float(radius) self._center = np.asarray(initial_point, dtype=np.float) self.context = context # for future, hypothetical use + self._rng = random_generator @property def points(self): @@ -163,7 +164,7 @@ def interpolate(self, points, interpolation=10): points_to_interpolate = points else: # Adds points from circle circumference to the soma points. - angles = 2. * np.pi * np.random.rand(interpolation - len(points)) + angles = 2. 
* np.pi * self._rng.random(interpolation - len(points)) x = self.radius * np.sin(angles) + self.center[0] y = self.radius * np.cos(angles) + self.center[1] z = np.full_like(angles, self.center[2]) diff --git a/tns/generate/tree.py b/tns/generate/tree.py index a047c765..0d2520c1 100644 --- a/tns/generate/tree.py +++ b/tns/generate/tree.py @@ -76,7 +76,8 @@ def __init__(self, initial_point, parameters, distributions, - context=None): + context=None, + random_generator=np.random): """TNS Tree Object Parameters: @@ -95,10 +96,11 @@ def __init__(self, self.distr = distributions self.active_sections = list() self.context = context + self._rng = random_generator # Creates the distribution from which the segment lengths # To sample a new seg_len call self.seg_len.draw() - self.seg_length_distr = sample.Distr(self.params["step_size"]) + self.seg_length_distr = sample.Distr(self.params["step_size"], random_generator=self._rng) self._section_parameters = _create_section_parameters(parameters) self.growth_algo = self._initialize_algorithm() @@ -110,7 +112,8 @@ def _initialize_algorithm(self): growth_algo = grow_meth(input_data=self.distr, params=self.params, start_point=self.point, - context=self.context) + context=self.context, + random_generator=self._rng) stop, num_sec = growth_algo.initialize() @@ -143,7 +146,8 @@ def add_section(self, parent, direction, first_point, stop, pathlength, stop_criteria=copy.deepcopy(stop), step_size_distribution=self.seg_length_distr, pathlength=pathlength, - context=self.context) + context=self.context, + random_generator=self._rng) self.active_sections.append(sec_grower) return sec_grower diff --git a/tns/morphmath/sample.py b/tns/morphmath/sample.py index e3eb06a2..cf175d20 100644 --- a/tns/morphmath/sample.py +++ b/tns/morphmath/sample.py @@ -2,19 +2,19 @@ import numpy as np -from scipy import stats - class Distr: '''Class of custom distributions ''' - def __init__(self, params): + def __init__(self, params, random_generator=np.random): """Object of statistical distribution """ - self.type = next(iter(params.keys())) - self.params = params[self.type] - self.set_distribution() + self.type, distr_params = next(iter(params.items())) + self._rng = random_generator + self.loc = 0.0 + self.scale = 1.0 + self.set_distribution(distr_params) @staticmethod def norm(params): @@ -22,8 +22,7 @@ def norm(params): as expected from scipy from mean, std data """ - return {"loc": params["mean"], - "scale": params["std"]} + return "standard_normal", params["mean"], params["std"] @staticmethod def uniform(params): @@ -31,8 +30,7 @@ def uniform(params): as expected from scipy from min, max of a uniform """ - return {'loc': params['min'], - 'scale': params['max'] - params['min']} + return "uniform", params['min'], params['max'] - params['min'] @staticmethod def expon(params): @@ -40,41 +38,49 @@ def expon(params): as expected from scipy from mean, std data """ - return {"loc": params["loc"], - "scale": 1. / params["lambda"]} + return "standard_exponential", params["loc"], 1. 
/ params["lambda"] - def set_distribution(self): + def set_distribution(self, params): """Returns a statistical distribution according to input parameters """ # If distribution is a statistical distribution if self.type != "data": - fit = getattr(self, self.type)(self.params) - self.distribution = getattr(stats, self.type)(**fit) + name, self.loc, self.scale = getattr(self, self.type)(params) + self.distribution = getattr(self._rng, name) # If distribution consists of data we reformat input else: - w = np.array(self.params["weights"], dtype=np.float) - b = np.array(self.params["bins"]) + w = np.array(params["weights"], dtype=np.float) + b = np.array(params["bins"]) self.distribution = {"bins": b, "weights": w / np.sum(w)} def draw(self): """Returns a sampled number""" if self.type == "data": - return np.random.choice(self.distribution["bins"], - p=self.distribution["weights"]) + return self._rng.choice(self.distribution["bins"], + p=self.distribution["weights"]) - return self.distribution.rvs() + return self.loc + self.scale * self.distribution() def draw_positive(self): """Returns a positive sampled number""" if self.type == "data": positives = np.where(self.distribution["bins"] > 0) - return np.random.choice(self.distribution["bins"][positives], - p=self.distribution["weights"][positives]) - - val = self.distribution.rvs() + return self._rng.choice(self.distribution["bins"][positives], + p=self.distribution["weights"][positives]) + + if self.scale == 0: + if self.loc >= 0: + return self.loc + else: + raise ValueError( + "The 'loc' of the distribution must be >= 0 when 'scale' == 0 (loc == " + f"{self.scale})" + ) + + val = self.loc + self.scale * self.distribution() while val <= 0: - val = self.distribution.rvs() + val = self.loc + self.scale * self.distribution() return val @@ -91,38 +97,38 @@ def d_transform(distr, funct): return transf -def soma_size(distrib): +def soma_size(distrib, random_generator=np.random): """Returns a random soma radius as sampled from a distribution plus some constraints. """ - soma_d = Distr(distrib['soma']['size']) + soma_d = Distr(distrib['soma']['size'], random_generator) return soma_d.draw_positive() -def n_neurites(distrib): +def n_neurites(distrib, random_generator=np.random): """Returns a number of neurites as sampled from a distribution plus some constraints. It ensures the number will be an INT. """ - neurites_d = Distr(distrib) + neurites_d = Distr(distrib, random_generator) numtrees = int(neurites_d.draw()) return numtrees -def trunk_angles(distrib, N): +def trunk_angles(distrib, N, random_generator=np.random): """Returns a sequence of relative angles, depending on the number of trunks "N" and the input distribution. """ - trunks_d = Distr(distrib['trunk']['orientation_deviation']) + trunks_d = Distr(distrib['trunk']['orientation_deviation'], random_generator) angles = [trunks_d.draw() for _ in range(N - 1)] angles = angles + [sum(angles)] return angles -def trunk_absolute_angles(distrib, N): +def trunk_absolute_angles(distrib, N, random_generator=np.random): """Returns a sequence of absolute angles, depending on the number of trunks "N" and the input distribution. @@ -133,23 +139,23 @@ def trunk_absolute_angles(distrib, N): "No elevation distribution found in parameters['trunk']" "['absolute_elevation_deviation']." 
) - trunks_d_theta = Distr(elevation) + trunks_d_theta = Distr(elevation, random_generator) return [trunks_d_theta.draw() for _ in range(N)] -def azimuth_angles(distrib, N): +def azimuth_angles(distrib, N, random_generator=np.random): """Returns a sequence of angles, depending on the number of trunks "N" and the input distribution. """ - trunks_d = Distr(d_transform(distrib['trunk']['azimuth'], np.cos)) + trunks_d = Distr(d_transform(distrib['trunk']['azimuth'], np.cos), random_generator) angles = [np.arccos(trunks_d.draw()) for _ in range(N)] return angles -def ph(phs): +def ph(phs, random_generator=np.random): """Samples randomly a persistence diagram from the input distribution. """ - index = np.random.choice(len(phs)) + index = random_generator.choice(len(phs)) return phs[index] diff --git a/tns/morphmath/utils.py b/tns/morphmath/utils.py index 956cbc1e..1e19e2b6 100644 --- a/tns/morphmath/utils.py +++ b/tns/morphmath/utils.py @@ -4,15 +4,15 @@ from numpy import sqrt -def get_random_point(D=1.0): +def get_random_point(D=1.0, random_generator=np.random): ''' Get 3-d coordinates of a new random point. The distance between the produced point and (0,0,0) is given by the value D. ''' # pylint: disable=assignment-from-no-return - phi = np.random.uniform(0., 2. * np.pi) - theta = np.arccos(np.random.uniform(-1.0, 1.0)) + phi = random_generator.uniform(0., 2. * np.pi) + theta = np.arccos(random_generator.uniform(-1.0, 1.0)) sn_theta = np.sin(theta)