adding tests. simplifying getfbounds #1

Merged
merged 4 commits on Mar 8, 2025
22 changes: 14 additions & 8 deletions tedana/selection/selection_nodes.py
@@ -714,10 +714,14 @@ def calc_kappa_elbow(
This also means the kappa elbow should be calculated before those two other functions
are called
"""
if "echo_dof" in selector.cross_component_metrics_.keys():
if (
"echo_dof" in selector.cross_component_metrics_.keys()
and selector.cross_component_metrics_["echo_dof"]
):
echo_dof = selector.cross_component_metrics_["echo_dof"]
else:
echo_dof = None
# DOF is number of echoes if not otherwise specified
echo_dof = selector.cross_component_metrics_["n_echos"]
outputs = {
"decision_node_idx": selector.current_node_idx_,
"node_label": None,
@@ -782,8 +786,7 @@ def calc_kappa_elbow(
outputs["varex_upper_p"],
) = kappa_elbow_kundu(
selector.component_table_,
selector.cross_component_metrics_["n_echos"],
echo_dof=echo_dof,
echo_dof,
comps2use=comps2use,
)
selector.cross_component_metrics_["kappa_elbow_kundu"] = outputs["kappa_elbow_kundu"]
@@ -852,10 +855,14 @@ def calc_rho_elbow(
f"It is {rho_elbow_type} "
)

if "echo_dof" in selector.cross_component_metrics_.keys():
if (
"echo_dof" in selector.cross_component_metrics_.keys()
and selector.cross_component_metrics_["echo_dof"]
):
echo_dof = selector.cross_component_metrics_["echo_dof"]
else:
echo_dof = None
# DOF is number of echoes if not otherwise specified
echo_dof = selector.cross_component_metrics_["n_echos"]

outputs = {
"decision_node_idx": selector.current_node_idx_,
@@ -916,8 +923,7 @@ def calc_rho_elbow(
outputs["elbow_f05"],
) = rho_elbow_kundu_liberal(
selector.component_table_,
selector.cross_component_metrics_["n_echos"],
echo_dof=echo_dof,
echo_dof,
rho_elbow_type=rho_elbow_type,
comps2use=comps2use,
subset_comps2use=subset_comps2use,
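Note on the fallback introduced in calc_kappa_elbow and calc_rho_elbow above: when cross_component_metrics_ carries no usable echo_dof, the number of echoes is used as the degrees of freedom. A minimal sketch of that resolution logic, with a plain dict standing in for the tedana selector object (values are illustrative only):

```python
# Hypothetical stand-in for selector.cross_component_metrics_; not real tedana output.
cross_component_metrics = {"n_echos": 5, "echo_dof": None}

if (
    "echo_dof" in cross_component_metrics
    and cross_component_metrics["echo_dof"]
):
    echo_dof = cross_component_metrics["echo_dof"]
else:
    # DOF is the number of echoes if not otherwise specified
    echo_dof = cross_component_metrics["n_echos"]

print(echo_dof)  # -> 5, because "echo_dof" is present but falsy (None)
```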
31 changes: 10 additions & 21 deletions tedana/selection/selection_utils.py
@@ -580,7 +580,7 @@ def getelbow(arr, return_val=False):
return k_min_ind


def kappa_elbow_kundu(component_table, n_echos, echo_dof=None, comps2use=None):
def kappa_elbow_kundu(component_table, echo_dof, comps2use=None):
"""
Calculate an elbow for kappa.

@@ -592,12 +592,10 @@ def kappa_elbow_kundu(component_table, n_echos, echo_dof=None, comps2use=None):
Component metric table. One row for each component, with a column for
each metric. The index should be the component number.
Only the 'kappa' column is used in this function
n_echos : :obj:`int`
The number of echos in the multi-echo data
echo_dof : :obj:`int`, optional
echo_dof : :obj:`int`
Degree of freedom to use in goodness of fit metrics (fstat).
Primarily used for EPTI acquisitions.
If None, number of echoes will be used. Default is None.
Typically the number of echos in the multi-echo data
May be a lower value for EPTI acquisitions.
comps2use : :obj:`list[int]`
A list of component indices used to calculate the elbow
default=None which means use all components
@@ -637,10 +635,7 @@ def kappa_elbow_kundu(component_table, n_echos, echo_dof=None, comps2use=None):
kappas2use = component_table.loc[comps2use, "kappa"].to_numpy()

# low kappa threshold
if echo_dof is None:
_, _, f01 = getfbounds(n_echos)
else:
_, _, f01 = getfbounds(echo_dof)
_, _, f01 = getfbounds(echo_dof)
# get kappa values for components below a significance threshold
kappas_nonsig = kappas2use[kappas2use < f01]

@@ -678,8 +673,7 @@ def kappa_elbow_kundu(component_table, n_echos, echo_dof=None, comps2use=None):

def rho_elbow_kundu_liberal(
component_table,
n_echos,
echo_dof=None,
echo_dof,
rho_elbow_type="kundu",
comps2use=None,
subset_comps2use=-1,
@@ -696,12 +690,10 @@
Component metric table. One row for each component, with a column for
each metric. The index should be the component number.
Only the 'kappa' column is used in this function
n_echos : :obj:`int`
The number of echos in the multi-echo data
echo_dof : :obj:`int`, optional
echo_dof : :obj:`int`
Degree of freedom to use in goodness of fit metrics (fstat).
Primarily used for EPTI acquisitions.
If None, number of echoes will be used. Default is None.
Typically the number of echos in the multi-echo data
May be a lower value for EPTI acquisitions.
rho_elbow_type : :obj:`str`
The algorithm used to calculate the rho elbow. Current options are
'kundu' and 'liberal'.
@@ -769,10 +761,7 @@ def rho_elbow_kundu_liberal(
].tolist()

# One rho elbow threshold set just on the number of echoes
if echo_dof is None:
elbow_f05, _, _ = getfbounds(n_echos)
else:
elbow_f05, _, _ = getfbounds(echo_dof)
elbow_f05, _, _ = getfbounds(echo_dof)
# One rho elbow threshold set using all components in comps2use
rhos_comps2use = component_table.loc[comps2use, "rho"].to_numpy()
rho_allcomps_elbow = getelbow(rhos_comps2use, return_val=True)
14 changes: 8 additions & 6 deletions tedana/stats.py
@@ -11,24 +11,26 @@
RepLGR = logging.getLogger("REPORT")


def getfbounds(n_echos):
def getfbounds(echo_dof):
"""
Get F-statistic boundaries based on the degrees of freedom across echoes.

Parameters
----------
n_echos : :obj:`int`
Number of echoes
echo_dof : :obj:`int`
Degree of freedom to use in goodness of fit metrics (fstat).
Typically the number of echos in the multi-echo data
May be a lower value for EPTI acquisitions.

Returns
-------
fmin, fmid, fmax : :obj:`float`
F-statistic thresholds for alphas of 0.05, 0.025, and 0.01,
respectively.
"""
f05 = stats.f.ppf(q=(1 - 0.05), dfn=1, dfd=(n_echos - 1))
f025 = stats.f.ppf(q=(1 - 0.025), dfn=1, dfd=(n_echos - 1))
f01 = stats.f.ppf(q=(1 - 0.01), dfn=1, dfd=(n_echos - 1))
f05 = stats.f.ppf(q=(1 - 0.05), dfn=1, dfd=(echo_dof - 1))
f025 = stats.f.ppf(q=(1 - 0.025), dfn=1, dfd=(echo_dof - 1))
f01 = stats.f.ppf(q=(1 - 0.01), dfn=1, dfd=(echo_dof - 1))
return f05, f025, f01


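For reference, the reworked getfbounds is just three F-distribution critical values with dfn=1 and dfd=echo_dof - 1, so a larger echo_dof yields lower thresholds. A standalone sketch using scipy directly (numbers are approximate):

```python
# Mirrors the scipy calls in getfbounds above without importing tedana.
from scipy import stats

echo_dof = 5  # e.g. five echoes with no EPTI-style reduction
f05 = stats.f.ppf(q=1 - 0.05, dfn=1, dfd=echo_dof - 1)
f025 = stats.f.ppf(q=1 - 0.025, dfn=1, dfd=echo_dof - 1)
f01 = stats.f.ppf(q=1 - 0.01, dfn=1, dfd=echo_dof - 1)
print(f05, f025, f01)  # roughly 7.7, 12.2, 21.2 for echo_dof=5
```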
3 changes: 3 additions & 0 deletions tedana/tests/test_integration.py
@@ -126,11 +126,13 @@ def test_integration_five_echo(skip_integration):
suffix = ".sm.nii.gz"
datalist = [prepend + str(i + 1) + suffix for i in range(5)]
echo_times = [15.4, 29.7, 44.0, 58.3, 72.6]
# also adding echo_dof=4 to make sure all workflow code using echo_dof is executed
tedana_cli.tedana_workflow(
data=datalist,
tes=echo_times,
ica_method="robustica",
n_robust_runs=4,
echo_dof=4,
out_dir=out_dir,
tedpca=0.95,
fittype="curvefit",
@@ -631,6 +633,7 @@ def test_integration_t2smap(skip_integration):
+ [str(te) for te in echo_times]
+ ["--out-dir", out_dir, "--fittype", "curvefit"]
+ ["--masktype", "dropout", "decay"]
+ ["--n-independent-echos", "4"]
)
t2smap_cli._main(args)

18 changes: 18 additions & 0 deletions tedana/tests/test_metrics.py
@@ -207,15 +207,33 @@ def test_smoke_calculate_f_maps():
mixing = np.random.random((n_volumes, n_components))
adaptive_mask = np.random.randint(1, n_echos + 1, size=n_voxels)
tes = np.array([15, 25, 35, 45, 55])
f_t2_maps_orig, f_s0_maps_orig, _, _ = dependence.calculate_f_maps(
data_cat=data_cat,
z_maps=z_maps,
mixing=mixing,
adaptive_mask=adaptive_mask,
tes=tes,
f_max=500,
)
assert f_t2_maps_orig.shape == f_s0_maps_orig.shape == (n_voxels, n_components)

# rerunning with echo_dof=3
f_t2_maps, f_s0_maps, _, _ = dependence.calculate_f_maps(
data_cat=data_cat,
z_maps=z_maps,
mixing=mixing,
adaptive_mask=adaptive_mask,
tes=tes,
echo_dof=3,
f_max=500,
)
assert f_t2_maps.shape == f_s0_maps.shape == (n_voxels, n_components)
# When echo_dof < the number of echoes, then f_maps_orig should have the same or larger values
assert np.min(f_t2_maps_orig - f_t2_maps) == 0
assert np.min(f_s0_maps_orig - f_s0_maps) == 0
# When echo_dof==3 and there are 5 good echoes, f_maps_orig should always be larger than f_maps
assert np.min(f_t2_maps_orig[adaptive_mask == 5] - f_t2_maps[adaptive_mask == 5]) > 0
assert np.min(f_s0_maps_orig[adaptive_mask == 5] - f_s0_maps[adaptive_mask == 5]) > 0


def test_smoke_calculate_varex():
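The ordering assertions in test_smoke_calculate_f_maps rely on a general property of F statistics: for the same fit, a smaller denominator degrees of freedom gives a smaller F. A toy illustration of that relationship (generic F formula, not tedana's calculate_f_maps; the sums of squares are made up):

```python
# F = (SSR / dfn) / (SSE / dfd): with SSE fixed, shrinking dfd inflates the
# residual mean square and deflates F.
ssr, sse, dfn = 12.0, 3.0, 1
for dfd in (4, 2):  # e.g. 5 echoes -> dfd = 4; echo_dof = 3 -> dfd = 2
    f_stat = (ssr / dfn) / (sse / dfd)
    print(dfd, f_stat)  # 4 -> 16.0, 2 -> 8.0
```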
16 changes: 8 additions & 8 deletions tedana/tests/test_selection_utils.py
@@ -388,7 +388,7 @@ def test_kappa_elbow_kundu_smoke():
kappa_allcomps_elbow,
kappa_nonsig_elbow,
varex_upper_p,
) = selection_utils.kappa_elbow_kundu(component_table, n_echos=5)
) = selection_utils.kappa_elbow_kundu(component_table, echo_dof=5)
assert isinstance(kappa_elbow_kundu, float)
assert isinstance(kappa_allcomps_elbow, float)
assert isinstance(kappa_nonsig_elbow, float)
@@ -401,7 +401,7 @@ def test_kappa_elbow_kundu_smoke():
kappa_allcomps_elbow,
kappa_nonsig_elbow,
varex_upper_p,
) = selection_utils.kappa_elbow_kundu(component_table, n_echos=6)
) = selection_utils.kappa_elbow_kundu(component_table, echo_dof=6)
assert isinstance(kappa_elbow_kundu, float)
assert isinstance(kappa_allcomps_elbow, float)
assert isinstance(kappa_nonsig_elbow, type(None))
@@ -415,7 +415,7 @@ def test_kappa_elbow_kundu_smoke():
varex_upper_p,
) = selection_utils.kappa_elbow_kundu(
component_table,
n_echos=5,
echo_dof=5,
comps2use=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 17, 18, 20],
)
assert isinstance(kappa_elbow_kundu, float)
@@ -434,7 +434,7 @@ def test_rho_elbow_kundu_liberal_smoke():
rho_allcomps_elbow,
rho_unclassified_elbow,
elbow_f05,
) = selection_utils.rho_elbow_kundu_liberal(component_table, n_echos=3)
) = selection_utils.rho_elbow_kundu_liberal(component_table, echo_dof=3)
assert isinstance(rho_elbow_kundu, float)
assert isinstance(rho_allcomps_elbow, float)
assert isinstance(rho_unclassified_elbow, float)
@@ -447,7 +447,7 @@ def test_rho_elbow_kundu_liberal_smoke():
rho_unclassified_elbow,
elbow_f05,
) = selection_utils.rho_elbow_kundu_liberal(
component_table, n_echos=3, rho_elbow_type="liberal"
component_table, echo_dof=3, rho_elbow_type="liberal"
)
assert isinstance(rho_elbow_kundu, float)
assert isinstance(rho_allcomps_elbow, float)
@@ -462,7 +462,7 @@ def test_rho_elbow_kundu_liberal_smoke():
elbow_f05,
) = selection_utils.rho_elbow_kundu_liberal(
component_table,
n_echos=3,
echo_dof=3,
rho_elbow_type="kundu",
comps2use=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 17, 18, 20],
subset_comps2use=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 18, 20],
@@ -479,15 +479,15 @@ def test_rho_elbow_kundu_liberal_smoke():
rho_allcomps_elbow,
rho_unclassified_elbow,
elbow_f05,
) = selection_utils.rho_elbow_kundu_liberal(component_table, n_echos=3)
) = selection_utils.rho_elbow_kundu_liberal(component_table, echo_dof=3)
assert isinstance(rho_elbow_kundu, float)
assert isinstance(rho_allcomps_elbow, float)
assert isinstance(rho_unclassified_elbow, type(None))
assert isinstance(elbow_f05, float)

with pytest.raises(ValueError):
selection_utils.rho_elbow_kundu_liberal(
component_table, n_echos=3, rho_elbow_type="perfect"
component_table, echo_dof=3, rho_elbow_type="perfect"
)


50 changes: 49 additions & 1 deletion tedana/tests/test_utils.py
@@ -74,7 +74,7 @@ def test_reshape_niimg():
assert utils.reshape_niimg(fimg.get_fdata()).shape == exp_shape


def test_make_adaptive_mask():
def test_make_adaptive_mask(caplog):
"""Test tedana.utils.make_adaptive_mask with different methods."""
# load data make masks
mask_file = pjoin(datadir, "mask.nii.gz")
@@ -102,6 +102,16 @@ def test_make_adaptive_mask():
# Decay: good good good (3)
data[idx + 5, :, :] = np.array([1, 0.9, -1])[:, None]

# Simulating 5 echo data to test the echo_dof parameter
data5 = np.concat(
(
data,
0.95 * np.expand_dims(data[:, 2, :], axis=1),
0.9 * np.expand_dims(data[:, 2, :], axis=1),
),
axis=1,
)

# Just dropout method
mask, adaptive_mask = utils.make_adaptive_mask(
data,
@@ -122,6 +132,7 @@ def test_make_adaptive_mask():
vals, counts = np.unique(adaptive_mask, return_counts=True)
assert np.allclose(vals, np.array([0, 1, 2, 3]))
assert np.allclose(counts, np.array([14976, 1817, 4427, 43130]))
assert "voxels in user-defined mask do not have good signal" in caplog.text

# Just decay method
mask, adaptive_mask = utils.make_adaptive_mask(
@@ -206,6 +217,43 @@ def test_make_adaptive_mask():
vals, counts = np.unique(adaptive_mask, return_counts=True)
assert np.allclose(vals, np.array([0, 1, 2, 3]))
assert np.allclose(counts, np.array([3365, 1412, 1195, 58378]))
assert "No methods provided for adaptive mask generation." in caplog.text

# testing echo_dof
# This should match "decay" from above, except all voxels with 3 good echoes should now have 5
# since two echoes were added that should not have caused more decay
mask, adaptive_mask = utils.make_adaptive_mask(
data5, mask=mask_file, threshold=1, methods=["decay"], echo_dof=3
)

assert mask.shape == adaptive_mask.shape == (64350,)
assert np.allclose(mask, (adaptive_mask >= 1).astype(bool))
assert adaptive_mask[idx] == 5
assert adaptive_mask[idx + 1] == 2
assert adaptive_mask[idx + 2] == 2
assert adaptive_mask[idx + 3] == 1
assert adaptive_mask[idx + 4] == 5
assert adaptive_mask[idx + 5] == 2
assert mask.sum() == 60985 # This method can't flag first echo as bad
vals, counts = np.unique(adaptive_mask, return_counts=True)
assert np.allclose(vals, np.array([0, 1, 2, 5]))
assert np.allclose(counts, np.array([3365, 4366, 5973, 50646]))
# 4366 + 5973 = 10339 (i.e. voxels with 1 or 2 good echoes are flagged here)
assert (
"10339 voxels (17.0%) have fewer than 3.0 good voxels. "
"These voxels will be used in all analyses, "
"but might not include 3 independant echo measurements."
) in caplog.text

mask, adaptive_mask = utils.make_adaptive_mask(
data5, mask=mask_file, threshold=1, methods=["decay"], echo_dof=4
)

assert (
"10339 voxels (17.0%) have fewer than 3.0 good voxels. "
"The degrees of freedom for fits across echoes will remain 4 even if "
"there might be fewer independant echo measurements."
) in caplog.text


# SMOKE TESTS