Commit
FIX: Paths and pyarrow (#834)
larsoner authored Jan 24, 2024
1 parent 9055e9c commit 860c5c4
Showing 11 changed files with 36 additions and 18 deletions.
10 changes: 2 additions & 8 deletions .pre-commit-config.yaml
@@ -5,17 +5,11 @@ files: ^(.*\.(py|yaml))$
 # for example
 exclude: ^(\.[^/]*cache/.*|.*/freesurfer/contrib/.*)$
 repos:
-  - repo: https://github.com/psf/black
-    rev: 23.12.1
-    hooks:
-      - id: black
-        args:
-          - --safe
-          - --quiet
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.1.13
+    rev: v0.1.14
     hooks:
       - id: ruff
+      - id: ruff-format
   - repo: https://github.com/codespell-project/codespell
     rev: v2.2.6
     hooks:
5 changes: 3 additions & 2 deletions docs/source/v1.6.md.inc
@@ -14,10 +14,11 @@

 - MNE-BIDS-Pipeline now requires Python 3.9 or newer. (#825 by @hoechenberger)
 
-[//]: # (### :bug: Bug fixes)
+### :bug: Bug fixes
 
-[//]: # (- Whatever (#000 by @whoever))
+- Fix minor issues with path handling for cross-talk and calibration files (#834 by @larsoner)
 
 ### :medical_symbol: Code health
 
 - The package build backend has been switched from `setuptools` to `hatchling`. (#825 by @hoechenberger)
+- Code formatting now uses `ruff format` instead of `black` (#834 by @larsoner)
10 changes: 7 additions & 3 deletions mne_bids_pipeline/_config_utils.py
@@ -353,15 +353,19 @@ def get_mf_cal_fname(
     *, config: SimpleNamespace, subject: str, session: str
 ) -> pathlib.Path:
     if config.mf_cal_fname is None:
-        mf_cal_fpath = BIDSPath(
+        bids_path = BIDSPath(
             subject=subject,
             session=session,
             suffix="meg",
             datatype="meg",
             root=config.bids_root,
-        ).meg_calibration_fpath
+        ).match()[0]
+        mf_cal_fpath = bids_path.meg_calibration_fpath
         if mf_cal_fpath is None:
-            raise ValueError("Could not find Maxwell Filter Calibration file.")
+            raise ValueError(
+                "Could not determine Maxwell Filter Calibration file from BIDS "
+                f"definition for file {bids_path}."
+            )
     else:
         mf_cal_fpath = pathlib.Path(config.mf_cal_fname).expanduser().absolute()
     if not mf_cal_fpath.exists():
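
For context, a minimal sketch (not part of this commit) of how the new lookup resolves a fine-calibration file via mne-bids; the root, subject, and session values below are hypothetical:

import mne_bids

bids_path = mne_bids.BIDSPath(
    subject="01",
    session="01",
    suffix="meg",
    datatype="meg",
    root="/data/my_bids_root",  # hypothetical BIDS root
).match()[0]  # first matching recording for this subject/session
# Path to the Elekta/MEGIN fine-calibration file, or None if the dataset ships none
print(bids_path.meg_calibration_fpath)
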
4 changes: 2 additions & 2 deletions mne_bids_pipeline/_main.py
@@ -37,7 +37,7 @@ def main():
         metavar="FILE",
         help="Create a template configuration file with the specified name. "
         "If specified, all other parameters will be ignored.",
-    ),
+    )
     parser.add_argument(
         "--steps",
         dest="steps",
@@ -70,7 +70,7 @@
             If unspecified, this will be derivatives/mne-bids-pipeline
             inside the BIDS root."""
         ),
-    ),
+    )
     parser.add_argument(
         "--subject", dest="subject", default=None, help="The subject to process."
     )
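
As a side note (not part of the commit), the trailing commas removed above were harmless at runtime but made each statement evaluate to a one-element tuple rather than the parser action itself:

import argparse

parser = argparse.ArgumentParser()
action = parser.add_argument("--steps")  # the argparse Action object
oops = parser.add_argument("--other"),   # a 1-tuple containing the Action
print(type(action).__name__, type(oops).__name__)  # _StoreAction tuple
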
@@ -173,7 +173,7 @@ def filter_data(
         raw = import_er_data(
             cfg=cfg,
             bids_path_er_in=bids_path_in,
-            bids_path_ref_in=in_files.pop("raw_ref_run"),
+            bids_path_ref_in=in_files.pop("raw_ref_run", None),
             bids_path_er_bads_in=bids_path_bads_in,
             # take bads from this run (0)
             bids_path_ref_bads_in=in_files.pop("raw_ref_run-bads", None),
@@ -196,6 +196,7 @@ def filter_data(
         split=None,
         task=task,
         run=run,
+        check=False,
     )
 
     raw.load_data()
@@ -232,6 +233,9 @@ def filter_data(
         run_type=run_type,
     )
 
+    # For example, might need to create
+    # derivatives/mne-bids-pipeline/sub-emptyroom/ses-20230412/meg
+    out_files[in_key].fpath.parent.mkdir(exist_ok=True, parents=True)
     raw.save(
         out_files[in_key],
         overwrite=True,
3 changes: 2 additions & 1 deletion mne_bids_pipeline/steps/sensor/_99_group_average.py
@@ -814,7 +814,8 @@ def average_csp_decoding(
         import scipy.stats
 
         cluster_forming_t_threshold = scipy.stats.t.ppf(
-            1 - 0.05, len(cfg.subjects) - 1  # one-sided test
+            1 - 0.05,
+            len(cfg.subjects) - 1,  # one-sided test
         )
     else:
         cluster_forming_t_threshold = cfg.cluster_forming_t_threshold
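
As a quick standalone check of what the reformatted call computes (the subject count here is illustrative, not taken from the diff):

import scipy.stats

n_subjects = 10  # hypothetical
# one-sided cluster-forming threshold at alpha = 0.05 with n - 1 degrees of freedom
t_threshold = scipy.stats.t.ppf(1 - 0.05, n_subjects - 1)
print(round(t_threshold, 3))  # ~1.833 for df = 9
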
2 changes: 1 addition & 1 deletion mne_bids_pipeline/tests/configs/config_ds003392.py
@@ -21,7 +21,7 @@
 # Artifact correction.
 spatial_filter = "ica"
 ica_algorithm = "picard-extended_infomax"
-ica_max_iterations = 500
+ica_max_iterations = 1000
 ica_l_freq = 1.0
 ica_n_components = 0.99
 ica_reject_components = "auto"
2 changes: 2 additions & 0 deletions mne_bids_pipeline/tests/datasets.py
@@ -85,6 +85,8 @@ class DATASET_OPTIONS_T(TypedDict, total=False):
     "ds003775": {
         "openneuro": "ds003775",
         "include": ["sub-010"],
+        # See https://github.com/OpenNeuroOrg/openneuro/issues/2976
+        "exclude": ["sub-010/ses-t1/sub-010_ses-t1_scans.tsv"],
     },
     "ds001810": {
         "openneuro": "ds001810",
2 changes: 2 additions & 0 deletions mne_bids_pipeline/tests/sub-010_ses-t1_scans.tsv
@@ -0,0 +1,2 @@
+filename	acq_time
+eeg/sub-010_ses-t1_task-resteyesc_eeg.edf	2017-05-09T12:11:44
9 changes: 9 additions & 0 deletions mne_bids_pipeline/tests/test_run.py
@@ -169,6 +169,15 @@ def test_run(dataset, monkeypatch, dataset_test, capsys, tmp_path):
             src=fix_path / "ds001971_participants.tsv",
             dst=DATA_DIR / "ds001971" / "participants.tsv",
         )
+    elif dataset == "ds003775":
+        shutil.copy(
+            src=fix_path / "sub-010_ses-t1_scans.tsv",
+            dst=DATA_DIR
+            / "ds003775"
+            / "sub-010"
+            / "ses-t1"
+            / "sub-010_ses-t1_scans.tsv",
+        )
 
     # Run the tests.
     steps = test_options.get("steps", ("preprocessing", "sensor"))
1 change: 1 addition & 0 deletions pyproject.toml
@@ -34,6 +34,7 @@ dependencies = [
     "jupyter-server-proxy",  # to have dask and jupyter working together
     "scikit-learn",
     "pandas",
+    "pyarrow",  # from pandas
     "seaborn",
     "json_tricks",
     "pydantic >= 2.0.0",
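
For context (an assumption based on the "# from pandas" note, not stated elsewhere in the commit): recent pandas releases announce pyarrow as a planned required dependency, and pyarrow-backed dtypes can be exercised like this:

import pandas as pd

# dtype_backend="pyarrow" requires pyarrow and is available since pandas 2.0
df = pd.DataFrame({"subject": ["sub-01", "sub-02"]}).convert_dtypes(dtype_backend="pyarrow")
print(df.dtypes)  # subject    string[pyarrow]
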
