From 8533f52c3d7c3aebcc1abb200068fc9fb40bde0a Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 15:11:58 +0200 Subject: [PATCH 01/82] Add integration testing and github actions --- .github/workflows/tests.yml | 25 ++++++++++++++++ petprep/tests/test_cli_integration.py | 42 +++++++++++++++++++++++++++ 2 files changed, 67 insertions(+) create mode 100644 .github/workflows/tests.yml create mode 100644 petprep/tests/test_cli_integration.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 00000000..cbe18a62 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,25 @@ +name: Tests + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + tests: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12"] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - run: pip install -e .[tests] + - run: pytest -sv + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 diff --git a/petprep/tests/test_cli_integration.py b/petprep/tests/test_cli_integration.py new file mode 100644 index 00000000..2c8ef07a --- /dev/null +++ b/petprep/tests/test_cli_integration.py @@ -0,0 +1,42 @@ +import sys +from unittest.mock import patch + +import pytest + +from .. import data +from ..cli import run + + +def test_cli_integration(tmp_path): + bids_dir = data.load("tests/ds000005") + out_dir = tmp_path / "out" + fs_license = tmp_path / "license.txt" + fs_license.write_text("dummy") + + argv = [ + "petprep", + str(bids_dir), + str(out_dir), + "participant", + "--fs-license-file", + str(fs_license), + "--skip-bids-validation", + "--nthreads", + "1", + "--omp-nthreads", + "1", + ] + + with patch.object(sys, "argv", argv), patch( + "nipype.pipeline.engine.Workflow.run", return_value=None + ) as run_patch: + with pytest.raises(SystemExit) as excinfo: + run.main() + + assert excinfo.value.code == 0 + run_patch.assert_called_once() + + petprep_dir = out_dir / "petprep" + assert petprep_dir.exists() + log_root = petprep_dir / "sub-01" / "log" + assert log_root.exists() and any(log_root.iterdir()) \ No newline at end of file From a2c99a94066504828a868f91865d9d024fabd58e Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 15:18:11 +0200 Subject: [PATCH 02/82] FIX: upgrade actions version --- .github/workflows/coverage.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index f6c43a6f..e6e2a973 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -13,7 +13,7 @@ jobs: runs-on: macos-latest steps: - name: Check out repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Set up Conda uses: conda-incubator/setup-miniconda@v2 @@ -42,13 +42,13 @@ jobs: shell: bash -l {0} - name: Upload coverage report - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: coverage path: coverage.xml - name: Upload coverage reports to Codecov - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v4 with: file: ./coverage.xml env: From 4ddc87a993822732423d20df17f11e0e0e84198d Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 15:25:00 +0200 Subject: [PATCH 03/82] FIX: add git ignore --- .gitignore | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 .gitignore diff --git a/.gitignore b/.gitignore new file mode 
100644 index 00000000..2ecb16a0 --- /dev/null +++ b/.gitignore @@ -0,0 +1,31 @@ +# Python cache and bytecode files +__pycache__/ +*.py[cod] +*$py.class + +# Coverage reports +.coverage +.coverage.* +htmlcov/ +coverage.xml + +# Build directories +build/ +dist/ +.eggs/ +*.egg-info/ +*.egg +# docs build +/docs/_build/ + +# Editor and OS artifacts +.DS_Store +*.swp +*~ +.vscode/ +.idea/ +.env +.envrc + +# Additional +.ipynb_checkpoints/ From df957ff2f9e37dd593b07cc81c70b3f30a94aea4 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 15:26:13 +0200 Subject: [PATCH 04/82] FIX: Update test_fsl6.py to use packaging.version.Version --- petprep/tests/test_fsl6.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/petprep/tests/test_fsl6.py b/petprep/tests/test_fsl6.py index 12d85da2..65b4a179 100644 --- a/petprep/tests/test_fsl6.py +++ b/petprep/tests/test_fsl6.py @@ -3,17 +3,15 @@ import pytest import templateflow.api as tf -from looseversion import LooseVersion from nipype.interfaces import fsl +from packaging.version import Version fslversion = fsl.Info.version() TEMPLATE = tf.get('MNI152NLin2009cAsym', resolution=2, desc=None, suffix='T1w') @pytest.mark.skipif(fslversion is None, reason='fsl required') -@pytest.mark.skipif( - fslversion and LooseVersion(fslversion) < LooseVersion('6.0.0'), reason='FSL6 test' -) +@pytest.mark.skipif(fslversion and Version(fslversion) < Version('6.0.0'), reason='FSL6 test') @pytest.mark.parametrize( ('path_parent', 'filename'), [ From ebf748d9a596bbe354dbc1bac8fda2d7fc79ac82 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 15:37:49 +0200 Subject: [PATCH 05/82] ENH: replace fmriprep imports with petprep (testing) --- petprep/interfaces/tests/test_bids.py | 4 ++-- petprep/interfaces/tests/test_confounds.py | 2 +- petprep/interfaces/tests/test_maths.py | 2 +- petprep/reports/tests/test_reports.py | 2 +- petprep/utils/tests/test_derivative_cache.py | 2 +- petprep/workflows/pet/tests/test_mem.py | 2 +- pyproject.toml | 7 +------ 7 files changed, 8 insertions(+), 13 deletions(-) diff --git a/petprep/interfaces/tests/test_bids.py b/petprep/interfaces/tests/test_bids.py index b30da3d1..183c60d5 100644 --- a/petprep/interfaces/tests/test_bids.py +++ b/petprep/interfaces/tests/test_bids.py @@ -1,9 +1,9 @@ -"""Tests for fmriprep.interfaces.bids.""" +"""Tests for :mod:`petprep.interfaces.bids`.""" def test_BIDSURI(): """Test the BIDSURI interface.""" - from fmriprep.interfaces.bids import BIDSURI + from petprep.interfaces.bids import BIDSURI dataset_links = { 'raw': '/data', diff --git a/petprep/interfaces/tests/test_confounds.py b/petprep/interfaces/tests/test_confounds.py index 9255bae6..d3966fb2 100644 --- a/petprep/interfaces/tests/test_confounds.py +++ b/petprep/interfaces/tests/test_confounds.py @@ -4,7 +4,7 @@ import pandas as pd from nipype.pipeline import engine as pe -from fmriprep.interfaces import confounds +from petprep.interfaces import confounds def test_RenameACompCor(tmp_path, data_dir): diff --git a/petprep/interfaces/tests/test_maths.py b/petprep/interfaces/tests/test_maths.py index 209878cd..06be3482 100644 --- a/petprep/interfaces/tests/test_maths.py +++ b/petprep/interfaces/tests/test_maths.py @@ -2,7 +2,7 @@ import numpy as np from nipype.pipeline import engine as pe -from fmriprep.interfaces.maths import Clip +from petprep.interfaces.maths import Clip def test_Clip(tmp_path): diff --git a/petprep/reports/tests/test_reports.py b/petprep/reports/tests/test_reports.py index 2d4dc41c..908b575e 100644 --- 
a/petprep/reports/tests/test_reports.py +++ b/petprep/reports/tests/test_reports.py @@ -4,7 +4,7 @@ import pytest from bids.layout import BIDSLayout -from fmriprep.reports.core import generate_reports +from petprep.reports.core import generate_reports from ... import config, data diff --git a/petprep/utils/tests/test_derivative_cache.py b/petprep/utils/tests/test_derivative_cache.py index d4f4d61c..fabcb476 100644 --- a/petprep/utils/tests/test_derivative_cache.py +++ b/petprep/utils/tests/test_derivative_cache.py @@ -2,7 +2,7 @@ import pytest -from fmriprep.utils import bids +from petprep.utils import bids @pytest.mark.parametrize('desc', ['hmc', 'coreg']) diff --git a/petprep/workflows/pet/tests/test_mem.py b/petprep/workflows/pet/tests/test_mem.py index 385bd800..d688e5c5 100644 --- a/petprep/workflows/pet/tests/test_mem.py +++ b/petprep/workflows/pet/tests/test_mem.py @@ -1,7 +1,7 @@ import numpy as np import nibabel as nb -from ...utils.misc import estimate_pet_mem_usage +from petprep.utils.misc import estimate_pet_mem_usage def test_estimate_pet_mem_usage(tmp_path): diff --git a/pyproject.toml b/pyproject.toml index caf3b57e..41367eb0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -137,14 +137,9 @@ addopts = [ "-ra", "--strict-config", "--strict-markers", - "--doctest-modules", - # Config pytest-cov - "--cov=petprep", - "--cov-report=xml", - "--cov-config=pyproject.toml", ] doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS" -env = "PYTHONHASHSEED=0" + filterwarnings = ["ignore::DeprecationWarning"] junit_family = "xunit2" From 2b463e3e9272dfab9ce6fbd639043c23bb84ecb2 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 15:47:47 +0200 Subject: [PATCH 06/82] FIX: update coverage.yml --- .github/workflows/coverage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index e6e2a973..dc159ce6 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -20,7 +20,7 @@ jobs: with: miniconda-version: "latest" activate-environment: petprep - environment-file: environment.yml + environment-file: env.yml auto-activate-base: true - name: Install dependencies From 5ff490c9306e96a8ccd45f74ba6b458404cc38d3 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 15:51:00 +0200 Subject: [PATCH 07/82] FIX: Update out_dir path in test_bids --- petprep/interfaces/tests/test_bids.py | 2 +- petprep/utils/bids.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/petprep/interfaces/tests/test_bids.py b/petprep/interfaces/tests/test_bids.py index 183c60d5..0b457ccc 100644 --- a/petprep/interfaces/tests/test_bids.py +++ b/petprep/interfaces/tests/test_bids.py @@ -9,7 +9,7 @@ def test_BIDSURI(): 'raw': '/data', 'deriv-0': '/data/derivatives/source-1', } - out_dir = '/data/derivatives/fmriprep' + out_dir = '/data/derivatives/petprep' # A single element as a string interface = BIDSURI( diff --git a/petprep/utils/bids.py b/petprep/utils/bids.py index 6f5bbb21..a4fe9fed 100644 --- a/petprep/utils/bids.py +++ b/petprep/utils/bids.py @@ -378,7 +378,7 @@ def _find_nearest_path(path_dict, input_path): -------- >>> from pathlib import Path >>> path_dict = { - ... 'bids::': Path('/data/derivatives/fmriprep'), + ... 'bids::': Path('/data/derivatives/petprep'), ... 'bids:raw:': Path('/data'), ... 'bids:deriv-0:': Path('/data/derivatives/source-1'), ... 
} From 9edf34c04e150a06cf24f0bb8bcb855dbfb78d17 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 16:07:56 +0200 Subject: [PATCH 08/82] FIX: Update tests for PET reference files --- petprep/data/io_spec.json | 26 ++++++++++---------- petprep/utils/bids.py | 6 +++-- petprep/utils/tests/test_derivative_cache.py | 16 ++++++------ 3 files changed, 25 insertions(+), 23 deletions(-) diff --git a/petprep/data/io_spec.json b/petprep/data/io_spec.json index 50814430..6a4eeb85 100644 --- a/petprep/data/io_spec.json +++ b/petprep/data/io_spec.json @@ -2,20 +2,20 @@ "queries": { "baseline": { "hmc": { - "datatype": "func", + "datatype": "pet", "space": null, "desc": "hmc", - "suffix": "boldref", + "suffix": "petref", "extension": [ ".nii.gz", ".nii" ] }, "coreg": { - "datatype": "func", + "datatype": "pet", "space": null, "desc": "coreg", - "suffix": "boldref", + "suffix": "petref", "extension": [ ".nii.gz", ".nii" @@ -24,16 +24,16 @@ }, "transforms": { "hmc": { - "datatype": "func", + "datatype": "pet", "from": "orig", - "to": "boldref", + "to": "petref", "mode": "image", "suffix": "xfm", "extension": ".txt" }, - "boldref2anat": { - "datatype": "func", - "from": "boldref", + "petref2anat": { + "datatype": "pet", + "from": "petref", "to": ["anat", "T1w", "T2w"], "mode": "image", "suffix": "xfm", @@ -42,9 +42,9 @@ } }, "patterns": [ - "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_res-{res}][_label-{label}][_echo-{echo}][_space-{space}][_desc-{desc}]_{suffix}.{extension|nii.gz}", - "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}.{extension}", - "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_part-{part}][_desc-{desc}]_{suffix}.{extension}", - "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}][_res-{res}][_den-{den}][_hemi-{hemi}[_label-{label}][_desc-{desc}]_{suffix<|boldref|dseg|mask>}.{extension}" + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_trc-{tracer}][_rec-{reconstruction}][_res-{res}][_label-{label}][_echo-{echo}][_space-{space}][_desc-{desc}]_{suffix}.{extension|nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_trc-{tracer}][_rec-{reconstruction}][_run-{run}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}.{extension}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_trc-{tracer}][_rec-{reconstruction}][_run-{run}][_part-{part}][_desc-{desc}]_{suffix}.{extension}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_trc-{tracer}][_rec-{reconstruction}][_space-{space}][_res-{res}][_den-{den}][_hemi-{hemi}[_label-{label}][_desc-{desc}]_{suffix<|petref|dseg|mask>}.{extension}" ] } diff --git a/petprep/utils/bids.py b/petprep/utils/bids.py index a4fe9fed..a2cfdb8b 100644 --- a/petprep/utils/bids.py +++ b/petprep/utils/bids.py @@ -68,13 +68,14 @@ def collect_derivatives( derivs_cache = 
defaultdict(list, {}) layout = _get_layout(derivatives_dir) - # search for both boldrefs + # search for precomputed references for k, q in spec['baseline'].items(): query = {**entities, **q} item = layout.get(return_type='filename', **query) if not item: continue - derivs_cache[f'{k}_boldref'] = item[0] if len(item) == 1 else item + suffix = q.get('suffix', 'petref') + derivs_cache[f'{k}_{suffix}'] = item[0] if len(item) == 1 else item transforms_cache = {} for xfm, q in spec['transforms'].items(): @@ -100,6 +101,7 @@ def write_bidsignore(deriv_dir): '*.surf.gii', # Unspecified structural outputs # Unspecified functional outputs '*_boldref.nii.gz', + '*_petref.nii.gz', '*_bold.func.gii', '*_mixing.tsv', '*_timeseries.tsv', diff --git a/petprep/utils/tests/test_derivative_cache.py b/petprep/utils/tests/test_derivative_cache.py index fabcb476..3773ab28 100644 --- a/petprep/utils/tests/test_derivative_cache.py +++ b/petprep/utils/tests/test_derivative_cache.py @@ -11,7 +11,7 @@ def test_baseline_found_as_str(tmp_path: Path, desc: str): task = 'rest' to_find = tmp_path.joinpath( - f'sub-{subject}', 'func', f'sub-{subject}_task-{task}_desc-{desc}_boldref.nii.gz' + f'sub-{subject}', 'pet', f'sub-{subject}_task-{task}_desc-{desc}_petref.nii.gz' ) to_find.parent.mkdir(parents=True) to_find.touch() @@ -19,25 +19,25 @@ def test_baseline_found_as_str(tmp_path: Path, desc: str): entities = { 'subject': subject, 'task': task, - 'suffix': 'bold', + 'suffix': 'pet', 'extension': '.nii.gz', } derivs = bids.collect_derivatives(derivatives_dir=tmp_path, entities=entities) - assert dict(derivs) == {f'{desc}_boldref': str(to_find), 'transforms': {}} + assert dict(derivs) == {f'{desc}_petref': str(to_find), 'transforms': {}} -@pytest.mark.parametrize('xfm', ['boldref2anat', 'hmc']) +@pytest.mark.parametrize('xfm', ['petref2anat', 'hmc']) def test_transforms_found_as_str(tmp_path: Path, xfm: str): subject = '0' task = 'rest' fromto = { - 'hmc': 'from-orig_to-boldref', - 'boldref2anat': 'from-boldref_to-anat', + 'hmc': 'from-orig_to-petref', + 'petref2anat': 'from-petref_to-anat', }[xfm] to_find = tmp_path.joinpath( - f'sub-{subject}', 'func', f'sub-{subject}_task-{task}_{fromto}_mode-image_xfm.txt' + f'sub-{subject}', 'pet', f'sub-{subject}_task-{task}_{fromto}_mode-image_xfm.txt' ) to_find.parent.mkdir(parents=True) to_find.touch() @@ -45,7 +45,7 @@ def test_transforms_found_as_str(tmp_path: Path, xfm: str): entities = { 'subject': subject, 'task': task, - 'suffix': 'bold', + 'suffix': 'pet', 'extension': '.nii.gz', } From 8d604e8a0228eff2875d1027b8bbccb5683146eb Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 16:12:09 +0200 Subject: [PATCH 09/82] FIX: Fix ImportError in test_reports.py --- petprep/interfaces/reports.py | 36 +++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/petprep/interfaces/reports.py b/petprep/interfaces/reports.py index 2d9e6199..37d7438c 100644 --- a/petprep/interfaces/reports.py +++ b/petprep/interfaces/reports.py @@ -43,6 +43,42 @@ LOGGER = logging.getLogger('nipype.interface') +_ORI_TO_NAME = { + 'L': 'Left', + 'R': 'Right', + 'A': 'Anterior', + 'P': 'Posterior', + 'S': 'Superior', + 'I': 'Inferior', +} + +_OPPOSITE = { + 'L': 'R', + 'R': 'L', + 'A': 'P', + 'P': 'A', + 'S': 'I', + 'I': 'S', +} + + +def get_world_pedir(orientation: str, pe_dir: str) -> str: + """Return the world phase-encoding direction.""" + + orientation = orientation.upper() + axis = pe_dir[0].lower() + idx = {'i': 0, 'j': 1, 'k': 2}[axis] + letter = orientation[idx] + 
+    if pe_dir.endswith('-'):
+        start = letter
+        end = _OPPOSITE[letter]
+    else:
+        start = _OPPOSITE[letter]
+        end = letter
+
+    return f"{_ORI_TO_NAME[start]}-{_ORI_TO_NAME[end]}"
+
 
 SUBJECT_TEMPLATE = """\
 \t<ul class="elem-desc">
 \t\t<li>Subject ID: {subject_id}</li>
  • From e31b8366ece89c040494529b4c4a512384d4f31c Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 16:24:36 +0200 Subject: [PATCH 10/82] FIX: update env.yml to be in line with fmriprep --- env.yml | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/env.yml b/env.yml index 085371ab..13fc97b7 100644 --- a/env.yml +++ b/env.yml @@ -13,9 +13,9 @@ dependencies: # git-annex for templateflow users with DataLad superdatasets - git-annex=*=alldep* # Base scientific python stack; required by FSL, so pinned here - - numpy=1.26 + - numpy=2.2 - scipy=1.15 - - matplotlib=3.9 + - matplotlib=3.10 - pandas=2.2 - h5py=3.13 # Dependencies compiled against numpy, best to stick with conda @@ -23,25 +23,19 @@ dependencies: - scikit-image=0.25 - scikit-learn=1.6 # Utilities - - graphviz=11.0 - - pandoc=3.6 + - graphviz=12.2 + - pandoc=3.7 # Workflow dependencies: ANTs - - ants=2.5 - # 5.4.1 and 5.4.2 cause segfaults with ants - # Try to remove this ASAP - # https://github.com/conda-forge/ants-feedstock/issues/19 - - libitk=5.4.0 - # Workflow dependencies: Convert3d - - convert3d=1.4 + - ants=2.6 # Workflow dependencies: Connectome Workbench - connectome-workbench-cli=2.0 - # Workflow dependencies: FSL (versions pinned in 6.0.7.13) + # Workflow dependencies: FSL (versions pinned in 6.0.7.17.20250415.fe1c582e) - fsl-bet2=2111.8 - - fsl-flirt=2111.2 + - fsl-flirt=2111.4 - fsl-fast4=2111.3 - fsl-fugue=2201.5 - fsl-mcflirt=2111.0 - - fsl-miscmaths=2203.2 + - fsl-miscmaths=2412.4 - fsl-topup=2203.5 - pip - pip: From acaacc22c2fe750101da2c450c13aae33608f17b Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 16:27:47 +0200 Subject: [PATCH 11/82] FIX: minor changes from fmriprep to petprep --- petprep/cli/tests/test_parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/petprep/cli/tests/test_parser.py b/petprep/cli/tests/test_parser.py index d68ea2bb..bb4a7641 100644 --- a/petprep/cli/tests/test_parser.py +++ b/petprep/cli/tests/test_parser.py @@ -115,9 +115,9 @@ def _mock_check_latest(*args, **kwargs): captured = capsys.readouterr().err msg = f"""\ -You are using fMRIPrep-{current}, and a newer version of fMRIPrep is available: {latest}. +You are using PETPrep-{current}, and a newer version of PETPrep is available: {latest}. 
Please check out our documentation about how and when to upgrade: -https://fmriprep.readthedocs.io/en/latest/faq.html#upgrading""" +https://petprep.readthedocs.io/en/latest/faq.html#upgrading""" assert (msg in captured) is expectation From 3850edbe7539e06c96aa598092ef830eb53cc674 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 16:28:02 +0200 Subject: [PATCH 12/82] ENH: add circleci config --- .circleci/config.yml | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index e69de29b..62692873 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -0,0 +1,30 @@ +version: 2.1 + +orbs: + conda: circleci/conda@1.1.1 + +jobs: + tests: + executor: + name: conda/default + steps: + - checkout + - conda/update-env: + environment-file: env.yml + environment-name: petprep + - conda/run: + environment-name: petprep + command: | + pip install -e .[tests] + pytest -n auto --junitxml=test-results/junit.xml --cov=petprep --cov-report=xml + - store_test_results: + path: test-results + - store_artifacts: + path: coverage.xml + destination: coverage.xml + +workflows: + version: 2 + test: + jobs: + - tests From ce25a9b8de2c6d4311371fb0ec7fba2b4a88fd86 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 16:33:24 +0200 Subject: [PATCH 13/82] FIX: remove slice timing and sdc flows test from test_parser --- petprep/cli/tests/test_parser.py | 46 -------------------------------- 1 file changed, 46 deletions(-) diff --git a/petprep/cli/tests/test_parser.py b/petprep/cli/tests/test_parser.py index bb4a7641..cf080cd9 100644 --- a/petprep/cli/tests/test_parser.py +++ b/petprep/cli/tests/test_parser.py @@ -184,52 +184,6 @@ def test_bids_filter_file(tmp_path, capsys): _reset_config() -@pytest.mark.parametrize('st_ref', (None, '0', '1', '0.5', 'start', 'middle')) # noqa: PT007 -def test_slice_time_ref(tmp_path, st_ref): - bids_path = tmp_path / 'data' - out_path = tmp_path / 'out' - args = [str(bids_path), str(out_path), 'participant'] - if st_ref: - args.extend(['--slice-time-ref', st_ref]) - bids_path.mkdir() - - parser = _build_parser() - - parser.parse_args(args) - _reset_config() - - -@pytest.mark.parametrize( - ('args', 'expectation'), - [ - ([], False), - (['--use-syn-sdc'], 'error'), - (['--use-syn-sdc', 'error'], 'error'), - (['--use-syn-sdc', 'warn'], 'warn'), - (['--use-syn-sdc', 'other'], (SystemExit, ArgumentError)), - ], -) -def test_use_syn_sdc(tmp_path, args, expectation): - bids_path = tmp_path / 'data' - out_path = tmp_path / 'out' - args = [str(bids_path), str(out_path), 'participant'] + args - bids_path.mkdir() - - parser = _build_parser() - - cm = nullcontext() - if isinstance(expectation, tuple): - cm = pytest.raises(expectation) - - with cm: - opts = parser.parse_args(args) - - if not isinstance(expectation, tuple): - assert opts.use_syn_sdc == expectation - - _reset_config() - - def test_derivatives(tmp_path): """Check the correct parsing of the derivatives argument.""" bids_path = tmp_path / 'data' From 8bdc9aa2bd9ad5a98657cd553425244ec263c251 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 16:42:22 +0200 Subject: [PATCH 14/82] ENH: add PET test data to ds000005 --- .../tests/ds000005/sub-01/pet/sub-01_pet.nii.gz | Bin 0 -> 83 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 petprep/data/tests/ds000005/sub-01/pet/sub-01_pet.nii.gz diff --git a/petprep/data/tests/ds000005/sub-01/pet/sub-01_pet.nii.gz 
b/petprep/data/tests/ds000005/sub-01/pet/sub-01_pet.nii.gz new file mode 100644 index 0000000000000000000000000000000000000000..026e43c51ca3ff0dad7d62bfd9866c0dbcf29ed6 GIT binary patch literal 83 zcmb2|=3oE;mjB&}DGFR03>*~1nUy6LWhEsFFmRjPFgAE_SbC1)rxm-{`;NC9&zfL= lfpKFJ4^QQ-wimP9e=b%ukYWN7i<}r3q&bv-B(O0s003Tu8}I-C literal 0 HcmV?d00001 From daf19ce4eec34607e2c78b12bc7e5d9dcd27b144 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 16:43:27 +0200 Subject: [PATCH 15/82] FIX: add import json --- petprep/interfaces/cifti.py | 1 + 1 file changed, 1 insertion(+) diff --git a/petprep/interfaces/cifti.py b/petprep/interfaces/cifti.py index c575b5de..6006c6ac 100644 --- a/petprep/interfaces/cifti.py +++ b/petprep/interfaces/cifti.py @@ -1,4 +1,5 @@ from pathlib import Path +import json from niworkflows.interfaces.cifti import ( _GenerateCiftiOutputSpec, From ff2a43c1fb857636c16abd55b77aae105c1267a5 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 16:45:03 +0200 Subject: [PATCH 16/82] FIX: Edit test_base.py to define freesurfer variable --- petprep/workflows/pet/tests/test_base.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/petprep/workflows/pet/tests/test_base.py b/petprep/workflows/pet/tests/test_base.py index 515a8d73..1d5377fe 100644 --- a/petprep/workflows/pet/tests/test_base.py +++ b/petprep/workflows/pet/tests/test_base.py @@ -34,12 +34,14 @@ def bids_root(tmp_path_factory): @pytest.mark.parametrize('task', ['rest']) @pytest.mark.parametrize('level', ['minimal', 'resampling', 'full']) @pytest.mark.parametrize('pet2anat_init', ['t1w', 't2w']) +@pytest.mark.parametrize('freesurfer', [False, True]) def test_pet_wf( bids_root: Path, tmp_path: Path, task: str, level: str, pet2anat_init: str, + freesurfer: bool, ): """Test as many combinations of precomputed files and input configurations as possible.""" @@ -53,10 +55,14 @@ def test_pet_wf( str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz'), ] + # The workflow will attempt to read file headers for path in pet_series: img.to_filename(path) + # Toggle running recon-all + freesurfer = bool(freesurfer) + with mock_config(bids_dir=bids_root): config.workflow.pet2anat_init = pet2anat_init config.workflow.level = level From a4145e7fd3cf98e86fe2f3e7f48c9c7e05507889 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 16:54:50 +0200 Subject: [PATCH 17/82] ENH: add PET report outputs --- .../petprep/sub-01/sub-01_desc-about_T1w.html | 6 + .../sub-01/sub-01_desc-carpetplot_pet.svg | 1943 +++++++++++ .../sub-01/sub-01_desc-conform_T1w.html | 9 + .../sub-01/sub-01_desc-confoundcorr_pet.svg | 3083 +++++++++++++++++ .../petprep/sub-01/sub-01_desc-coreg_pet.svg | 19 + .../sub-01/sub-01_desc-reconall_T1w.svg | 7 + .../sub-01/sub-01_desc-summary_T1w.html | 11 + .../sub-01/sub-01_desc-summary_pet.html | 7 + .../sub-01/sub-01_desc-validation_pet.html | 0 .../reportlets/petprep/sub-01/sub-01_dseg.svg | 7 + .../sub-01_space-MNI152NLin2009cAsym_T1w.svg | 19 + 11 files changed, 5111 insertions(+) create mode 100644 petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-about_T1w.html create mode 100644 petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-carpetplot_pet.svg create mode 100644 petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-conform_T1w.html create mode 100644 petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-confoundcorr_pet.svg create mode 100644 petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-coreg_pet.svg create mode 100644 
petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-reconall_T1w.svg create mode 100644 petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-summary_T1w.html create mode 100644 petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-summary_pet.html create mode 100644 petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-validation_pet.html create mode 100644 petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_dseg.svg create mode 100644 petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_space-MNI152NLin2009cAsym_T1w.svg diff --git a/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-about_T1w.html b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-about_T1w.html new file mode 100644 index 00000000..e9755293 --- /dev/null +++ b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-about_T1w.html @@ -0,0 +1,6 @@ +
+<ul class="elem-desc">
+<li>PETPrep version: 25.0.0.dev172+gf8fd378</li>
+<li>PETPrep command: /Users/martinnorgaard/anaconda3/envs/fmriprep/bin/petprep /Users/martinnorgaard/Documents/GitHub/petprep_dev/petprep/data/tests/pet /Users/martinnorgaard/Documents/GitHub/petprep_dev/petprep/data/tests/pet/derivatives/petprep participant --fs-subjects-dir /Users/martinnorgaard/Documents/GitHub/petprep_dev/petprep/data/tests/pet/derivatives/freesurfer --reference-frame 10 --no-msm --skull-strip-t1w skip</li>
+<li>Date preprocessed: 2025-05-26 23:47:47 +0200</li>
+</ul>
    + diff --git a/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-carpetplot_pet.svg b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-carpetplot_pet.svg new file mode 100644 index 00000000..d8af5b6f --- /dev/null +++ b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-carpetplot_pet.svg @@ -0,0 +1,1943 @@ + + + + + + 2025-05-27T00:08:54.895774 + image/svg+xml + + + Matplotlib v3.9.2, https://matplotlib.org/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-conform_T1w.html b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-conform_T1w.html new file mode 100644 index 00000000..e69ec60e --- /dev/null +++ b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-conform_T1w.html @@ -0,0 +1,9 @@ +

+<h3 class="elem-title">Anatomical Conformation</h3>
+<ul class="elem-desc">
+<li>Input T1w images: 1</li>
+<li>Output orientation: RAS</li>
+<li>Output dimensions: 256x256x180</li>
+<li>Output voxel size: 1mm x 1mm x 1mm</li>
+<li>Discarded images: 0</li>
+</ul>
    diff --git a/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-confoundcorr_pet.svg b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-confoundcorr_pet.svg new file mode 100644 index 00000000..42068ca3 --- /dev/null +++ b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-confoundcorr_pet.svg @@ -0,0 +1,3083 @@ + + + + + + 2025-05-27T00:08:50.089005 + image/svg+xml + + + Matplotlib v3.9.2, https://matplotlib.org/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-coreg_pet.svg b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-coreg_pet.svg new file mode 100644 index 
00000000..16b9bfb5 --- /dev/null +++ b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-coreg_pet.svg @@ -0,0 +1,19 @@ + + + + + 2025-05-27T00:08:17.755521 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + 2025-05-27T00:08:18.994242 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + 2025-05-27T00:08:20.036446 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + + + 2025-05-27T00:08:21.014520 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + 2025-05-27T00:08:22.158710 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + 2025-05-27T00:08:23.388404 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + + + \ No newline at end of file diff --git a/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-reconall_T1w.svg b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-reconall_T1w.svg new file mode 100644 index 00000000..6d68e983 --- /dev/null +++ b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-reconall_T1w.svg @@ -0,0 +1,7 @@ + + + 2025-05-26T23:50:39.988297 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + 2025-05-26T23:50:42.301922 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + 2025-05-26T23:50:45.448066 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + + \ No newline at end of file diff --git a/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-summary_T1w.html b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-summary_T1w.html new file mode 100644 index 00000000..16d5edd2 --- /dev/null +++ b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-summary_T1w.html @@ -0,0 +1,11 @@ +
+<ul class="elem-desc">
+<li>Subject ID: 110</li>
+<li>Structural images: 1 T1-weighted</li>
+<li>Functional series: 1</li>
+<ul class="elem-desc">
+<li>Task: (1 run)</li>
+</ul>
+<li>Standard output spaces: MNI152NLin2009cAsym</li>
+<li>Non-standard output spaces:</li>
+<li>FreeSurfer reconstruction: Pre-existing directory</li>
+</ul>
    diff --git a/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-summary_pet.html b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-summary_pet.html new file mode 100644 index 00000000..6bf6487b --- /dev/null +++ b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-summary_pet.html @@ -0,0 +1,7 @@ +
+<h3 class="elem-title">Summary</h3>
+<ul class="elem-desc">
+<li>Original orientation: LAS</li>
+<li>Registration: FreeSurfer mri_coreg - 6 dof</li>
+</ul>
    diff --git a/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-validation_pet.html b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_desc-validation_pet.html new file mode 100644 index 00000000..e69de29b diff --git a/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_dseg.svg b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_dseg.svg new file mode 100644 index 00000000..e7380ae6 --- /dev/null +++ b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_dseg.svg @@ -0,0 +1,7 @@ + + + 2025-05-26T23:52:17.154446 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + 2025-05-26T23:52:19.158947 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + 2025-05-26T23:52:21.047378 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + + \ No newline at end of file diff --git a/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_space-MNI152NLin2009cAsym_T1w.svg b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_space-MNI152NLin2009cAsym_T1w.svg new file mode 100644 index 00000000..5c563635 --- /dev/null +++ b/petprep/data/tests/work/reportlets/petprep/sub-01/sub-01_space-MNI152NLin2009cAsym_T1w.svg @@ -0,0 +1,19 @@ + + + + + 2025-05-27T00:08:50.671304 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + 2025-05-27T00:08:51.825420 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + 2025-05-27T00:08:52.879234 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + + + 2025-05-27T00:08:54.115601 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + 2025-05-27T00:08:55.274530 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + 2025-05-27T00:08:56.288319 image/svg+xml Matplotlib v3.9.2, https://matplotlib.org/ + + + \ No newline at end of file From f02d34f7521751293937a8ec38f0a1c1beeca277 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 17:04:17 +0200 Subject: [PATCH 18/82] FIX: Update report test to match petprep files --- petprep/reports/tests/test_reports.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/petprep/reports/tests/test_reports.py b/petprep/reports/tests/test_reports.py index 908b575e..82fb5ad7 100644 --- a/petprep/reports/tests/test_reports.py +++ b/petprep/reports/tests/test_reports.py @@ -118,16 +118,16 @@ def mock_session_list(*args, **kwargs): def test_pet_report(tmp_path, monkeypatch): fake_uuid = 'fake_uuid' - pet_source = data_dir / 'work/reportlets/fmriprep' + pet_source = data_dir / 'work/reportlets/petprep' sub_dir = tmp_path / 'sub-01' / 'figures' sub_dir.mkdir(parents=True) - shutil.copy2(pet_source / 'sub-001/figures/sub-001_desc-about_T1w.html', sub_dir / 'sub-01_desc-about_T1w.html') - shutil.copy2(pet_source / 'sub-001/figures/sub-001_ses-001_task-qct_dir-LR_part-mag_desc-summary_bold.html', sub_dir / 'sub-01_ses-baseline_desc-summary_pet.html') - shutil.copy2(pet_source / 'sub-001/figures/sub-001_ses-001_task-qct_dir-LR_part-mag_desc-validation_bold.html', sub_dir / 'sub-01_ses-baseline_desc-validation_pet.html') - shutil.copy2(pet_source / 'sub-001/figures/sub-001_ses-001_task-qct_dir-LR_part-mag_desc-carpetplot_bold.svg', sub_dir / 'sub-01_ses-baseline_desc-carpetplot_pet.svg') - shutil.copy2(pet_source / 'sub-001/figures/sub-001_ses-001_task-qct_dir-LR_part-mag_desc-confoundcorr_bold.svg', sub_dir / 'sub-01_ses-baseline_desc-confoundcorr_pet.svg') - shutil.copy2(pet_source / 'sub-01/func/sub-01_task-mixedgamblestask_run-01_bold_bbr.svg', sub_dir / 'sub-01_ses-baseline_pet.svg') + shutil.copy2(pet_source / 
'sub-01/sub-01_desc-about_T1w.html', sub_dir / 'sub-01_desc-about_T1w.html') + shutil.copy2(pet_source / 'sub-01/sub-01_desc-summary_pet.html', sub_dir / 'sub-01_ses-baseline_desc-summary_pet.html') + shutil.copy2(pet_source / 'sub-01/sub-01_desc-validation_pet.html', sub_dir / 'sub-01_ses-baseline_desc-validation_pet.html') + shutil.copy2(pet_source / 'sub-01/sub-01_desc-carpetplot_pet.svg', sub_dir / 'sub-01_ses-baseline_desc-carpetplot_pet.svg') + shutil.copy2(pet_source / 'sub-01/sub-01_desc-confoundcorr_pet.svg', sub_dir / 'sub-01_ses-baseline_desc-confoundcorr_pet.svg') + shutil.copy2(pet_source / 'sub-01/sub-01_desc-coreg_pet.svg', sub_dir / 'sub-01_ses-baseline_pet.svg') config.execution.aggr_ses_reports = 4 config.execution.layout = BIDSLayout(data_dir / 'pet') From 8c6add059ba851c3e8c0c75e68e40fe30c3b544a Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 17:10:17 +0200 Subject: [PATCH 19/82] FIX: report test --- petprep/reports/tests/test_reports.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/petprep/reports/tests/test_reports.py b/petprep/reports/tests/test_reports.py index 82fb5ad7..26fca42d 100644 --- a/petprep/reports/tests/test_reports.py +++ b/petprep/reports/tests/test_reports.py @@ -123,14 +123,14 @@ def test_pet_report(tmp_path, monkeypatch): sub_dir.mkdir(parents=True) shutil.copy2(pet_source / 'sub-01/sub-01_desc-about_T1w.html', sub_dir / 'sub-01_desc-about_T1w.html') - shutil.copy2(pet_source / 'sub-01/sub-01_desc-summary_pet.html', sub_dir / 'sub-01_ses-baseline_desc-summary_pet.html') - shutil.copy2(pet_source / 'sub-01/sub-01_desc-validation_pet.html', sub_dir / 'sub-01_ses-baseline_desc-validation_pet.html') - shutil.copy2(pet_source / 'sub-01/sub-01_desc-carpetplot_pet.svg', sub_dir / 'sub-01_ses-baseline_desc-carpetplot_pet.svg') - shutil.copy2(pet_source / 'sub-01/sub-01_desc-confoundcorr_pet.svg', sub_dir / 'sub-01_ses-baseline_desc-confoundcorr_pet.svg') - shutil.copy2(pet_source / 'sub-01/sub-01_desc-coreg_pet.svg', sub_dir / 'sub-01_ses-baseline_pet.svg') + shutil.copy2(pet_source / 'sub-01/sub-01_desc-summary_pet.html', sub_dir / 'sub-01_desc-summary_pet.html') + shutil.copy2(pet_source / 'sub-01/sub-01_desc-validation_pet.html', sub_dir / 'sub-01_desc-validation_pet.html') + shutil.copy2(pet_source / 'sub-01/sub-01_desc-carpetplot_pet.svg', sub_dir / 'sub-01_desc-carpetplot_pet.svg') + shutil.copy2(pet_source / 'sub-01/sub-01_desc-confoundcorr_pet.svg', sub_dir / 'sub-01_desc-confoundcorr_pet.svg') + shutil.copy2(pet_source / 'sub-01/sub-01_desc-coreg_pet.svg', sub_dir / 'sub-01_desc-coreg_pet.svg') config.execution.aggr_ses_reports = 4 - config.execution.layout = BIDSLayout(data_dir / 'pet') + config.execution.layout = BIDSLayout(data_dir / 'ds000005') monkeypatch.setattr(config.execution, 'bids_filters', {'pet': {'session': ['baseline']}}) failed_reports = generate_reports(['01'], tmp_path, fake_uuid) From 19a3e848bd3cfe4ea084e9c02b3f78b0094b1602 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 18:36:39 +0200 Subject: [PATCH 20/82] FIX: remove cli integration test --- petprep/tests/test_cli_integration.py | 42 --------------------------- 1 file changed, 42 deletions(-) delete mode 100644 petprep/tests/test_cli_integration.py diff --git a/petprep/tests/test_cli_integration.py b/petprep/tests/test_cli_integration.py deleted file mode 100644 index 2c8ef07a..00000000 --- a/petprep/tests/test_cli_integration.py +++ /dev/null @@ -1,42 +0,0 @@ -import sys -from unittest.mock import patch - 
-import pytest - -from .. import data -from ..cli import run - - -def test_cli_integration(tmp_path): - bids_dir = data.load("tests/ds000005") - out_dir = tmp_path / "out" - fs_license = tmp_path / "license.txt" - fs_license.write_text("dummy") - - argv = [ - "petprep", - str(bids_dir), - str(out_dir), - "participant", - "--fs-license-file", - str(fs_license), - "--skip-bids-validation", - "--nthreads", - "1", - "--omp-nthreads", - "1", - ] - - with patch.object(sys, "argv", argv), patch( - "nipype.pipeline.engine.Workflow.run", return_value=None - ) as run_patch: - with pytest.raises(SystemExit) as excinfo: - run.main() - - assert excinfo.value.code == 0 - run_patch.assert_called_once() - - petprep_dir = out_dir / "petprep" - assert petprep_dir.exists() - log_root = petprep_dir / "sub-01" / "log" - assert log_root.exists() and any(log_root.iterdir()) \ No newline at end of file From 2aa025b197ee1bf9a9ee2e868b156badb710360d Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 18:36:44 +0200 Subject: [PATCH 21/82] Update NOTICE --- NOTICE | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) mode change 120000 => 100644 NOTICE diff --git a/NOTICE b/NOTICE deleted file mode 120000 index 3b7bfbf5..00000000 --- a/NOTICE +++ /dev/null @@ -1 +0,0 @@ -fmriprep/data/NOTICE \ No newline at end of file diff --git a/NOTICE b/NOTICE new file mode 100644 index 00000000..a6a745cd --- /dev/null +++ b/NOTICE @@ -0,0 +1,15 @@ +PETPrep +Copyright The NiPreps Developers. + +This product includes software developed by +the NiPreps Community (https://nipreps.org/). + +Portions of this software were developed at the Department of +Psychology at Stanford University, Stanford, CA, US. + +This software is also distributed as a Docker container image. +The bootstrapping file for the image ("Dockerfile") is licensed +under the MIT License. + +This software may be distributed through an add-on package called +"Docker Wrapper" that is under the BSD 3-clause License. 
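Note on the integration test deleted in PATCH 20 above: it exercised the command-line entry point end to end while keeping the run cheap, by patching sys.argv to carry the CLI arguments and mocking nipype.pipeline.engine.Workflow.run so the workflow graph is built but never executed. A minimal sketch of that pattern, reconstructed from the deleted file, is shown below; the absolute petprep imports are assumed to be equivalent to the relative imports used in the original test, and the argument list is illustrative rather than canonical.

import sys
from unittest.mock import patch

import pytest

from petprep import data
from petprep.cli import run


def test_cli_smoke(tmp_path):
    """Drive the CLI with the Nipype workflow runner stubbed out."""
    bids_dir = data.load('tests/ds000005')      # bundled toy dataset
    fs_license = tmp_path / 'license.txt'
    fs_license.write_text('dummy')

    argv = [
        'petprep', str(bids_dir), str(tmp_path / 'out'), 'participant',
        '--fs-license-file', str(fs_license),
        '--skip-bids-validation', '--nthreads', '1', '--omp-nthreads', '1',
    ]

    # Patch sys.argv so run.main() parses the arguments above, and stub
    # Workflow.run so the pipeline is wired up but nothing is computed.
    with patch.object(sys, 'argv', argv), patch(
        'nipype.pipeline.engine.Workflow.run', return_value=None
    ) as run_patch:
        with pytest.raises(SystemExit) as excinfo:
            run.main()

    assert excinfo.value.code == 0
    run_patch.assert_called_once()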
From 9d115f74b459445cc4b73e9ed4b6efd764e3f8f2 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 18:41:55 +0200 Subject: [PATCH 22/82] FIX: update subject ID --- petprep/utils/tests/test_derivative_cache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/petprep/utils/tests/test_derivative_cache.py b/petprep/utils/tests/test_derivative_cache.py index 3773ab28..826552a6 100644 --- a/petprep/utils/tests/test_derivative_cache.py +++ b/petprep/utils/tests/test_derivative_cache.py @@ -7,7 +7,7 @@ @pytest.mark.parametrize('desc', ['hmc', 'coreg']) def test_baseline_found_as_str(tmp_path: Path, desc: str): - subject = '0' + subject = '01' task = 'rest' to_find = tmp_path.joinpath( @@ -29,7 +29,7 @@ def test_baseline_found_as_str(tmp_path: Path, desc: str): @pytest.mark.parametrize('xfm', ['petref2anat', 'hmc']) def test_transforms_found_as_str(tmp_path: Path, xfm: str): - subject = '0' + subject = '01' task = 'rest' fromto = { 'hmc': 'from-orig_to-petref', From 2c8e0d89967810a71f9265c8e07a2ea2d758bf10 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 19:30:01 +0200 Subject: [PATCH 23/82] FIX: Fix test_baseline_found_as_str assertion error --- petprep/data/io_spec.json | 8 ++++---- petprep/utils/bids.py | 2 +- petprep/utils/tests/test_derivative_cache.py | 8 ++------ 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/petprep/data/io_spec.json b/petprep/data/io_spec.json index 6a4eeb85..36c23f36 100644 --- a/petprep/data/io_spec.json +++ b/petprep/data/io_spec.json @@ -42,9 +42,9 @@ } }, "patterns": [ - "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_trc-{tracer}][_rec-{reconstruction}][_res-{res}][_label-{label}][_echo-{echo}][_space-{space}][_desc-{desc}]_{suffix}.{extension|nii.gz}", - "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_trc-{tracer}][_rec-{reconstruction}][_run-{run}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}.{extension}", - "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_trc-{tracer}][_rec-{reconstruction}][_run-{run}][_part-{part}][_desc-{desc}]_{suffix}.{extension}", - "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_trc-{tracer}][_rec-{reconstruction}][_space-{space}][_res-{res}][_den-{den}][_hemi-{hemi}[_label-{label}][_desc-{desc}]_{suffix<|petref|dseg|mask>}.{extension}" + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_res-{res}][_label-{label}][_echo-{echo}][_space-{space}][_desc-{desc}]_{suffix}.{extension|nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}.{extension}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_part-{part}][_desc-{desc}]_{suffix}.{extension}", + 
"sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}][_res-{res}][_den-{den}][_hemi-{hemi}[_label-{label}][_desc-{desc}]_{suffix<|boldref|petref|dseg|mask>}.{extension}" ] } diff --git a/petprep/utils/bids.py b/petprep/utils/bids.py index a2cfdb8b..83e9f8a5 100644 --- a/petprep/utils/bids.py +++ b/petprep/utils/bids.py @@ -74,7 +74,7 @@ def collect_derivatives( item = layout.get(return_type='filename', **query) if not item: continue - suffix = q.get('suffix', 'petref') + suffix = q.get('suffix', entities.get('suffix', '')) derivs_cache[f'{k}_{suffix}'] = item[0] if len(item) == 1 else item transforms_cache = {} diff --git a/petprep/utils/tests/test_derivative_cache.py b/petprep/utils/tests/test_derivative_cache.py index 826552a6..d0d65501 100644 --- a/petprep/utils/tests/test_derivative_cache.py +++ b/petprep/utils/tests/test_derivative_cache.py @@ -8,17 +8,15 @@ @pytest.mark.parametrize('desc', ['hmc', 'coreg']) def test_baseline_found_as_str(tmp_path: Path, desc: str): subject = '01' - task = 'rest' to_find = tmp_path.joinpath( - f'sub-{subject}', 'pet', f'sub-{subject}_task-{task}_desc-{desc}_petref.nii.gz' + f'sub-{subject}', 'pet', f'sub-{subject}_desc-{desc}_petref.nii.gz' ) to_find.parent.mkdir(parents=True) to_find.touch() entities = { 'subject': subject, - 'task': task, 'suffix': 'pet', 'extension': '.nii.gz', } @@ -30,21 +28,19 @@ def test_baseline_found_as_str(tmp_path: Path, desc: str): @pytest.mark.parametrize('xfm', ['petref2anat', 'hmc']) def test_transforms_found_as_str(tmp_path: Path, xfm: str): subject = '01' - task = 'rest' fromto = { 'hmc': 'from-orig_to-petref', 'petref2anat': 'from-petref_to-anat', }[xfm] to_find = tmp_path.joinpath( - f'sub-{subject}', 'pet', f'sub-{subject}_task-{task}_{fromto}_mode-image_xfm.txt' + f'sub-{subject}', 'pet', f'sub-{subject}_{fromto}_mode-image_xfm.txt' ) to_find.parent.mkdir(parents=True) to_find.touch() entities = { 'subject': subject, - 'task': task, 'suffix': 'pet', 'extension': '.nii.gz', } From a67e99a25ae58b3bdbbfb131b08bd46b19f48093 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 19:41:18 +0200 Subject: [PATCH 24/82] FIX: update test derivative cache --- petprep/data/io_spec.json | 8 ++++---- petprep/utils/tests/test_derivative_cache.py | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/petprep/data/io_spec.json b/petprep/data/io_spec.json index 36c23f36..3d7d8df5 100644 --- a/petprep/data/io_spec.json +++ b/petprep/data/io_spec.json @@ -42,9 +42,9 @@ } }, "patterns": [ - "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_res-{res}][_label-{label}][_echo-{echo}][_space-{space}][_desc-{desc}]_{suffix}.{extension|nii.gz}", - "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}.{extension}", - "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_part-{part}][_desc-{desc}]_{suffix}.{extension}", - 
"sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}][_res-{res}][_den-{den}][_hemi-{hemi}[_label-{label}][_desc-{desc}]_{suffix<|boldref|petref|dseg|mask>}.{extension}" + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_res-{res}][_label-{label}][_echo-{echo}][_space-{space}][_desc-{desc}]_{suffix}.{extension|nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}.{extension}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_part-{part}][_desc-{desc}]_{suffix}.{extension}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}][_res-{res}][_den-{den}][_hemi-{hemi}[_label-{label}][_desc-{desc}]_{suffix<|boldref|petref|dseg|mask>}.{extension}" ] } diff --git a/petprep/utils/tests/test_derivative_cache.py b/petprep/utils/tests/test_derivative_cache.py index d0d65501..08e0c234 100644 --- a/petprep/utils/tests/test_derivative_cache.py +++ b/petprep/utils/tests/test_derivative_cache.py @@ -19,6 +19,7 @@ def test_baseline_found_as_str(tmp_path: Path, desc: str): 'subject': subject, 'suffix': 'pet', 'extension': '.nii.gz', + 'desc': desc, } derivs = bids.collect_derivatives(derivatives_dir=tmp_path, entities=entities) From fa238a41a296b108a1537b094fcf0593199bbc6b Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 19:54:20 +0200 Subject: [PATCH 25/82] FIX: update test to match filename --- petprep/utils/tests/test_derivative_cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/petprep/utils/tests/test_derivative_cache.py b/petprep/utils/tests/test_derivative_cache.py index 08e0c234..ba539c1b 100644 --- a/petprep/utils/tests/test_derivative_cache.py +++ b/petprep/utils/tests/test_derivative_cache.py @@ -17,7 +17,7 @@ def test_baseline_found_as_str(tmp_path: Path, desc: str): entities = { 'subject': subject, - 'suffix': 'pet', + 'suffix': 'petref', 'extension': '.nii.gz', 'desc': desc, } From f7f8fdcc17df84c764c332efad96918e990bedf3 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 20:03:05 +0200 Subject: [PATCH 26/82] FIX: remove desc entity --- petprep/utils/tests/test_derivative_cache.py | 1 - 1 file changed, 1 deletion(-) diff --git a/petprep/utils/tests/test_derivative_cache.py b/petprep/utils/tests/test_derivative_cache.py index ba539c1b..85eb3d90 100644 --- a/petprep/utils/tests/test_derivative_cache.py +++ b/petprep/utils/tests/test_derivative_cache.py @@ -19,7 +19,6 @@ def test_baseline_found_as_str(tmp_path: Path, desc: str): 'subject': subject, 'suffix': 'petref', 'extension': '.nii.gz', - 'desc': desc, } derivs = bids.collect_derivatives(derivatives_dir=tmp_path, entities=entities) From f2319516a75c8b55d22069b805112177ad2395c5 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 20:04:09 +0200 Subject: [PATCH 27/82] FIX: add pet suffix to io_spec.json --- petprep/data/io_spec.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/petprep/data/io_spec.json b/petprep/data/io_spec.json index 3d7d8df5..ed67e6ef 100644 --- 
a/petprep/data/io_spec.json +++ b/petprep/data/io_spec.json @@ -42,7 +42,7 @@ } }, "patterns": [ - "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_res-{res}][_label-{label}][_echo-{echo}][_space-{space}][_desc-{desc}]_{suffix}.{extension|nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_res-{res}][_label-{label}][_echo-{echo}][_space-{space}][_desc-{desc}]_{suffix}.{extension|nii.gz}", "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}.{extension}", "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_part-{part}][_desc-{desc}]_{suffix}.{extension}", "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}][_res-{res}][_den-{den}][_hemi-{hemi}[_label-{label}][_desc-{desc}]_{suffix<|boldref|petref|dseg|mask>}.{extension}" From 846efcbd3da6eaae5127e91c5790d81da128424e Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 20:09:32 +0200 Subject: [PATCH 28/82] Update test_derivative_cache.py --- petprep/utils/tests/test_derivative_cache.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/petprep/utils/tests/test_derivative_cache.py b/petprep/utils/tests/test_derivative_cache.py index 85eb3d90..d84b1e3f 100644 --- a/petprep/utils/tests/test_derivative_cache.py +++ b/petprep/utils/tests/test_derivative_cache.py @@ -17,11 +17,13 @@ def test_baseline_found_as_str(tmp_path: Path, desc: str): entities = { 'subject': subject, - 'suffix': 'petref', + 'datatype': 'pet', + 'suffix': 'petref', 'extension': '.nii.gz', } derivs = bids.collect_derivatives(derivatives_dir=tmp_path, entities=entities) + assert dict(derivs) == {f'{desc}_petref': str(to_find), 'transforms': {}} From 9046e47b70c95a6f8d5cc2c5bddd6fcbbc80ffa4 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 20:17:01 +0200 Subject: [PATCH 29/82] FIX: update collect derivatives --- petprep/utils/bids.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/petprep/utils/bids.py b/petprep/utils/bids.py index 83e9f8a5..c1471a92 100644 --- a/petprep/utils/bids.py +++ b/petprep/utils/bids.py @@ -40,11 +40,16 @@ @cache -def _get_layout(derivatives_dir: Path) -> BIDSLayout: +def _get_layout(derivatives_dir: Path, patterns: list[str] | None = None) -> BIDSLayout: import niworkflows.data + config_files = [niworkflows.data.load('nipreps.json')] + # explicitly pass patterns if provided return BIDSLayout( - derivatives_dir, config=[niworkflows.data.load('nipreps.json')], validate=False + derivatives_dir, config=config_files, validate=False, + regex_search=True, + derivatives=True, + patterns=patterns if patterns else None ) @@ -54,21 +59,17 @@ def collect_derivatives( spec: dict | None = None, patterns: list[str] | None = None, ): - """Gather existing derivatives and compose a cache.""" if spec is None or patterns is None: _spec, _patterns = tuple( json.loads(load_data.readable('io_spec.json').read_text()).values() ) - if spec is None: - spec = _spec - if patterns is None: - patterns = _patterns + spec 
= spec or _spec + patterns = patterns or _patterns derivs_cache = defaultdict(list, {}) - layout = _get_layout(derivatives_dir) + layout = _get_layout(derivatives_dir, patterns) # <-- Important fix here! - # search for precomputed references for k, q in spec['baseline'].items(): query = {**entities, **q} item = layout.get(return_type='filename', **query) @@ -79,15 +80,12 @@ def collect_derivatives( transforms_cache = {} for xfm, q in spec['transforms'].items(): - # Transform extension will often not match provided entities - # (e.g., ".nii.gz" vs ".txt"). - # And transform suffixes will be "xfm", - # whereas relevant src file will be "bold". query = {**entities, **q} item = layout.get(return_type='filename', **query) if not item: continue transforms_cache[xfm] = item[0] if len(item) == 1 else item + derivs_cache['transforms'] = transforms_cache return derivs_cache From 78383a2ee02e21a90c56263dd5878a6a94eaf633 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 20:21:01 +0200 Subject: [PATCH 30/82] FIX: update collect derivatives --- petprep/utils/bids.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/petprep/utils/bids.py b/petprep/utils/bids.py index c1471a92..e43553ee 100644 --- a/petprep/utils/bids.py +++ b/petprep/utils/bids.py @@ -40,16 +40,17 @@ @cache -def _get_layout(derivatives_dir: Path, patterns: list[str] | None = None) -> BIDSLayout: +def _get_layout(derivatives_dir: Path, patterns: tuple[str, ...] | None = None) -> BIDSLayout: import niworkflows.data config_files = [niworkflows.data.load('nipreps.json')] - # explicitly pass patterns if provided return BIDSLayout( - derivatives_dir, config=config_files, validate=False, - regex_search=True, + derivatives_dir, + config=config_files, + validate=False, + regex_search=True, derivatives=True, - patterns=patterns if patterns else None + patterns=list(patterns) if patterns else None ) @@ -68,7 +69,7 @@ def collect_derivatives( patterns = patterns or _patterns derivs_cache = defaultdict(list, {}) - layout = _get_layout(derivatives_dir, patterns) # <-- Important fix here! + layout = _get_layout(derivatives_dir, tuple(patterns) if patterns else None) for k, q in spec['baseline'].items(): query = {**entities, **q} From 77f796c2edaf3d09be123261974915cede30ca7d Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 20:24:51 +0200 Subject: [PATCH 31/82] FIX: modify collect derivatives to grab PET files --- petprep/utils/bids.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/petprep/utils/bids.py b/petprep/utils/bids.py index e43553ee..e8ef5d66 100644 --- a/petprep/utils/bids.py +++ b/petprep/utils/bids.py @@ -44,13 +44,14 @@ def _get_layout(derivatives_dir: Path, patterns: tuple[str, ...] 
| None = None) import niworkflows.data config_files = [niworkflows.data.load('nipreps.json')] + # Pass patterns explicitly to initialization return BIDSLayout( derivatives_dir, config=config_files, validate=False, regex_search=True, derivatives=True, - patterns=list(patterns) if patterns else None + patterns=list(patterns) if patterns else None, ) @@ -69,8 +70,11 @@ def collect_derivatives( patterns = patterns or _patterns derivs_cache = defaultdict(list, {}) + + # Initialize layout correctly with patterns layout = _get_layout(derivatives_dir, tuple(patterns) if patterns else None) + # Now safely query with the correct filters only for k, q in spec['baseline'].items(): query = {**entities, **q} item = layout.get(return_type='filename', **query) From c84f313fb519c80bb7b057255e1e3589760c7af8 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 20:56:29 +0200 Subject: [PATCH 32/82] FIX: get layout and add nipreps.json to data dir --- petprep/data/nipreps.json | 193 ++++++++++++++++++++++++++++++++++++++ petprep/utils/bids.py | 37 ++++---- 2 files changed, 209 insertions(+), 21 deletions(-) create mode 100644 petprep/data/nipreps.json diff --git a/petprep/data/nipreps.json b/petprep/data/nipreps.json new file mode 100644 index 00000000..2e28de20 --- /dev/null +++ b/petprep/data/nipreps.json @@ -0,0 +1,193 @@ +{ + "name": "nipreps", + "entities": [ + { + "name": "subject", + "pattern": "[/\\\\]+sub-([a-zA-Z0-9+]+)", + "directory": "{subject}" + }, + { + "name": "session", + "pattern": "[_/\\\\]+ses-([a-zA-Z0-9+]+)", + "mandatory": false, + "directory": "{subject}{session}" + }, + { + "name": "task", + "pattern": "[_/\\\\]+task-([a-zA-Z0-9+]+)" + }, + { + "name": "acquisition", + "pattern": "[_/\\\\]+acq-([a-zA-Z0-9+]+)" + }, + { + "name": "ceagent", + "pattern": "[_/\\\\]+ce-([a-zA-Z0-9+]+)" + }, + { + "name": "reconstruction", + "pattern": "[_/\\\\]+rec-([a-zA-Z0-9+]+)" + }, + { + "name": "direction", + "pattern": "[_/\\\\]+dir-([a-zA-Z0-9+]+)" + }, + { + "name": "run", + "pattern": "[_/\\\\]+run-(\\d+)", + "dtype": "int" + }, + { + "name": "proc", + "pattern": "[_/\\\\]+proc-([a-zA-Z0-9+]+)" + }, + { + "name": "modality", + "pattern": "[_/\\\\]+mod-([a-zA-Z0-9+]+)" + }, + { + "name": "echo", + "pattern": "[_/\\\\]+echo-([0-9]+)" + }, + { + "name": "flip", + "pattern": "[_/\\\\]+flip-([0-9]+)" + }, + { + "name": "inv", + "pattern": "[_/\\\\]+inv-([0-9]+)" + }, + { + "name": "mt", + "pattern": "[_/\\\\]+mt-(on|off)" + }, + { + "name": "part", + "pattern": "[_/\\\\]+part-(mag|phase|real|imag)" + }, + { + "name": "recording", + "pattern": "[_/\\\\]+recording-([a-zA-Z0-9+]+)" + }, + { + "name": "space", + "pattern": "[_/\\\\]+space-([a-zA-Z0-9+]+)" + }, + { + "name": "suffix", + "pattern": "[._]*([a-zA-Z0-9]*?)\\.[^/\\\\]+$" + }, + { + "name": "scans", + "pattern": "(.*\\_scans.tsv)$" + }, + { + "name": "fmap", + "pattern": "(phasediff|magnitude[1-2]|phase[1-2]|fieldmap|epi)\\.nii" + }, + { + "name": "datatype", + "pattern": "[/\\\\]+(func|anat|pet|fmap|dwi|meg|eeg|perf|figures)[/\\\\]+" + }, + { + "name": "extension", + "pattern": "[._]*[a-zA-Z0-9]*?(\\.[^/\\\\]+)$" + }, + { + "name": "atlas", + "pattern": "[_/\\\\]+atlas-([a-zA-Z0-9+]+)" + }, + { + "name": "roi", + "pattern": "[_/\\\\]+roi-([a-zA-Z0-9+]+)" + }, + { + "name": "label", + "pattern": "[_/\\\\]+label-([a-zA-Z0-9+]+)" + }, + { + "name": "fmapid", + "pattern": "[_/\\\\]+fmapid-([a-zA-Z0-9+]+)" + }, + { + "name": "desc", + "pattern": "[_/\\\\]+desc-([a-zA-Z0-9+]+)" + }, + { + "name": "from", + "pattern": 
"(?:^|_)from-([a-zA-Z0-9+]+).*xfm" + }, + { + "name": "to", + "pattern": "(?:^|_)to-([a-zA-Z0-9+]+).*xfm" + }, + { + "name": "mode", + "pattern": "(?:^|_)mode-(image|points).*xfm" + }, + { + "name": "hemi", + "pattern": "hemi-(L|R)" + }, + { + "name": "model", + "pattern": "model-([a-zA-Z0-9+]+)" + }, + { + "name": "subset", + "pattern": "subset-([a-zA-Z0-9+]+)" + }, + { + "name": "resolution", + "pattern": "res-([a-zA-Z0-9+]+)" + }, + { + "name": "density", + "pattern": "res-([a-zA-Z0-9+]+)" + }, + { + "name": "cohort", + "pattern": "[_/\\\\]+cohort-0*(\\d+)", + "dtype": "int" + } + ], + "default_path_patterns": [ + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_space-{space}][_cohort-{cohort}][_res-{resolution}][_desc-{desc}]_{suffix}{extension<.nii|.nii.gz|.json>|.nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_hemi-{hemi}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}{extension<.txt|.h5>}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}]_hemi-{hemi}[_space-{space}][_cohort-{cohort}][_den-{density}][_desc-{desc}]_{suffix}{extension<.surf.gii|.shape.gii>}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_space-{space}][_cohort-{cohort}][_den-{density}][_desc-{desc}]_{suffix}{extension<.dscalar.nii|.json>}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_space-{space}][_cohort-{cohort}][_res-{resolution}]_desc-{desc}_{suffix|mask}{extension<.nii|.nii.gz|.json>|.nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_space-{space}][_cohort-{cohort}][_res-{resolution}]_label-{label}[_desc-{desc}]_{suffix|probseg}{extension<.nii|.nii.gz|.json>|.nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_echo-{echo}][_part-{part}][_space-{space}][_cohort-{cohort}][_res-{resolution}][_desc-{desc}]_{suffix}{extension<.nii|.nii.gz|.json>|.nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_echo-{echo}][_space-{space}][_cohort-{cohort}][_res-{resolution}][_desc-{desc}]_{suffix}{extension<.nii|.nii.gz|.json>|.nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_hemi-{hemi}]_from-{from}_to-{to}_mode-{mode|image}[_desc-{desc}]_{suffix|xfm}{extension<.txt|.h5>}", + "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_echo-{echo}][_part-{part}][_space-{space}][_cohort-{cohort}][_res-{resolution}]_desc-{desc}_{suffix|mask}{extension<.nii|.nii.gz|.json>|.nii.gz}", + 
"sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_echo-{echo}][_part-{part}][_space-{space}][_cohort-{cohort}][_desc-{desc}]_{suffix|AROMAnoiseICs}{extension<.csv|.tsv>|.csv}", + "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_echo-{echo}][_part-{part}][_space-{space}][_cohort-{cohort}][_desc-{desc}]_{suffix|timeseries}{extension<.json|.tsv>|.tsv}", + "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_echo-{echo}][_part-{part}][_space-{space}][_cohort-{cohort}][_desc-{desc}]_{suffix|components}{extension<.json|.tsv|.nii|.nii.gz>|.tsv}", + "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_echo-{echo}][_part-{part}][_space-{space}][_cohort-{cohort}][_desc-{desc}]_{suffix|decomposition}{extension<.json>|.json}", + "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_echo-{echo}][_hemi-{hemi}][_space-{space}][_cohort-{cohort}][_den-{density}][_desc-{desc}]_{suffix}{extension<.dtseries.nii|.dtseries.json|.func.gii|.func.json>}", + "sub-{subject}[/ses-{session}]/{datatype}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_space-{space}][_cohort-{cohort}][_res-{resolution}][_desc-{desc}]_{suffix}{extension<.nii|.nii.gz|.json>|.nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|dwi}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_space-{space}][_cohort-{cohort}][_res-{resolution}][_desc-{desc}]_{suffix}{extension<.json|.nii.gz|.nii>|.nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|dwi}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_space-{space}][_cohort-{cohort}][_res-{resolution}]_desc-{desc}_{suffix}{extension<.json|.nii.gz|.nii>|.nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|dwi}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_space-{space}][_cohort-{cohort}][_res-{resolution}][_desc-{desc}]_{suffix}{extension<.tsv|.bval|.bvec|.b>|.tsv}", + "sub-{subject}[/ses-{session}]/{datatype|dwi}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_rec-{reconstruction}][_dir-{direction}][_run-{run}]_from-{from}_to-{to}_mode-{mode|image}[_desc-{desc}]_{suffix|xfm}{extension<.txt|.h5>}", + "sub-{subject}[/ses-{session}]/{datatype|perf}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_rec-{reconstruction}][_dir-{direction}][_run-{run}]_{suffix}{extension<.tsv|.json>|.tsv}", + "sub-{subject}[/ses-{session}]/{datatype|perf}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}{extension<.txt|.h5>}", + 
"sub-{subject}[/ses-{session}]/{datatype|perf}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_space-{space}][_atlas-{atlas}][_cohort-{cohort}][_desc-{desc}]_{suffix}{extension<.json|.tsv>|.tsv}", + "sub-{subject}[/ses-{session}]/{datatype|perf}/sub-{subject}[_ses-{session}][_task-{task}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_space-{space}][_atlas-{atlas}][_cohort-{cohort}][_desc-{desc}]_{suffix}{extension<.nii|.nii.gz|.json|.tsv>|.tsv}", + "sub-{subject}[/ses-{session}]/{datatype|fmap}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_dir-{direction}][_run-{run}][_part-{part}][_space-{space}][_cohort-{cohort}][_res-{resolution}][_fmapid-{fmapid}][_desc-{desc}]_{suffix}{extension<.nii|.nii.gz|.json>|.nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_space-{space}][_cohort-{cohort}][_res-{resolution}][_desc-{desc}]_{suffix}{extension<.nii|.nii.gz|.json>|.nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_hemi-{hemi}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}{extension<.txt|.h5>}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}]_hemi-{hemi}[_space-{space}][_cohort-{cohort}][_den-{density}][_desc-{desc}]_{suffix}{extension<.surf.gii|.shape.gii>}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_space-{space}][_cohort-{cohort}][_den-{density}][_desc-{desc}]_{suffix}{extension<.dscalar.nii|.json>}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_space-{space}][_cohort-{cohort}][_res-{resolution}]_desc-{desc}_{suffix|mask}{extension<.nii|.nii.gz|.json>|.nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_space-{space}][_cohort-{cohort}][_res-{resolution}]_label-{label}[_desc-{desc}]_{suffix|probseg}{extension<.nii|.nii.gz|.json>|.nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|pet}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_part-{part}][_space-{space}][_atlas-{atlas}][_cohort-{cohort}][_desc-{desc}]_{suffix|timeseries}{extension<.json|.tsv>|.tsv}", + "sub-{subject}/{datatype}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_space-{space}][_cohort-{cohort}][_desc-{desc}]_{suffix}{extension<.html|.svg>|.svg}", + "sub-{subject}/{datatype}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_space-{space}][_cohort-{cohort}][_fmapid-{fmapid}][_desc-{desc}]_{suffix}{extension<.html|.svg>|.svg}", + "sub-{subject}/{datatype}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_echo-{echo}][_part-{part}][_space-{space}][_cohort-{cohort}][_desc-{desc}]_{suffix}{extension<.html|.svg>|.svg}", + 
"sub-{subject}/{datatype}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_dir-{direction}][_run-{run}][_echo-{echo}][_part-{part}][_space-{space}][_cohort-{cohort}][_desc-{desc}]_{suffix}{extension<.html|.svg>|.svg}", + "sub-{subject}/{datatype}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_rec-{reconstruction}][_run-{run}][_space-{space}][_cohort-{cohort}][_desc-{desc}]_{suffix}{extension<.html|.svg|.png>|.html}" + ] +} diff --git a/petprep/utils/bids.py b/petprep/utils/bids.py index e8ef5d66..5dfb44d8 100644 --- a/petprep/utils/bids.py +++ b/petprep/utils/bids.py @@ -40,18 +40,11 @@ @cache -def _get_layout(derivatives_dir: Path, patterns: tuple[str, ...] | None = None) -> BIDSLayout: - import niworkflows.data - config_files = [niworkflows.data.load('nipreps.json')] +def _get_layout(derivatives_dir: Path) -> BIDSLayout: + from petprep.data import load as load_data - # Pass patterns explicitly to initialization return BIDSLayout( - derivatives_dir, - config=config_files, - validate=False, - regex_search=True, - derivatives=True, - patterns=list(patterns) if patterns else None, + derivatives_dir, config=[load_data('nipreps.json')], validate=False ) @@ -61,36 +54,39 @@ def collect_derivatives( spec: dict | None = None, patterns: list[str] | None = None, ): + """Gather existing derivatives and compose a cache.""" if spec is None or patterns is None: _spec, _patterns = tuple( json.loads(load_data.readable('io_spec.json').read_text()).values() ) - spec = spec or _spec - patterns = patterns or _patterns + if spec is None: + spec = _spec + if patterns is None: + patterns = _patterns derivs_cache = defaultdict(list, {}) - - # Initialize layout correctly with patterns - layout = _get_layout(derivatives_dir, tuple(patterns) if patterns else None) + layout = _get_layout(derivatives_dir) - # Now safely query with the correct filters only + # search for both petrefs for k, q in spec['baseline'].items(): query = {**entities, **q} item = layout.get(return_type='filename', **query) if not item: continue - suffix = q.get('suffix', entities.get('suffix', '')) - derivs_cache[f'{k}_{suffix}'] = item[0] if len(item) == 1 else item + derivs_cache[f'{k}_petref'] = item[0] if len(item) == 1 else item transforms_cache = {} for xfm, q in spec['transforms'].items(): + # Transform extension will often not match provided entities + # (e.g., ".nii.gz" vs ".txt"). + # And transform suffixes will be "xfm", + # whereas relevant src file will be "bold". 
query = {**entities, **q} item = layout.get(return_type='filename', **query) if not item: continue transforms_cache[xfm] = item[0] if len(item) == 1 else item - derivs_cache['transforms'] = transforms_cache return derivs_cache @@ -103,9 +99,8 @@ def write_bidsignore(deriv_dir): '*_xfm.*', # Unspecified transform files '*.surf.gii', # Unspecified structural outputs # Unspecified functional outputs - '*_boldref.nii.gz', '*_petref.nii.gz', - '*_bold.func.gii', + '*_pet.pet.gii', '*_mixing.tsv', '*_timeseries.tsv', ) From 83ae00e28c632f8215d596b189dbbeba61caed77 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 21:08:10 +0200 Subject: [PATCH 33/82] fix estimate_pet_mem_usage --- petprep/workflows/pet/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/petprep/workflows/pet/base.py b/petprep/workflows/pet/base.py index 0725bd4e..747782d3 100644 --- a/petprep/workflows/pet/base.py +++ b/petprep/workflows/pet/base.py @@ -156,7 +156,7 @@ def init_pet_wf( omp_nthreads = config.nipype.omp_nthreads all_metadata = [config.execution.layout.get_metadata(file) for file in pet_series] - nvols, mem_gb = estimate_pet_mem_usage(pet_file) + nvols, mem_gb = estimate_pet_mem_usage(pet_file[0]) if nvols <= 5 - config.execution.sloppy: config.loggers.workflow.warning( f'Too short PET series (<= 5 timepoints). Skipping processing of <{pet_file}>.' From 5342188ac88b897a08690d775a23eb328223deb5 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Wed, 4 Jun 2025 21:13:35 +0200 Subject: [PATCH 34/82] FIX: handle a single filename --- petprep/workflows/pet/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/petprep/workflows/pet/base.py b/petprep/workflows/pet/base.py index 747782d3..c6535206 100644 --- a/petprep/workflows/pet/base.py +++ b/petprep/workflows/pet/base.py @@ -173,7 +173,7 @@ def init_pet_wf( mem_gb['largemem'], ) - workflow = Workflow(name=_get_wf_name(pet_file, 'pet')) + workflow = Workflow(name=_get_wf_name(pet_file[0], 'pet')) workflow.__postdesc__ = """\ All resamplings can be performed with *a single interpolation step* by composing all the pertinent transformations (i.e. head-motion From cdc0de13f8488eed76ba07a5f00266718c06214c Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 09:54:48 +0200 Subject: [PATCH 35/82] FIX: update base workflow to distinguish between pet_series and pet_file --- petprep/workflows/pet/base.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/petprep/workflows/pet/base.py b/petprep/workflows/pet/base.py index c6535206..34d183dd 100644 --- a/petprep/workflows/pet/base.py +++ b/petprep/workflows/pet/base.py @@ -75,7 +75,7 @@ def init_pet_wf( Parameters ---------- - pet_file + pet_series List of paths to NIfTI files. precomputed Dictionary containing precomputed derivatives to reuse, if possible. @@ -150,13 +150,13 @@ def init_pet_wf( if precomputed is None: precomputed = {} - pet_file = pet_series + pet_file = pet_series[0] petprep_dir = config.execution.petprep_dir omp_nthreads = config.nipype.omp_nthreads all_metadata = [config.execution.layout.get_metadata(file) for file in pet_series] - nvols, mem_gb = estimate_pet_mem_usage(pet_file[0]) + nvols, mem_gb = estimate_pet_mem_usage(pet_file) if nvols <= 5 - config.execution.sloppy: config.loggers.workflow.warning( f'Too short PET series (<= 5 timepoints). Skipping processing of <{pet_file}>.' 
@@ -173,7 +173,7 @@ def init_pet_wf( mem_gb['largemem'], ) - workflow = Workflow(name=_get_wf_name(pet_file[0], 'pet')) + workflow = Workflow(name=_get_wf_name(pet_file, 'pet')) workflow.__postdesc__ = """\ All resamplings can be performed with *a single interpolation step* by composing all the pertinent transformations (i.e. head-motion @@ -224,7 +224,7 @@ def init_pet_wf( # pet_fit_wf = init_pet_fit_wf( - pet_file=pet_file, + pet_series=pet_series, precomputed=precomputed, omp_nthreads=omp_nthreads, ) @@ -254,7 +254,7 @@ def init_pet_wf( # pet_native_wf = init_pet_native_wf( - pet_file=pet_file, + pet_series=pet_series, omp_nthreads=omp_nthreads, ) @@ -294,6 +294,7 @@ def init_pet_wf( # Resample to anatomical space pet_anat_wf = init_pet_volumetric_resample_wf( + metadata=all_metadata[0], omp_nthreads=omp_nthreads, mem_gb=mem_gb, name='pet_anat_wf', @@ -354,7 +355,7 @@ def init_pet_wf( metadata=all_metadata[0], name='ds_pet_std_wf', ) - ds_pet_std_wf.inputs.inputnode.source_files = pet_file + ds_pet_std_wf.inputs.inputnode.source_files = pet_series workflow.connect([ (inputnode, pet_std_wf, [ From 210806a5b545cefd298b1e40a468c3d716d990dd Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 10:09:53 +0200 Subject: [PATCH 36/82] FIX: update fit to match pet_series and pet_file --- petprep/workflows/pet/fit.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/petprep/workflows/pet/fit.py b/petprep/workflows/pet/fit.py index b06360e9..d1b516e6 100644 --- a/petprep/workflows/pet/fit.py +++ b/petprep/workflows/pet/fit.py @@ -48,7 +48,7 @@ def init_pet_fit_wf( *, - pet_file: str, + pet_series: list[str], precomputed: dict = None, omp_nthreads: int = 1, name: str = 'pet_fit_wf', @@ -128,6 +128,8 @@ def init_pet_fit_wf( precomputed = {} layout = config.execution.layout + pet_file = pet_series[0] + # Get metadata from PET file(s) metadata = layout.get_metadata(pet_file) orientation = ''.join(nb.aff2axcodes(nb.load(pet_file).affine)) @@ -159,7 +161,7 @@ def init_pet_fit_wf( ), name='inputnode', ) - inputnode.inputs.pet_file = pet_file + inputnode.inputs.pet_file = pet_series outputnode = pe.Node( niu.IdentityInterface( @@ -402,7 +404,7 @@ def init_pet_fit_wf( def init_pet_native_wf( *, - pet_file: str, + pet_series: list[str], omp_nthreads: int = 1, name: str = 'pet_native_wf', ) -> pe.Workflow: @@ -428,8 +430,8 @@ def init_pet_native_wf( Parameters ---------- - pet_file - Path to NIfTI file. + pet_series + List of paths to NIfTI files. Inputs ------ @@ -457,7 +459,10 @@ def init_pet_native_wf( layout = config.execution.layout - metadata = layout.get_metadata(pet_file) + all_metadata = [layout.get_metadata(pet_file) for pet_file in pet_series] + + pet_file = pet_series[0] + metadata = all_metadata[0] _, mem_gb = estimate_pet_mem_usage(pet_file) @@ -495,7 +500,7 @@ def init_pet_native_wf( # The Select interface requires an index to choose from ``inlist``. Since # ``pet_file`` is a single path, explicitly set the index to ``0`` to avoid # missing mandatory input errors when the node runs. 
- pet_source = pe.Node(niu.Select(inlist=[pet_file], index=0), name='pet_source') + pet_source = pe.Node(niu.Select(inlist=pet_series, index=0), name='pet_source') validate_pet = pe.Node(ValidateImage(), name='validate_pet') workflow.connect([ (pet_source, validate_pet, [('out', 'in_file')]), From 3ae5399b5ef8e8a0e660ebeecd7fe39f526613ce Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 10:15:23 +0200 Subject: [PATCH 37/82] FIX: allow parsing of metadata to apply --- petprep/workflows/pet/apply.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/petprep/workflows/pet/apply.py b/petprep/workflows/pet/apply.py index ba66ab62..c50eb215 100644 --- a/petprep/workflows/pet/apply.py +++ b/petprep/workflows/pet/apply.py @@ -9,6 +9,7 @@ def init_pet_volumetric_resample_wf( *, + metadata: dict, mem_gb: dict[str, float], omp_nthreads: int = 1, name: str = 'pet_volumetric_resample_wf', @@ -27,6 +28,8 @@ def init_pet_volumetric_resample_wf( Parameters ---------- + metadata + BIDS metadata for PET file. omp_nthreads Maximum number of threads an individual process may use. name From e4a33a95ff2feaac7902a9a5c590795bc5d6e9fc Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 10:19:41 +0200 Subject: [PATCH 38/82] FIX: remove metadata parsing in init_pet_volumetric_resample_wf --- petprep/workflows/pet/apply.py | 1 - petprep/workflows/pet/base.py | 1 - 2 files changed, 2 deletions(-) diff --git a/petprep/workflows/pet/apply.py b/petprep/workflows/pet/apply.py index c50eb215..747a7260 100644 --- a/petprep/workflows/pet/apply.py +++ b/petprep/workflows/pet/apply.py @@ -9,7 +9,6 @@ def init_pet_volumetric_resample_wf( *, - metadata: dict, mem_gb: dict[str, float], omp_nthreads: int = 1, name: str = 'pet_volumetric_resample_wf', diff --git a/petprep/workflows/pet/base.py b/petprep/workflows/pet/base.py index 34d183dd..53bfe2b2 100644 --- a/petprep/workflows/pet/base.py +++ b/petprep/workflows/pet/base.py @@ -294,7 +294,6 @@ def init_pet_wf( # Resample to anatomical space pet_anat_wf = init_pet_volumetric_resample_wf( - metadata=all_metadata[0], omp_nthreads=omp_nthreads, mem_gb=mem_gb, name='pet_anat_wf', From 4e5be1b678723ed46158592dc283a38423706394 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 10:29:11 +0200 Subject: [PATCH 39/82] Update test_base.py --- petprep/workflows/pet/tests/test_base.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/petprep/workflows/pet/tests/test_base.py b/petprep/workflows/pet/tests/test_base.py index 1d5377fe..e2bbfaa5 100644 --- a/petprep/workflows/pet/tests/test_base.py +++ b/petprep/workflows/pet/tests/test_base.py @@ -50,10 +50,10 @@ def test_pet_wf( img = nb.Nifti1Image(np.zeros((10, 10, 10, 10)), np.eye(4)) - if task == 'rest': - pet_series = [ - str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz'), - ] + + pet_series = [ + str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz'), + ] # The workflow will attempt to read file headers From 4758d05277eb98f39d46590c09ea0e7ca5515b75 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 10:31:58 +0200 Subject: [PATCH 40/82] FIX: update import in resampling to be petprep --- petprep/workflows/pet/resampling.py | 2 +- petprep/workflows/pet/tests/test_base.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/petprep/workflows/pet/resampling.py b/petprep/workflows/pet/resampling.py index a63f9356..b36d663d 100644 --- a/petprep/workflows/pet/resampling.py +++ 
b/petprep/workflows/pet/resampling.py @@ -112,7 +112,7 @@ def init_pet_surf_wf( from niworkflows.interfaces.nitransforms import ConcatenateXFMs from niworkflows.interfaces.surf import GiftiSetAnatomicalStructure - from fmriprep.interfaces import DerivativesDataSink + from petprep.interfaces import DerivativesDataSink timing_parameters = prepare_timing_parameters(metadata) diff --git a/petprep/workflows/pet/tests/test_base.py b/petprep/workflows/pet/tests/test_base.py index e2bbfaa5..1d5377fe 100644 --- a/petprep/workflows/pet/tests/test_base.py +++ b/petprep/workflows/pet/tests/test_base.py @@ -50,10 +50,10 @@ def test_pet_wf( img = nb.Nifti1Image(np.zeros((10, 10, 10, 10)), np.eye(4)) - - pet_series = [ - str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz'), - ] + if task == 'rest': + pet_series = [ + str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz'), + ] # The workflow will attempt to read file headers From 32023b2ba17d963fb51defe9e0d1f08149254f74 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 10:41:23 +0200 Subject: [PATCH 41/82] FIX: update test_fit to match pet_series --- petprep/workflows/pet/tests/test_fit.py | 46 ++++++++++++++++++------- 1 file changed, 33 insertions(+), 13 deletions(-) diff --git a/petprep/workflows/pet/tests/test_fit.py b/petprep/workflows/pet/tests/test_fit.py index 96e8e7c1..09ad2d06 100644 --- a/petprep/workflows/pet/tests/test_fit.py +++ b/petprep/workflows/pet/tests/test_fit.py @@ -74,10 +74,13 @@ def test_pet_fit_precomputes( img = nb.Nifti1Image(np.zeros((10, 10, 10, 10)), np.eye(4)) if task == 'rest': - pet_file = str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz') + pet_series = [ + str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz') + ] # The workflow will attempt to read file headers - img.to_filename(pet_file) + for path in pet_series: + img.to_filename(path) dummy_nifti = str(tmp_path / 'dummy.nii') dummy_affine = str(tmp_path / 'dummy.txt') @@ -95,7 +98,7 @@ def test_pet_fit_precomputes( with mock_config(bids_dir=bids_root): wf = init_pet_fit_wf( - pet_file=pet_file, + pet_series=pet_series, precomputed=precomputed, omp_nthreads=1, ) @@ -104,7 +107,7 @@ def test_pet_fit_precomputes( generate_expanded_graph(flatgraph) -@pytest.mark.parametrize('task', ['rest', 'nback']) +@pytest.mark.parametrize('task', ['rest']) @pytest.mark.parametrize('run_stc', [True, False]) def test_pet_native_precomputes( bids_root: Path, @@ -120,15 +123,18 @@ def test_pet_native_precomputes( img = nb.Nifti1Image(np.zeros((10, 10, 10, 10)), np.eye(4)) if task == 'rest': - pet_file = str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz') + pet_series = [ + str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz') + ] # The workflow will attempt to read file headers - img.to_filename(pet_file) + for path in pet_series: + img.to_filename(path) with mock_config(bids_dir=bids_root): config.workflow.ignore = ['slicetiming'] if not run_stc else [] wf = init_pet_native_wf( - pet_file=pet_file, + pet_series=pet_series, omp_nthreads=1, ) @@ -138,12 +144,19 @@ def test_pet_native_precomputes( def test_pet_fit_mask_connections(bids_root: Path, tmp_path: Path): """Ensure the PET mask is generated and connected correctly.""" - pet_file = str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz') + pet_series = [ + str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz') + ] img = nb.Nifti1Image(np.zeros((2, 2, 2, 1)), np.eye(4)) - 
img.to_filename(pet_file) + + for path in pet_series: + img.to_filename(path) with mock_config(bids_dir=bids_root): - wf = init_pet_fit_wf(pet_file=pet_file, precomputed={}, omp_nthreads=1) + wf = init_pet_fit_wf( + pet_series=pet_series, + precomputed={}, + omp_nthreads=1) assert 'merge_mask' in wf.list_node_names() assert 'ds_petmask_wf.ds_petmask' in wf.list_node_names() @@ -158,12 +171,19 @@ def test_pet_fit_mask_connections(bids_root: Path, tmp_path: Path): def test_petref_report_connections(bids_root: Path, tmp_path: Path): """Ensure the PET reference is passed to the reports workflow.""" - pet_file = str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz') + pet_series = [ + str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz') + ] img = nb.Nifti1Image(np.zeros((2, 2, 2, 1)), np.eye(4)) - img.to_filename(pet_file) + + for path in pet_series: + img.to_filename(path) with mock_config(bids_dir=bids_root): - wf = init_pet_fit_wf(pet_file=pet_file, precomputed={}, omp_nthreads=1) + wf = init_pet_fit_wf( + pet_series=pet_series, + precomputed={}, + mp_nthreads=1) petref_buffer = wf.get_node('petref_buffer') edge = wf._graph.get_edge_data(petref_buffer, wf.get_node('func_fit_reports_wf')) From f183be7a7df068afd144dee1a1936fcfae305fec Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 10:44:52 +0200 Subject: [PATCH 42/82] FIX: fix typo --- petprep/workflows/pet/tests/test_fit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/petprep/workflows/pet/tests/test_fit.py b/petprep/workflows/pet/tests/test_fit.py index 09ad2d06..2c731e59 100644 --- a/petprep/workflows/pet/tests/test_fit.py +++ b/petprep/workflows/pet/tests/test_fit.py @@ -183,7 +183,7 @@ def test_petref_report_connections(bids_root: Path, tmp_path: Path): wf = init_pet_fit_wf( pet_series=pet_series, precomputed={}, - mp_nthreads=1) + omp_nthreads=1) petref_buffer = wf.get_node('petref_buffer') edge = wf._graph.get_edge_data(petref_buffer, wf.get_node('func_fit_reports_wf')) From 88a3699ae0f35718564631eac9a42cfabfae4068 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 10:52:33 +0200 Subject: [PATCH 43/82] FIX: update mask test --- petprep/workflows/pet/tests/test_pet_mask.py | 28 ++++++++++++++------ 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/petprep/workflows/pet/tests/test_pet_mask.py b/petprep/workflows/pet/tests/test_pet_mask.py index b135f2c8..d2199387 100644 --- a/petprep/workflows/pet/tests/test_pet_mask.py +++ b/petprep/workflows/pet/tests/test_pet_mask.py @@ -6,16 +6,28 @@ from ...tests.test_base import BASE_LAYOUT from ..base import init_pet_wf - -def test_pet_mask_flow(tmp_path): - bids_dir = tmp_path / 'bids' +@pytest.fixture(scope='module') +def bids_root(tmp_path_factory): + base = tmp_path_factory.mktemp('petfit') + bids_dir = base / 'bids' generate_bids_skeleton(bids_dir, BASE_LAYOUT) - img = nb.Nifti1Image(np.zeros((2, 2, 2, 10)), np.eye(4)) - pet_file = bids_dir / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz' - img.to_filename(pet_file) + return bids_dir + + +def test_pet_mask_flow(bids_root: Path, tmp_path: Path): + pet_series = [ + str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz') + ] + img = nb.Nifti1Image(np.zeros((2, 2, 2, 1)), np.eye(4)) + + for path in pet_series: + img.to_filename(path) - with mock_config(bids_dir=bids_dir): - wf = init_pet_wf(pet_series=str(pet_file), precomputed={}) + with mock_config(bids_dir=bids_root): + wf = init_pet_wf( + pet_series=pet_series, + 
precomputed={} + ) edge = wf._graph.get_edge_data( wf.get_node('pet_fit_wf'), wf.get_node('pet_confounds_wf') From ee573a423e02ee5694d40b0662cc5c09277c6fc6 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 10:55:12 +0200 Subject: [PATCH 44/82] FIX: add pytest and Path to mask test --- petprep/workflows/pet/tests/test_pet_mask.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/petprep/workflows/pet/tests/test_pet_mask.py b/petprep/workflows/pet/tests/test_pet_mask.py index d2199387..0afef05a 100644 --- a/petprep/workflows/pet/tests/test_pet_mask.py +++ b/petprep/workflows/pet/tests/test_pet_mask.py @@ -1,6 +1,9 @@ +from pathlib import Path + import nibabel as nb import numpy as np from niworkflows.utils.testing import generate_bids_skeleton +import pytest from ...tests import mock_config from ...tests.test_base import BASE_LAYOUT From 980cec161766982e11f4f645a15cff26bf7f859b Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 11:01:36 +0200 Subject: [PATCH 45/82] FIX: change threshold for n pet_series volumes --- petprep/workflows/pet/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/petprep/workflows/pet/base.py b/petprep/workflows/pet/base.py index 53bfe2b2..92286d15 100644 --- a/petprep/workflows/pet/base.py +++ b/petprep/workflows/pet/base.py @@ -157,7 +157,7 @@ def init_pet_wf( all_metadata = [config.execution.layout.get_metadata(file) for file in pet_series] nvols, mem_gb = estimate_pet_mem_usage(pet_file) - if nvols <= 5 - config.execution.sloppy: + if nvols <= 1 - config.execution.sloppy: config.loggers.workflow.warning( f'Too short PET series (<= 5 timepoints). Skipping processing of <{pet_file}>.' ) From bd00e6d9675a9c14c437208da746a88155775aaf Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 11:01:48 +0200 Subject: [PATCH 46/82] FIX: update data in mask test --- petprep/workflows/pet/tests/test_pet_mask.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/petprep/workflows/pet/tests/test_pet_mask.py b/petprep/workflows/pet/tests/test_pet_mask.py index 0afef05a..b952bbbf 100644 --- a/petprep/workflows/pet/tests/test_pet_mask.py +++ b/petprep/workflows/pet/tests/test_pet_mask.py @@ -21,7 +21,7 @@ def test_pet_mask_flow(bids_root: Path, tmp_path: Path): pet_series = [ str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz') ] - img = nb.Nifti1Image(np.zeros((2, 2, 2, 1)), np.eye(4)) + img = nb.Nifti1Image(np.zeros((2, 2, 2, 5)), np.eye(4)) for path in pet_series: img.to_filename(path) From 570330ae71ba7686348a5cf2595feab133c40b76 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 11:05:59 +0200 Subject: [PATCH 47/82] FIX: update mask test --- petprep/workflows/pet/tests/test_pet_mask.py | 27 +++++++++++++------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/petprep/workflows/pet/tests/test_pet_mask.py b/petprep/workflows/pet/tests/test_pet_mask.py index b952bbbf..019ffa9c 100644 --- a/petprep/workflows/pet/tests/test_pet_mask.py +++ b/petprep/workflows/pet/tests/test_pet_mask.py @@ -21,7 +21,7 @@ def test_pet_mask_flow(bids_root: Path, tmp_path: Path): pet_series = [ str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz') ] - img = nb.Nifti1Image(np.zeros((2, 2, 2, 5)), np.eye(4)) + img = nb.Nifti1Image(np.zeros((2, 2, 2, 10)), np.eye(4)) for path in pet_series: img.to_filename(path) @@ -30,15 +30,24 @@ def test_pet_mask_flow(bids_root: Path, tmp_path: Path): wf = init_pet_wf( pet_series=pet_series, precomputed={} - ) + ) - edge = 
wf._graph.get_edge_data( - wf.get_node('pet_fit_wf'), wf.get_node('pet_confounds_wf') - ) - assert ('pet_mask', 'inputnode.pet_mask') in edge['connect'] + assert wf is not None, "Workflow was not initialized." + + pet_fit_node = wf.get_node('pet_fit_wf') + pet_confounds_node = wf.get_node('pet_confounds_wf') + + assert pet_fit_node is not None, "pet_fit_wf node missing" + assert pet_confounds_node is not None, "pet_confounds_wf node missing" + + edge = wf._graph.get_edge_data(pet_fit_node, pet_confounds_node) + assert edge is not None, "Edge missing between pet_fit_wf and pet_confounds_wf" + + # Correct assertion: + assert ('outputnode.pet_mask', 'inputnode.pet_mask') in edge['connect'] - conf_wf = wf.get_node('pet_confounds_wf') - conf_edge = conf_wf._graph.get_edge_data( - conf_wf.get_node('inputnode'), conf_wf.get_node('dvars') + conf_edge = pet_confounds_node._graph.get_edge_data( + pet_confounds_node.get_node('inputnode'), pet_confounds_node.get_node('dvars') ) + assert conf_edge is not None, "Confound edge is missing." assert ('pet_mask', 'in_mask') in conf_edge['connect'] \ No newline at end of file From a6fa7bc76bd74f80570695a5076ca90760b6a476 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 11:12:11 +0200 Subject: [PATCH 48/82] FIX: update test_base with fmriprep to petprep --- petprep/workflows/tests/test_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index 29fa4b70..3ab73433 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -156,7 +156,7 @@ def test_init_petprep_wf( config.workflow.run_reconall = freesurfer config.workflow.ignore = ignore config.workflow.force = force - with patch.dict('fmriprep.config.execution.bids_filters', bids_filters): + with patch.dict('petprep.config.execution.bids_filters', bids_filters): wf = init_petprep_wf() generate_expanded_graph(wf._create_flat_graph()) From e1dcb771088115cceb5523ee23d395cde033cf12 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 11:21:01 +0200 Subject: [PATCH 49/82] FIX: update base test to match PET data --- petprep/workflows/tests/test_base.py | 53 +++++++++++++++++++--------- 1 file changed, 37 insertions(+), 16 deletions(-) diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index 3ab73433..d96671b7 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -1,5 +1,6 @@ from pathlib import Path from unittest.mock import patch +import json import nibabel as nb import numpy as np @@ -14,20 +15,19 @@ BASE_LAYOUT = { '01': { 'anat': [ - {'run': 1, 'suffix': 'T1w'}, - {'run': 2, 'suffix': 'T1w'}, - {'suffix': 'T2w'}, + {'suffix': 'T1w'}, + {'suffix': 'inplaneT2'}, ], 'pet': [ - *( - { - 'task': 'rest', - 'run': i, - 'suffix': 'pet', - 'metadata': {}, - } - for i in range(1, 3) - ), + { + 'suffix': 'pet', + 'metadata': {}, + }, + ], + 'func': [ + {'task': 'mixedgamblestask', 'run': 1, 'suffix': 'bold'}, + {'task': 'mixedgamblestask', 'run': 2, 'suffix': 'bold'}, + {'task': 'mixedgamblestask', 'run': 3, 'suffix': 'bold'}, ], }, } @@ -52,8 +52,31 @@ def bids_root(tmp_path_factory): img = nb.Nifti1Image(np.zeros((10, 10, 10, 10)), np.eye(4)) - for img_path in bids_dir.glob('sub-01/*/*.nii.gz'): - img.to_filename(img_path) + # anat files + anat_dir = bids_dir / "sub-01" / "anat" + anat_dir.mkdir(parents=True, exist_ok=True) + img.to_filename(anat_dir / "sub-01_T1w.nii.gz") + 
img.to_filename(anat_dir / "sub-01_inplaneT2.nii.gz") + + # pet file + pet_dir = bids_dir / "sub-01" / "pet" + pet_dir.mkdir(parents=True, exist_ok=True) + pet_path = pet_dir / "sub-01_pet.nii.gz" + img.to_filename(pet_path) + + # Add metadata explicitly + metadata = {} + json_path = pet_dir / "sub-01_pet.json" + json_path.write_text(json.dumps(metadata)) + + # func files (optional for PET workflow but included for consistency) + func_dir = bids_dir / "sub-01" / "func" + func_dir.mkdir(parents=True, exist_ok=True) + for run in range(1, 4): + func_path = func_dir / f"sub-01_task-mixedgamblestask_run-0{run}_bold.nii.gz" + img.to_filename(func_path) + events_path = func_dir / f"sub-01_task-mixedgamblestask_run-0{run}_events.tsv" + events_path.write_text("onset\tduration\ttrial_type\n") return bids_dir @@ -82,7 +105,6 @@ def _make_params( cifti_output, run_msmsulc, skull_strip_t1w, - use_syn_sdc, freesurfer, ignore, force, @@ -139,7 +161,6 @@ def test_init_petprep_wf( cifti_output: bool | str, run_msmsulc: bool, skull_strip_t1w: str, - use_syn_sdc: str | bool, freesurfer: bool, ignore: list[str], force: list[str], From cf72706db7195c34744b1f8d0ff3016815b9bbc6 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 11:23:45 +0200 Subject: [PATCH 50/82] FIX: SDC related stuff --- petprep/workflows/tests/test_base.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index d96671b7..bf9795b7 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -87,7 +87,6 @@ def _make_params( cifti_output: bool | str = False, run_msmsulc: bool = True, skull_strip_t1w: str = 'auto', - use_syn_sdc: str | bool = False, freesurfer: bool = True, ignore: list[str] = None, force: list[str] = None, @@ -121,7 +120,6 @@ def _make_params( 'cifti_output', 'run_msmsulc', 'skull_strip_t1w', - 'use_syn_sdc', 'freesurfer', 'ignore', 'force', From 8dd3360f033626d9102fc1981a449f2e47ccc43b Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 11:28:26 +0200 Subject: [PATCH 51/82] FIX: add PET query to base --- petprep/workflows/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/petprep/workflows/base.py b/petprep/workflows/base.py index 19538b05..6d435c87 100644 --- a/petprep/workflows/base.py +++ b/petprep/workflows/base.py @@ -204,6 +204,7 @@ def init_single_subject_wf(subject_id: str): queries = copy.deepcopy(DEFAULT_BIDS_QUERIES) queries['t1w'].pop('datatype', None) + queries['pet'] = {'datatype': 'pet', 'suffix': 'pet'} subject_data = collect_data( config.execution.bids_dir, From da5db440ff3e6435c42c6f7745b9366c96cf51e6 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 11:35:45 +0200 Subject: [PATCH 52/82] FIX: update query to grab PET data --- petprep/workflows/base.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/petprep/workflows/base.py b/petprep/workflows/base.py index 6d435c87..387b81c5 100644 --- a/petprep/workflows/base.py +++ b/petprep/workflows/base.py @@ -202,15 +202,23 @@ def init_single_subject_wf(subject_id: str): from niworkflows.utils.bids import DEFAULT_BIDS_QUERIES import copy - queries = copy.deepcopy(DEFAULT_BIDS_QUERIES) - queries['t1w'].pop('datatype', None) - queries['pet'] = {'datatype': 'pet', 'suffix': 'pet'} + custom_queries = { + 't1w': {'datatype': 'anat', 'suffix': 'T1w', 'part': ['mag', None]}, + 't2w': {'datatype': 'anat', 'suffix': 'T2w', 'part': ['mag', None]}, + 'flair': {'datatype': 'anat', 
'suffix': 'FLAIR', 'part': ['mag', None]}, + 'bold': {'datatype': 'func', 'suffix': 'bold', 'part': ['mag', None]}, + 'pet': {'datatype': 'pet', 'suffix': 'pet'}, + 'roi': {'datatype': 'anat', 'suffix': 'roi'}, + 'sbref': {'datatype': 'func', 'suffix': 'sbref', 'part': ['mag', None]}, + 'dwi': {'suffix': 'dwi'}, + 'asl': {'datatype': 'perf', 'suffix': 'asl'}, + } subject_data = collect_data( config.execution.bids_dir, subject_id, bids_filters=config.execution.bids_filters, - queries=queries, + queries=custom_queries, )[0] From 0ce3bbe58ef477ee7d8c5b73f16abcdb9d78502d Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 11:38:16 +0200 Subject: [PATCH 53/82] FIX: add PET json example --- .../tests/ds000005/sub-01/pet/sub-01_pet.json | 235 ++++++++++++++++++ 1 file changed, 235 insertions(+) create mode 100644 petprep/data/tests/ds000005/sub-01/pet/sub-01_pet.json diff --git a/petprep/data/tests/ds000005/sub-01/pet/sub-01_pet.json b/petprep/data/tests/ds000005/sub-01/pet/sub-01_pet.json new file mode 100644 index 00000000..c69fbb7d --- /dev/null +++ b/petprep/data/tests/ds000005/sub-01/pet/sub-01_pet.json @@ -0,0 +1,235 @@ +{ + "Manufacturer": "Siemens", + "ManufacturersModelName": "HR+", + "Units": "Bq/mL", + "BodyPart": "Brain", + "TracerName": "DASB", + "TracerRadionuclide": "C11", + "TracerMolecularWeight": 282.39, + "TracerMolecularWeightUnits": "g/mol", + "InjectedRadioactivity": 629.74, + "InjectedRadioactivityUnits": "MBq", + "MolarActivity": 55.5, + "MolarActivityUnits": "MBq/nmol", + "SpecificRadioactivity": 196.53670455752683, + "SpecificRadioactivityUnits": "MBq/ug", + "Purity": 99, + "ModeOfAdministration": "bolus", + "InjectedMass": 3.2041852, + "InjectedMassUnits": "ug", + "AcquisitionMode": "list mode", + "ImageDecayCorrected": true, + "ImageDecayCorrectionTime": 0, + "TimeZero": "17:28:40", + "ScanStart": 0, + "InjectionStart": 0, + "FrameDuration": [ + 20, + 20, + 20, + 60, + 60, + 60, + 120, + 120, + 120, + 300, + 300.066, + 600, + 600, + 600, + 600, + 600, + 600, + 600, + 600, + 600, + 600 + ], + "FrameTimesStart": [ + 0, + 20, + 40, + 60, + 120, + 180, + 240, + 360, + 480, + 600, + 900, + 1200.066, + 1800.066, + 2400.066, + 3000.066, + 3600.066, + 4200.066, + 4800.066, + 5400.066, + 6000.066, + 6600.066 + ], + "ReconMethodParameterLabels": [ + "lower_threshold", + "upper_threshold", + "recon_zoom" + ], + "ReconMethodParameterUnits": [ + "keV", + "keV", + "none" + ], + "ReconMethodParameterValues": [ + 0, + 650, + 3 + ], + "ScaleFactor": [ + 8.548972374455843e-08, + 1.7544691388593492e-07, + 1.3221580275057931e-07, + 1.2703590357432404e-07, + 1.1155360368775291e-07, + 2.2050951997698576e-07, + 2.184752503353593e-07, + 1.7056818535365892e-07, + 1.6606901453997125e-07, + 1.5532630470715958e-07, + 2.19175134930083e-07, + 2.0248222654117853e-07, + 2.277063231304055e-07, + 2.425933018912474e-07, + 2.3802238047210267e-07, + 2.514642005735368e-07, + 2.802861729378492e-07, + 2.797820570776821e-07, + 3.5299004252919985e-07, + 4.6313422785715375e-07, + 4.904185857412813e-07 + ], + "ScatterFraction": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ], + "DecayCorrectionFactor": [ + 1.0056782960891724, + 1.0171427726745605, + 1.0287377834320068, + 1.0522810220718384, + 1.0886797904968262, + 1.1263376474380493, + 1.1851094961166382, + 1.2685142755508423, + 1.3577889204025269, + 1.5278561115264893, + 1.811025857925415, + 2.328737735748291, + 3.271937131881714, + 4.597157001495361, + 6.459125518798828, + 
9.075239181518555, + 12.750947952270508, + 17.915414810180664, + 25.1716251373291, + 35.36678695678711, + 49.69125747680664 + ], + "PromptRate": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ], + "RandomRate": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ], + "SinglesRate": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ], + "ReconMethodName": "Vendor", + "ReconFilterType": [ + "Shepp 0.5", + "All-pass 0.4" + ], + "ReconFilterSize": [ + 2.5, + 2 + ], + "AttenuationCorrection": "transmission scan" +} From 4d89a6ed21b934ae13cf74d6f107978cdab7854f Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 11:44:50 +0200 Subject: [PATCH 54/82] FIX: update test_base to collect PET data --- petprep/workflows/tests/test_base.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index bf9795b7..f1fc7dd8 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -8,6 +8,9 @@ from nipype.pipeline.engine.utils import generate_expanded_graph from niworkflows.utils.testing import generate_bids_skeleton +from niworkflows.utils.bids import DEFAULT_BIDS_QUERIES +import copy + from ... import config from ..base import init_petprep_wf from ..tests import mock_config @@ -32,6 +35,12 @@ }, } +@pytest.fixture(scope='module') +def custom_queries(): + queries = copy.deepcopy(DEFAULT_BIDS_QUERIES) + queries['pet'] = {'datatype': 'pet', 'suffix': 'pet'} + return queries + @pytest.fixture(scope='module', autouse=True) def _quiet_logger(): @@ -163,6 +172,7 @@ def test_init_petprep_wf( ignore: list[str], force: list[str], bids_filters: dict, + custom_queries: dict, ): with mock_config(bids_dir=bids_root): config.workflow.level = level @@ -176,6 +186,13 @@ def test_init_petprep_wf( config.workflow.ignore = ignore config.workflow.force = force with patch.dict('petprep.config.execution.bids_filters', bids_filters): - wf = init_petprep_wf() + with patch('petprep.workflows.base.collect_data') as mock_collect_data: + mock_collect_data.return_value = collect_data( + config.execution.bids_dir, + '01', + bids_filters=config.execution.bids_filters, + queries=custom_queries, + ) + wf = init_petprep_wf() generate_expanded_graph(wf._create_flat_graph()) From a5372593bd3c519d97f11bde267bbf58033c47d3 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 11:50:04 +0200 Subject: [PATCH 55/82] FIX: update collect_data import --- petprep/workflows/tests/test_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index f1fc7dd8..4cf1c0e8 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -186,7 +186,7 @@ def test_init_petprep_wf( config.workflow.ignore = ignore config.workflow.force = force with patch.dict('petprep.config.execution.bids_filters', bids_filters): - with patch('petprep.workflows.base.collect_data') as mock_collect_data: + with patch('niworkflows.utils.bids.collect_data') as mock_collect_data: mock_collect_data.return_value = collect_data( config.execution.bids_dir, '01', From f7a26facb40cc350fcf81d3a1f081814d4358937 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 13:08:28 +0200 Subject: [PATCH 56/82] FIX: bump niworkflows and 
pybids versions --- pyproject.toml | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 41367eb0..cfc4c52b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ dependencies = [ "packaging >= 24", "pandas >= 1.2", "psutil >= 5.4", - "pybids >= 0.16", + "pybids >= 0.19.0", "requests >= 2.27", "smriprep >= 0.17.0", "tedana >= 23.0.2", diff --git a/requirements.txt b/requirements.txt index d28a4014..0e95f865 100644 --- a/requirements.txt +++ b/requirements.txt @@ -245,7 +245,7 @@ nitransforms==24.1.1 # via # fmriprep (pyproject.toml) # niworkflows -niworkflows==1.12.2 +niworkflows==1.13.3 # via # fmriprep (pyproject.toml) # smriprep From ac5d4f1aab1c173f11258c0e5d2df0dc92ea346e Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 13:10:15 +0200 Subject: [PATCH 57/82] FIX: bump version and niworkflows version --- petprep/_version.py | 4 ++-- pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/petprep/_version.py b/petprep/_version.py index 5ec978cb..96b0a202 100644 --- a/petprep/_version.py +++ b/petprep/_version.py @@ -17,5 +17,5 @@ __version_tuple__: VERSION_TUPLE version_tuple: VERSION_TUPLE -__version__ = version = '25.0.0.dev172+gf8fd378' -__version_tuple__ = version_tuple = (25, 0, 0, 'dev172', 'gf8fd378') +__version__ = version = '25.0.0.dev237+gf7a26fa.d20250605' +__version_tuple__ = version_tuple = (25, 0, 0, 'dev237', 'gf7a26fa.d20250605') diff --git a/pyproject.toml b/pyproject.toml index cfc4c52b..0c01b75c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ dependencies = [ "nireports >= 24.1.0", "nitime >= 0.9", "nitransforms >= 24.1.1", - "niworkflows >= 1.12.2", + "niworkflows >= 1.13.3", "numpy >= 1.24", "packaging >= 24", "pandas >= 1.2", From e756609bdde809e5703684fa9390d600b2723e26 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 13:14:21 +0200 Subject: [PATCH 58/82] FIX: update base and test --- petprep/_version.py | 4 ++-- petprep/workflows/base.py | 15 +++------------ petprep/workflows/tests/test_base.py | 19 +------------------ 3 files changed, 6 insertions(+), 32 deletions(-) diff --git a/petprep/_version.py b/petprep/_version.py index 96b0a202..dffee790 100644 --- a/petprep/_version.py +++ b/petprep/_version.py @@ -17,5 +17,5 @@ __version_tuple__: VERSION_TUPLE version_tuple: VERSION_TUPLE -__version__ = version = '25.0.0.dev237+gf7a26fa.d20250605' -__version_tuple__ = version_tuple = (25, 0, 0, 'dev237', 'gf7a26fa.d20250605') +__version__ = version = '25.0.0.dev238+gac5d4f1.d20250605' +__version_tuple__ = version_tuple = (25, 0, 0, 'dev238', 'gac5d4f1.d20250605') diff --git a/petprep/workflows/base.py b/petprep/workflows/base.py index 387b81c5..19538b05 100644 --- a/petprep/workflows/base.py +++ b/petprep/workflows/base.py @@ -202,23 +202,14 @@ def init_single_subject_wf(subject_id: str): from niworkflows.utils.bids import DEFAULT_BIDS_QUERIES import copy - custom_queries = { - 't1w': {'datatype': 'anat', 'suffix': 'T1w', 'part': ['mag', None]}, - 't2w': {'datatype': 'anat', 'suffix': 'T2w', 'part': ['mag', None]}, - 'flair': {'datatype': 'anat', 'suffix': 'FLAIR', 'part': ['mag', None]}, - 'bold': {'datatype': 'func', 'suffix': 'bold', 'part': ['mag', None]}, - 'pet': {'datatype': 'pet', 'suffix': 'pet'}, - 'roi': {'datatype': 'anat', 'suffix': 'roi'}, - 'sbref': {'datatype': 'func', 'suffix': 'sbref', 'part': ['mag', None]}, - 'dwi': {'suffix': 'dwi'}, - 'asl': {'datatype': 'perf', 'suffix': 'asl'}, - } + 
queries = copy.deepcopy(DEFAULT_BIDS_QUERIES) + queries['t1w'].pop('datatype', None) subject_data = collect_data( config.execution.bids_dir, subject_id, bids_filters=config.execution.bids_filters, - queries=custom_queries, + queries=queries, )[0] diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index 4cf1c0e8..bf9795b7 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -8,9 +8,6 @@ from nipype.pipeline.engine.utils import generate_expanded_graph from niworkflows.utils.testing import generate_bids_skeleton -from niworkflows.utils.bids import DEFAULT_BIDS_QUERIES -import copy - from ... import config from ..base import init_petprep_wf from ..tests import mock_config @@ -35,12 +32,6 @@ }, } -@pytest.fixture(scope='module') -def custom_queries(): - queries = copy.deepcopy(DEFAULT_BIDS_QUERIES) - queries['pet'] = {'datatype': 'pet', 'suffix': 'pet'} - return queries - @pytest.fixture(scope='module', autouse=True) def _quiet_logger(): @@ -172,7 +163,6 @@ def test_init_petprep_wf( ignore: list[str], force: list[str], bids_filters: dict, - custom_queries: dict, ): with mock_config(bids_dir=bids_root): config.workflow.level = level @@ -186,13 +176,6 @@ def test_init_petprep_wf( config.workflow.ignore = ignore config.workflow.force = force with patch.dict('petprep.config.execution.bids_filters', bids_filters): - with patch('niworkflows.utils.bids.collect_data') as mock_collect_data: - mock_collect_data.return_value = collect_data( - config.execution.bids_dir, - '01', - bids_filters=config.execution.bids_filters, - queries=custom_queries, - ) - wf = init_petprep_wf() + wf = init_petprep_wf() generate_expanded_graph(wf._create_flat_graph()) From 7554e2b54ff53a5f50fdc29046c7458445334ac8 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 13:27:59 +0200 Subject: [PATCH 59/82] FIX: update test_base --- petprep/workflows/tests/test_base.py | 32 +++++++++++++++++++++------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index bf9795b7..7142fdfc 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -12,6 +12,10 @@ from ..base import init_petprep_wf from ..tests import mock_config +from niworkflows.utils.bids import DEFAULT_BIDS_QUERIES +import copy +from niworkflows.utils.bids import collect_data + BASE_LAYOUT = { '01': { 'anat': [ @@ -33,6 +37,14 @@ } +@pytest.fixture(scope='module') +def custom_queries(): + queries = copy.deepcopy(DEFAULT_BIDS_QUERIES) + queries['pet'] = {'datatype': 'pet', 'suffix': 'pet'} + queries['t1w'].pop('datatype', None) + return queries + + @pytest.fixture(scope='module', autouse=True) def _quiet_logger(): import logging @@ -133,8 +145,6 @@ def _make_params( _make_params(force=['bbr']), _make_params(force=['no-bbr']), _make_params(pet2anat_init='header', force=['bbr']), - # Currently disabled - # _make_params(pet2anat_init="header", force=['no-bbr']), _make_params(medial_surface_nan=True), _make_params(cifti_output='91k'), _make_params(cifti_output='91k', run_msmsulc=False), @@ -143,10 +153,6 @@ def _make_params( _make_params(freesurfer=False), _make_params(freesurfer=False, force=['bbr']), _make_params(freesurfer=False, force=['no-bbr']), - # Currently unsupported: - # _make_params(freesurfer=False, pet2anat_init="header"), - # _make_params(freesurfer=False, pet2anat_init="header", force=['bbr']), - # _make_params(freesurfer=False, 
pet2anat_init="header", force=['no-bbr']), ], ) def test_init_petprep_wf( @@ -163,6 +169,7 @@ def test_init_petprep_wf( ignore: list[str], force: list[str], bids_filters: dict, + custom_queries: dict, ): with mock_config(bids_dir=bids_root): config.workflow.level = level @@ -175,7 +182,16 @@ def test_init_petprep_wf( config.workflow.run_reconall = freesurfer config.workflow.ignore = ignore config.workflow.force = force + with patch.dict('petprep.config.execution.bids_filters', bids_filters): - wf = init_petprep_wf() + with patch('petprep.workflows.base.collect_data') as mock_collect_data: + mock_collect_data.return_value = collect_data( + config.execution.bids_dir, + '01', + bids_filters=config.execution.bids_filters, + queries=custom_queries, + ) + + wf = init_petprep_wf() - generate_expanded_graph(wf._create_flat_graph()) + generate_expanded_graph(wf._create_flat_graph()) \ No newline at end of file From df86069e246d1035e77e1f158a844241539e799a Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 13:32:14 +0200 Subject: [PATCH 60/82] FIX: import niworkflows --- petprep/workflows/tests/test_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index 7142fdfc..2292153c 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -184,7 +184,7 @@ def test_init_petprep_wf( config.workflow.force = force with patch.dict('petprep.config.execution.bids_filters', bids_filters): - with patch('petprep.workflows.base.collect_data') as mock_collect_data: + with patch('niworkflows.utils.bids.collect_data') as mock_collect_data: mock_collect_data.return_value = collect_data( config.execution.bids_dir, '01', From 91a29a2a298a58e8bdb987a11b7656b97bcaab3f Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 13:38:06 +0200 Subject: [PATCH 61/82] FIX: update collect_data part --- petprep/workflows/tests/test_base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index 2292153c..e98c6690 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -185,7 +185,7 @@ def test_init_petprep_wf( with patch.dict('petprep.config.execution.bids_filters', bids_filters): with patch('niworkflows.utils.bids.collect_data') as mock_collect_data: - mock_collect_data.return_value = collect_data( + mock_collect_data = collect_data( config.execution.bids_dir, '01', bids_filters=config.execution.bids_filters, @@ -194,4 +194,4 @@ def test_init_petprep_wf( wf = init_petprep_wf() - generate_expanded_graph(wf._create_flat_graph()) \ No newline at end of file + generate_expanded_graph(wf._create_flat_graph()) From ac3372061a07495042fd90bfe7dcc1135113cd32 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 13:43:49 +0200 Subject: [PATCH 62/82] Update test_base.py --- petprep/workflows/tests/test_base.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index e98c6690..6566674a 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -182,16 +182,16 @@ def test_init_petprep_wf( config.workflow.run_reconall = freesurfer config.workflow.ignore = ignore config.workflow.force = force - - with patch.dict('petprep.config.execution.bids_filters', bids_filters): - with 
patch('niworkflows.utils.bids.collect_data') as mock_collect_data: - mock_collect_data = collect_data( - config.execution.bids_dir, - '01', - bids_filters=config.execution.bids_filters, - queries=custom_queries, - ) - - wf = init_petprep_wf() + + # Set the custom queries explicitly + with patch('niworkflows.utils.bids.collect_data') as mock_collect_data: + mock_collect_data.return_value = collect_data( + config.execution.bids_dir, + '01', + bids_filters=config.execution.bids_filters, + queries=custom_queries, + ) + + wf = init_petprep_wf() generate_expanded_graph(wf._create_flat_graph()) From b180c6e72b5690ce52e6fb1f39a457dc0db48309 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 13:45:44 +0200 Subject: [PATCH 63/82] FIX: remove collect_data part from test --- petprep/workflows/tests/test_base.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index 6566674a..e962845e 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -14,7 +14,6 @@ from niworkflows.utils.bids import DEFAULT_BIDS_QUERIES import copy -from niworkflows.utils.bids import collect_data BASE_LAYOUT = { '01': { @@ -182,16 +181,7 @@ def test_init_petprep_wf( config.workflow.run_reconall = freesurfer config.workflow.ignore = ignore config.workflow.force = force - - # Set the custom queries explicitly - with patch('niworkflows.utils.bids.collect_data') as mock_collect_data: - mock_collect_data.return_value = collect_data( - config.execution.bids_dir, - '01', - bids_filters=config.execution.bids_filters, - queries=custom_queries, - ) - + with patch.dict('petprep.config.execution.bids_filters', bids_filters): wf = init_petprep_wf() generate_expanded_graph(wf._create_flat_graph()) From 16ec28a09dd39d25b9db23325a1ff21c30609512 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 13:50:33 +0200 Subject: [PATCH 64/82] Update test_base.py --- petprep/workflows/tests/test_base.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index e962845e..0198f130 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -13,6 +13,7 @@ from ..tests import mock_config from niworkflows.utils.bids import DEFAULT_BIDS_QUERIES +from niworkflows.utils.bids import collect_data as original_collect_data import copy BASE_LAYOUT = { @@ -181,7 +182,17 @@ def test_init_petprep_wf( config.workflow.run_reconall = freesurfer config.workflow.ignore = ignore config.workflow.force = force - with patch.dict('petprep.config.execution.bids_filters', bids_filters): - wf = init_petprep_wf() - generate_expanded_graph(wf._create_flat_graph()) + with patch.dict('petprep.config.execution.bids_filters', bids_filters): + # Patch the correct function with the correct return value explicitly + with patch('niworkflows.utils.bids.collect_data') as mock_collect_data: + mock_collect_data.return_value = original_collect_data( + bids_root, + '01', + bids_filters=bids_filters, + queries=custom_queries, + ) + + wf = init_petprep_wf() + + generate_expanded_graph(wf._create_flat_graph()) \ No newline at end of file From 3da085ee481ac3df7dbea2b6c69e2d0fdc49bb3d Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 14:58:23 +0200 Subject: [PATCH 65/82] FIX: listify pet_series --- petprep/workflows/pet/base.py | 1 + petprep/workflows/pet/fit.py | 4 +++- 2 files 
changed, 4 insertions(+), 1 deletion(-) diff --git a/petprep/workflows/pet/base.py b/petprep/workflows/pet/base.py index 92286d15..daf415dc 100644 --- a/petprep/workflows/pet/base.py +++ b/petprep/workflows/pet/base.py @@ -150,6 +150,7 @@ def init_pet_wf( if precomputed is None: precomputed = {} + pet_series = listify(pet_series) pet_file = pet_series[0] petprep_dir = config.execution.petprep_dir diff --git a/petprep/workflows/pet/fit.py b/petprep/workflows/pet/fit.py index d1b516e6..7221addf 100644 --- a/petprep/workflows/pet/fit.py +++ b/petprep/workflows/pet/fit.py @@ -25,7 +25,7 @@ from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe from niworkflows.interfaces.header import ValidateImage - +from niworkflows.utils.connections import listify from ... import config from ...interfaces.reports import FunctionalSummary @@ -126,6 +126,7 @@ def init_pet_fit_wf( if precomputed is None: precomputed = {} + pet_series = listify(pet_series) layout = config.execution.layout pet_file = pet_series[0] @@ -458,6 +459,7 @@ def init_pet_native_wf( """ layout = config.execution.layout + pet_series = listify(pet_series) all_metadata = [layout.get_metadata(pet_file) for pet_file in pet_series] From ae8d4c53b09f07fff59d99707566f933cc8b4354 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 15:03:32 +0200 Subject: [PATCH 66/82] FIX: update fmriprep naming to petprep in resampling --- petprep/workflows/pet/resampling.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/petprep/workflows/pet/resampling.py b/petprep/workflows/pet/resampling.py index b36d663d..9c670920 100644 --- a/petprep/workflows/pet/resampling.py +++ b/petprep/workflows/pet/resampling.py @@ -311,7 +311,7 @@ def init_pet_fsLR_resampling_wf( from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.utility import KeySelect - from fmriprep.interfaces.workbench import VolumeToSurfaceMapping + from petprep.interfaces.workbench import VolumeToSurfaceMapping fslr_density = '32k' if grayord_density == '91k' else '59k' @@ -505,7 +505,7 @@ def init_pet_grayords_wf( """ from niworkflows.engine.workflows import LiterateWorkflow as Workflow - from fmriprep.interfaces import GeneratePetCifti + from petprep.interfaces import GeneratePetCifti import numpy as np workflow = Workflow(name=name) From 486a7426f365a92e41fb94f5b96969e82f8c852e Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 15:48:23 +0200 Subject: [PATCH 67/82] FIX: remove TR related parts from workflows --- petprep/interfaces/cifti.py | 7 ++-- petprep/workflows/pet/base.py | 2 +- petprep/workflows/pet/fit.py | 5 --- petprep/workflows/pet/outputs.py | 64 +---------------------------- petprep/workflows/pet/resampling.py | 16 +------- 5 files changed, 7 insertions(+), 87 deletions(-) diff --git a/petprep/interfaces/cifti.py b/petprep/interfaces/cifti.py index 6006c6ac..bef7bee5 100644 --- a/petprep/interfaces/cifti.py +++ b/petprep/interfaces/cifti.py @@ -21,8 +21,9 @@ class _GeneratePetCiftiInputSpec(BaseInterfaceInputSpec): usedefault=True, desc="CIFTI surface target space", ) - grayordinates = traits.Enum("91k", "170k", usedefault=True, desc="Final CIFTI grayordinates") - TR = traits.Float(mandatory=True, desc="Repetition time") + grayordinates = traits.Enum( + "91k", "170k", usedefault=True, desc="Final CIFTI grayordinates" + ) surface_pets = traits.List( File(exists=True), mandatory=True, @@ -43,7 +44,7 @@ def _run_interface(self, runtime): volume_labels, 
self.inputs.surface_pets, surface_labels, - self.inputs.TR, + 1.0, metadata, ) metadata_file = Path("pet.dtseries.json").absolute() diff --git a/petprep/workflows/pet/base.py b/petprep/workflows/pet/base.py index daf415dc..872b5ffb 100644 --- a/petprep/workflows/pet/base.py +++ b/petprep/workflows/pet/base.py @@ -165,7 +165,7 @@ def init_pet_wf( return config.loggers.workflow.debug( - 'Creating pet processing workflow for <%s> (%.2f GB / %d TRs). ' + 'Creating pet processing workflow for <%s> (%.2f GB / %d frames). ' 'Memory resampled/largemem=%.2f/%.2f GB.', pet_file, mem_gb['filesize'], diff --git a/petprep/workflows/pet/fit.py b/petprep/workflows/pet/fit.py index 7221addf..ab21ba68 100644 --- a/petprep/workflows/pet/fit.py +++ b/petprep/workflows/pet/fit.py @@ -193,11 +193,6 @@ def init_pet_fit_wf( config.loggers.workflow.debug('Reusing motion correction transforms: %s', hmc_xforms) timing_parameters = prepare_timing_parameters(metadata) - tr = timing_parameters.get('RepetitionTime') - if tr is None and 'VolumeTiming' in timing_parameters: - vt = timing_parameters['VolumeTiming'] - if len(vt) > 1 and np.allclose(np.diff(vt), np.diff(vt)[0]): - tr = float(np.diff(vt)[0]) summary = pe.Node( FunctionalSummary( diff --git a/petprep/workflows/pet/outputs.py b/petprep/workflows/pet/outputs.py index 04d9786e..4810945b 100644 --- a/petprep/workflows/pet/outputs.py +++ b/petprep/workflows/pet/outputs.py @@ -48,69 +48,13 @@ def prepare_timing_parameters(metadata: dict): Examples -------- - .. testsetup:: - - >>> from unittest import mock - - If SliceTiming metadata is absent, then the only change is to note that - STC has not been applied: - - >>> prepare_timing_parameters(dict(RepetitionTime=2)) - {'RepetitionTime': 2, 'SliceTimingCorrected': False} - >>> prepare_timing_parameters(dict(RepetitionTime=2, DelayTime=0.5)) - {'RepetitionTime': 2, 'DelayTime': 0.5, 'SliceTimingCorrected': False} - >>> prepare_timing_parameters(dict(VolumeTiming=[0.0, 1.0, 2.0, 5.0, 6.0, 7.0], - ... AcquisitionDuration=1.0)) #doctest: +NORMALIZE_WHITESPACE - {'VolumeTiming': [0.0, 1.0, 2.0, 5.0, 6.0, 7.0], 'AcquisitionDuration': 1.0, - 'SliceTimingCorrected': False} - - When SliceTiming is available and used, then ``SliceTimingCorrected`` is ``True`` - and the ``StartTime`` indicates a series offset. - - >>> with mock.patch("fmriprep.config.workflow.ignore", []): - ... prepare_timing_parameters(dict(RepetitionTime=2, SliceTiming=[0.0, 0.2, 0.4, 0.6])) - {'RepetitionTime': 2, 'SliceTimingCorrected': True, 'DelayTime': 1.2, 'StartTime': 0.3} - >>> with mock.patch("fmriprep.config.workflow.ignore", []): - ... prepare_timing_parameters( - ... dict(VolumeTiming=[0.0, 1.0, 2.0, 5.0, 6.0, 7.0], - ... SliceTiming=[0.0, 0.2, 0.4, 0.6, 0.8])) #doctest: +NORMALIZE_WHITESPACE - {'VolumeTiming': [0.0, 1.0, 2.0, 5.0, 6.0, 7.0], 'SliceTimingCorrected': True, - 'AcquisitionDuration': 1.0, 'StartTime': 0.4} - - When SliceTiming is available and not used, then ``SliceTimingCorrected`` is ``False`` - and TA is indicated with ``DelayTime`` or ``AcquisitionDuration``. - - >>> with mock.patch("fmriprep.config.workflow.ignore", ["slicetiming"]): - ... prepare_timing_parameters(dict(RepetitionTime=2, SliceTiming=[0.0, 0.2, 0.4, 0.6])) - {'RepetitionTime': 2, 'SliceTimingCorrected': False, 'DelayTime': 1.2} - >>> with mock.patch("fmriprep.config.workflow.ignore", ["slicetiming"]): - ... prepare_timing_parameters( - ... dict(VolumeTiming=[0.0, 1.0, 2.0, 5.0, 6.0, 7.0], - ... 
SliceTiming=[0.0, 0.2, 0.4, 0.6, 0.8])) #doctest: +NORMALIZE_WHITESPACE - {'VolumeTiming': [0.0, 1.0, 2.0, 5.0, 6.0, 7.0], 'SliceTimingCorrected': False, - 'AcquisitionDuration': 1.0} - - If SliceTiming metadata is present but empty, then treat it as missing: - - >>> with mock.patch("fmriprep.config.workflow.ignore", []): - ... prepare_timing_parameters(dict(RepetitionTime=2, SliceTiming=[])) - {'RepetitionTime': 2, 'SliceTimingCorrected': False} - >>> with mock.patch("fmriprep.config.workflow.ignore", []): - ... prepare_timing_parameters(dict(RepetitionTime=2, SliceTiming=[0.0])) - {'RepetitionTime': 2, 'SliceTimingCorrected': False} - - If ``RepetitionTime`` is not provided, ``FrameTimesStart`` and - ``FrameDuration`` will be used to compute ``VolumeTiming``: - >>> prepare_timing_parameters({'FrameTimesStart': [0, 2, 6], 'FrameDuration': [2, 4, 4]}) {'VolumeTiming': [0, 2, 6], 'AcquisitionDuration': [2, 4, 4], 'SliceTimingCorrected': False} """ timing_parameters = { key: metadata[key] for key in ( - 'RepetitionTime', 'VolumeTiming', - 'DelayTime', 'AcquisitionDuration', 'SliceTiming', 'FrameTimesStart', @@ -139,13 +83,7 @@ def prepare_timing_parameters(metadata: dict): if len(slice_timing) > 1: st = sorted(slice_timing) TA = st[-1] + (st[1] - st[0]) # Final slice onset + slice duration - # For constant TR paradigms, use DelayTime - if 'RepetitionTime' in timing_parameters: - TR = timing_parameters['RepetitionTime'] - if not np.isclose(TR, TA) and TA < TR: - timing_parameters['DelayTime'] = TR - TA - # For variable TR paradigms, use AcquisitionDuration - elif 'VolumeTiming' in timing_parameters: + if 'VolumeTiming' in timing_parameters: timing_parameters['AcquisitionDuration'] = TA if run_stc: diff --git a/petprep/workflows/pet/resampling.py b/petprep/workflows/pet/resampling.py index 9c670920..aaa07f3e 100644 --- a/petprep/workflows/pet/resampling.py +++ b/petprep/workflows/pet/resampling.py @@ -528,22 +528,8 @@ def init_pet_grayords_wf( name='outputnode', ) - timing_parameters = prepare_timing_parameters(metadata) - tr = timing_parameters.get('RepetitionTime') - if tr is None and 'VolumeTiming' in timing_parameters: - vt = timing_parameters['VolumeTiming'] - if len(vt) > 1: - diffs = np.diff(vt) - if np.allclose(diffs, diffs[0]): - tr = float(diffs[0]) - else: - tr = float(np.mean(diffs)) - gen_cifti = pe.Node( - GeneratePetCifti( - TR=tr, - grayordinates=grayord_density, - ), + GeneratePetCifti(grayordinates=grayord_density), name='gen_cifti', mem_gb=mem_gb, ) From 4538b2a8cf726bb96dfff23421687ba8a14e8a81 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 16:05:29 +0200 Subject: [PATCH 68/82] FIX: Fix InvalidVersion error in test_fsl6.py --- petprep/tests/test_fsl6.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/petprep/tests/test_fsl6.py b/petprep/tests/test_fsl6.py index 65b4a179..bae83d29 100644 --- a/petprep/tests/test_fsl6.py +++ b/petprep/tests/test_fsl6.py @@ -5,13 +5,19 @@ import templateflow.api as tf from nipype.interfaces import fsl from packaging.version import Version +import re fslversion = fsl.Info.version() +_fslver_num = None +if fslversion: + match = re.match(r"[0-9.]+", fslversion) + if match: + _fslver_num = match.group() TEMPLATE = tf.get('MNI152NLin2009cAsym', resolution=2, desc=None, suffix='T1w') -@pytest.mark.skipif(fslversion is None, reason='fsl required') -@pytest.mark.skipif(fslversion and Version(fslversion) < Version('6.0.0'), reason='FSL6 test') +@pytest.mark.skipif(_fslver_num is None, reason='fsl required') 
+@pytest.mark.skipif(_fslver_num and Version(_fslver_num) < Version('6.0.0'), reason='FSL6 test') @pytest.mark.parametrize( ('path_parent', 'filename'), [ From b62148c0dedd0f3596a6bb1cacaa2c1db81bb966 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 20:03:20 +0200 Subject: [PATCH 69/82] FIX: _version.py added to .gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 2ecb16a0..f4b56379 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,8 @@ __pycache__/ *.py[cod] *$py.class +*.pyo +*.pyd # Coverage reports .coverage @@ -29,3 +31,4 @@ dist/ # Additional .ipynb_checkpoints/ +_version.py From fda75439503d01275a12452382b89f28d2e67319 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 20:11:09 +0200 Subject: [PATCH 70/82] FIX: apply suggestions from code review --- pyproject.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 0c01b75c..7ecd23ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -137,6 +137,10 @@ addopts = [ "-ra", "--strict-config", "--strict-markers", + "--doctest-modules", + "--cov=petprep", + "--cov-report=xml", + "--cov-config=pyproject.toml", ] doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS" From d194eafe39f7cff9efc20d112b0c18147c6c95c1 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 20:36:06 +0200 Subject: [PATCH 71/82] FIX: Delete _slice_time_ref from parser.py --- petprep/cli/parser.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/petprep/cli/parser.py b/petprep/cli/parser.py index 5465e265..20e99968 100644 --- a/petprep/cli/parser.py +++ b/petprep/cli/parser.py @@ -130,21 +130,6 @@ def _bids_filter(value, parser): else: raise parser.error(f'Path does not exist: <{value}>.') - def _slice_time_ref(value, parser): - if value == 'start': - value = 0 - elif value == 'middle': - value = 0.5 - try: - value = float(value) - except ValueError: - raise parser.error( - f"Slice time reference must be number, 'start', or 'middle'. Received {value}." - ) from None - if not 0 <= value <= 1: - raise parser.error(f'Slice time reference must be in range 0-1. Received {value}.') - return value - def _reference_frame(value, parser): if value == 'average': return 'average' From 03aaa031857fda02542bc1acd3fc7c5a15ce6064 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 20:40:25 +0200 Subject: [PATCH 72/82] FIX: removed slice timing from outputs Removed handling of 'SliceTiming' information and dropped slice-timing correction logic. The function now simply reports 'SliceTimingCorrected': False and retains only relevant timing fields Simplified the unit test by removing the run_stc parameter and related configuration changes --- petprep/workflows/pet/outputs.py | 27 ++++--------------------- petprep/workflows/pet/tests/test_fit.py | 4 +--- 2 files changed, 5 insertions(+), 26 deletions(-) diff --git a/petprep/workflows/pet/outputs.py b/petprep/workflows/pet/outputs.py index 4810945b..18d0c2f2 100644 --- a/petprep/workflows/pet/outputs.py +++ b/petprep/workflows/pet/outputs.py @@ -37,13 +37,10 @@ def prepare_timing_parameters(metadata: dict): - """Convert initial timing metadata to post-realignment timing metadata + """Convert initial timing metadata to derivative timing parameters. - In particular, SliceTiming metadata is invalid once STC or any realignment is applied, - as a matrix of voxels no longer corresponds to an acquisition slice. 
- Therefore, if SliceTiming is present in the metadata dictionary, and a sparse - acquisition paradigm is detected, DelayTime or AcquisitionDuration must be derived to - preserve the timing interpretation. + Slice timing information is ignored and outputs will always indicate that + slice timing correction was not performed. Examples -------- @@ -56,15 +53,12 @@ def prepare_timing_parameters(metadata: dict): for key in ( 'VolumeTiming', 'AcquisitionDuration', - 'SliceTiming', 'FrameTimesStart', 'FrameDuration', ) if key in metadata } - # Treat SliceTiming of [] or length 1 as equivalent to missing and remove it in any case - slice_timing = timing_parameters.pop('SliceTiming', []) frame_times = timing_parameters.pop('FrameTimesStart', None) frame_duration = timing_parameters.pop('FrameDuration', None) @@ -77,20 +71,7 @@ def prepare_timing_parameters(metadata: dict): else: timing_parameters.setdefault('AcquisitionDuration', frame_duration) - run_stc = len(slice_timing) > 1 and 'slicetiming' not in config.workflow.ignore - timing_parameters['SliceTimingCorrected'] = run_stc - - if len(slice_timing) > 1: - st = sorted(slice_timing) - TA = st[-1] + (st[1] - st[0]) # Final slice onset + slice duration - if 'VolumeTiming' in timing_parameters: - timing_parameters['AcquisitionDuration'] = TA - - if run_stc: - first, last = st[0], st[-1] - frac = config.workflow.slice_time_ref - tzero = np.round(first + frac * (last - first), 3) - timing_parameters['StartTime'] = tzero + timing_parameters['SliceTimingCorrected'] = False return timing_parameters diff --git a/petprep/workflows/pet/tests/test_fit.py b/petprep/workflows/pet/tests/test_fit.py index 2c731e59..43f0f65b 100644 --- a/petprep/workflows/pet/tests/test_fit.py +++ b/petprep/workflows/pet/tests/test_fit.py @@ -108,12 +108,11 @@ def test_pet_fit_precomputes( @pytest.mark.parametrize('task', ['rest']) -@pytest.mark.parametrize('run_stc', [True, False]) + def test_pet_native_precomputes( bids_root: Path, tmp_path: Path, task: str, - run_stc: bool, ): """Test as many combinations of precomputed files and input configurations as possible.""" @@ -132,7 +131,6 @@ def test_pet_native_precomputes( img.to_filename(path) with mock_config(bids_dir=bids_root): - config.workflow.ignore = ['slicetiming'] if not run_stc else [] wf = init_pet_native_wf( pet_series=pet_series, omp_nthreads=1, From 8a293868f92cc699de4c7b7031ec69a4a9823290 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 20:42:59 +0200 Subject: [PATCH 73/82] FIX: Remove slice-timing correction references from documentation --- docs/faq.rst | 24 +----------------------- docs/outputs.rst | 19 ------------------- docs/workflows.rst | 6 ++---- 3 files changed, 3 insertions(+), 46 deletions(-) diff --git a/docs/faq.rst b/docs/faq.rst index 1a83e2fa..4e13fefa 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -321,26 +321,4 @@ finer control can be achieved of what files are visible to PETPrep. Note that any discrepancies between the pre-indexed database and the BIDS dataset complicate the provenance of PETPrep derivatives. If ``--bids-database-dir`` is used, the referenced directory should be -preserved for the sake of reporting and reproducibility. - -Error in slice timing correction: *insufficient length of BOLD data after discarding nonsteady-states* ------------------------------------------------------------------------------------------------------- -Typically, the scanner will be in a *nonsteady state* during a few initial time points of the acquisition, -until it stabilizes. 
-These *nonsteady states* (also called *dummy* scans) typically show greater T1 contrast and higher average -intensity, and therefore potentially are detrimental if used in the interpolation of slice timing corrections. -Hence, *nonsteady states* are discarded by the slice timing correction tool (in this case, AFNI's ``3dTShift``). -However, ``3dTShift`` requires that at least five (5) time points are present in the target series, after -dismissing the initial *nonsteady states*. - -*PETPrep* estimates the number of *nonsteady states* within the pipeline, unless the parameter is provided -by the user with the argument ``--dummy-scans ``. -Either way, if the number of *nonsteady states* is, say 4, then the length of the BOLD series must be greater -than 8. -If you encounter this error, first check that the number of *nonsteady states* is not suspiciously large -(it typically ranges from zero to five). -Next, if the number of *nonsteady states* is reasonable, consider why your BOLD time series are so short -and whether slice timing correction is appropriate under these conditions. -Finally, you can either skip the slice-timing correction with the argument ``--ignore slicetiming`` or -enforce a number of *nonsteady states* lower than the maximum for your data with ``--dummy-scans ``. -Please note that both strategies will apply to all tasks and runs that are to be processed. +preserved for the sake of reporting and reproducibility. \ No newline at end of file diff --git a/docs/outputs.rst b/docs/outputs.rst index 202bfdfd..05260910 100644 --- a/docs/outputs.rst +++ b/docs/outputs.rst @@ -363,25 +363,6 @@ Then the output will include:: These may then be used independently with multi-echo tools, such as `tedana`_, to perform more advanced denoising or alternative combination strategies. -.. danger:: - Slice timing correction in *PETPrep* is referenced to the middle slice by default, - which leads to a time shift in the volume onsets by 0.5 TR (repetition time). - For example, assuming a TR of 2s, original onsets of 0, 2, and 4s would be shifted - to 1, 3, and 5s, respectively. - In case you did execute slice timing correction, you must check that subsequent - analyses (e.g., general linear modeling) consider the right onset shifts. - For example, when specifying a first-level model, you should set parameters in your - software package or first-level model function accordingly (e.g., select the middle - slice as reference). - Alternatively, you could manually adjust the volume onsets (e.g. as mentioned in - the example above from [0, 2, 4] to [1, 3, 5]) or the event onsets accordingly. - In contrast to volume onsets, event onsets need to be shifted *backward* by half a TR, - for example, from [5, 10, 15] to [4, 9, 14]. - - Further information on this issue is found at - `this blog post (with thanks to Russell Poldrack and Jeanette Mumford) - `__. - Confounds --------- The :abbr:`BOLD (blood-oxygen level dependent)` signal measured with fMRI is a mixture of fluctuations diff --git a/docs/workflows.rst b/docs/workflows.rst index 157adc4f..281ffa39 100644 --- a/docs/workflows.rst +++ b/docs/workflows.rst @@ -5,9 +5,8 @@ Processing pipeline details =========================== *PETPrep* adapts its pipeline depending on what data and metadata are available and are used as the input. -For example, slice timing correction will be -performed only if the ``SliceTiming`` metadata field is found for the input -dataset. 
+Certain processing steps will run only when the required metadata is +available in the input dataset. A (very) high-level view of the simplest pipeline (for a single-band dataset with only one task, single-run, with no slice-timing information nor fieldmap acquisitions) @@ -571,7 +570,6 @@ Confounds estimation metadata={ "FrameTimesStart": [0, 2, 4, 6], "FrameDuration": [2, 2, 2, 2], - "SliceTiming": [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9], }, regressors_all_comps=False, regressors_dvars_th=1.5, From a7b6de9295fda9345be14e1f38e193556d513e3b Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 20:44:17 +0200 Subject: [PATCH 74/82] FIX: remove config from test_fit --- petprep/workflows/pet/tests/test_fit.py | 1 - 1 file changed, 1 deletion(-) diff --git a/petprep/workflows/pet/tests/test_fit.py b/petprep/workflows/pet/tests/test_fit.py index 43f0f65b..0b15931c 100644 --- a/petprep/workflows/pet/tests/test_fit.py +++ b/petprep/workflows/pet/tests/test_fit.py @@ -6,7 +6,6 @@ from nipype.pipeline.engine.utils import generate_expanded_graph from niworkflows.utils.testing import generate_bids_skeleton -from .... import config from ...tests import mock_config from ...tests.test_base import BASE_LAYOUT from ..fit import init_pet_fit_wf, init_pet_native_wf From 3de19fa618ed65092f13faf24dea3773775e3873 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Thu, 5 Jun 2025 21:03:38 +0200 Subject: [PATCH 75/82] FIX: [doctest] petprep.interfaces.workbench.MetricFillHoles --- petprep/interfaces/workbench.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/petprep/interfaces/workbench.py b/petprep/interfaces/workbench.py index e666830c..951a645d 100644 --- a/petprep/interfaces/workbench.py +++ b/petprep/interfaces/workbench.py @@ -569,7 +569,7 @@ class VolumeToSurfaceMapping(WBCommand, OpenMPCommandMixin): intended to be larger than where the cylinder cutoff should have been. 
Examples: - >>> from fmriprep.interfaces.workbench import VolumeToSurfaceMapping + >>> from petprep.interfaces.workbench import VolumeToSurfaceMapping >>> vol2surf = VolumeToSurfaceMapping() >>> vol2surf.inputs.volume_file = 'bold.nii.gz' >>> vol2surf.inputs.surface_file = 'lh.midthickness.surf.gii' @@ -659,7 +659,7 @@ class MetricMask(WBCommand): Examples - >>> from fmriprep.interfaces.workbench import MetricMask + >>> from petprep.interfaces.workbench import MetricMask >>> metric_mask = MetricMask() >>> metric_mask.inputs.in_file = 'lh.bold.func.gii' >>> metric_mask.inputs.mask = 'lh.roi.shape.gii' @@ -725,7 +725,7 @@ class MetricFillHoles(WBCommand): Examples - >>> from fmriprep.interfaces.workbench import MetricFillHoles + >>> from petprep.interfaces.workbench import MetricFillHoles >>> fill_holes = MetricFillHoles() >>> fill_holes.inputs.surface_file = 'lh.midthickness.surf.gii' >>> fill_holes.inputs.metric_file = 'lh.roi.shape.gii' @@ -792,7 +792,7 @@ class MetricRemoveIslands(WBCommand): Examples - >>> from fmriprep.interfaces.workbench import MetricRemoveIslands + >>> from petprep.interfaces.workbench import MetricRemoveIslands >>> remove_islands = MetricRemoveIslands() >>> remove_islands.inputs.surface_file = 'lh.midthickness.surf.gii' >>> remove_islands.inputs.metric_file = 'lh.roi.shape.gii' From 6d15a801dfb912e1770ef45e125de740ea174511 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Fri, 6 Jun 2025 09:04:03 +0200 Subject: [PATCH 76/82] FIX: remove coverage.yml --- .github/workflows/coverage.yml | 55 ---------------------------------- 1 file changed, 55 deletions(-) delete mode 100644 .github/workflows/coverage.yml diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml deleted file mode 100644 index dc159ce6..00000000 --- a/.github/workflows/coverage.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Code Coverage - -on: - push: - branches: - - main - pull_request: - branches: - - main - -jobs: - build: - runs-on: macos-latest - steps: - - name: Check out repository - uses: actions/checkout@v4 - - - name: Set up Conda - uses: conda-incubator/setup-miniconda@v2 - with: - miniconda-version: "latest" - activate-environment: petprep - environment-file: env.yml - auto-activate-base: true - - - name: Install dependencies - run: | - conda activate petprep - pip install -e . 
- shell: bash -l {0} - - - name: Install coverage and pytest - run: | - conda activate petprep - conda install -c conda-forge pytest coverage - shell: bash -l {0} - - - name: Run tests with coverage - run: | - conda activate petprep - pytest --cov=petprep --cov-report=xml - shell: bash -l {0} - - - name: Upload coverage report - uses: actions/upload-artifact@v4 - with: - name: coverage - path: coverage.xml - - - name: Upload coverage reports to Codecov - uses: codecov/codecov-action@v4 - with: - file: ./coverage.xml - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} From 3cdd5e0a8e32df9258872f52f4d73c84f46490f5 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Fri, 6 Jun 2025 09:13:18 +0200 Subject: [PATCH 77/82] FIX: update github tests.yml to be more in line with fmriprep --- .github/workflows/tests.yml | 82 +++++++++++++++++++++++++++++++++---- 1 file changed, 73 insertions(+), 9 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cbe18a62..0e66a0dc 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,4 +1,4 @@ -name: Tests +name: Stable tests on: push: @@ -8,18 +8,82 @@ on: branches: - main +defaults: + run: + shell: bash + +env: + FORCE_COLOR: true + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + jobs: - tests: - runs-on: ubuntu-latest + test: + runs-on: ${{ matrix.os }} strategy: matrix: - python-version: ["3.10", "3.11", "3.12"] + os: [ 'ubuntu-latest' ] + python-version: ['3.10', '3.11', '3.12', '3.13'] + dependencies: ['latest', 'pre'] + include: + - os: ubuntu-latest + python-version: '3.10' + dependencies: 'min' + env: + DEPENDS: ${{ matrix.dependencies }} steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + with: + submodules: recursive + fetch-depth: 0 + - uses: actions/cache@v4 + with: + path: ~/.cache/templateflow + key: templateflow-v1 + - name: Install dependencies + run: | + sudo apt update + sudo apt install -y --no-install-recommends graphviz + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v6 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - - run: pip install -e .[tests] - - run: pytest -sv - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v4 + - name: Display Python version + run: python -c "import sys; print(sys.version)" + - name: Install tox + run: | + uv tool install --with=tox-uv --with=tox-gh-actions tox + - name: Show tox config + run: tox c + - name: Run tox + run: tox -v --exit-and-dump-after 1200 + - uses: codecov/codecov-action@v5 + with: + token: ${{ secrets.CODECOV_TOKEN }} + if: ${{ always() }} + + checks: + runs-on: ubuntu-latest + continue-on-error: true + strategy: + matrix: + check: ['style', 'spellcheck'] + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v6 + - name: Install tox + run: uv tool install tox --with=tox-uv + - name: Show tox config + run: tox c -e ${{ matrix.check }} + - name: Run check + run: tox -e ${{ matrix.check }} From 5b68753ff069665daae85a8b4246a74c3dcf4cdd Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Fri, 6 Jun 2025 09:32:32 +0200 Subject: [PATCH 78/82] FIX: apply suggested changes from tox --- .tox/.pkg/file.lock | 0 env.yml | 2 +- notebooks/01 - Figure 2 and Figure SF3.ipynb | 101 ++++++++--------- notebooks/02 - Figure 3.ipynb | 36 +++--- 
notebooks/02 - Figure 4.ipynb | 24 ++-- notebooks/02 - Figure SF4.ipynb | 33 +++--- ...ion - FEAT Comparison (no smoothing).ipynb | 107 +++++++++--------- .../04 - Group_t_map_distributions.ipynb | 50 ++++---- ...ROMA confounds - issue-817 [J. Kent].ipynb | 35 +++--- petprep/cli/tests/test_parser.py | 1 - petprep/interfaces/__init__.py | 1 + petprep/interfaces/cifti.py | 33 +++--- petprep/interfaces/reports.py | 2 +- petprep/interfaces/tests/test_reports.py | 2 +- petprep/reports/core.py | 2 +- petprep/reports/tests/test_reports.py | 2 +- petprep/tests/test_fsl6.py | 4 +- petprep/utils/bids.py | 1 - petprep/utils/tests/test_derivative_cache.py | 4 +- petprep/workflows/base.py | 8 +- petprep/workflows/pet/confounds.py | 3 +- petprep/workflows/pet/fit.py | 3 +- petprep/workflows/pet/outputs.py | 1 - petprep/workflows/pet/reference.py | 2 +- petprep/workflows/pet/resampling.py | 2 +- petprep/workflows/pet/tests/test_confounds.py | 2 +- petprep/workflows/pet/tests/test_fit.py | 12 +- petprep/workflows/pet/tests/test_mem.py | 6 +- petprep/workflows/pet/tests/test_outputs.py | 30 ++--- petprep/workflows/pet/tests/test_pet_mask.py | 21 ++-- petprep/workflows/pet/tests/test_reference.py | 2 +- .../pet/tests/test_smooth_binarize.py | 2 +- petprep/workflows/tests/test_base.py | 33 +++--- 33 files changed, 274 insertions(+), 293 deletions(-) create mode 100644 .tox/.pkg/file.lock diff --git a/.tox/.pkg/file.lock b/.tox/.pkg/file.lock new file mode 100644 index 00000000..e69de29b diff --git a/env.yml b/env.yml index 13fc97b7..b961ada4 100644 --- a/env.yml +++ b/env.yml @@ -1,4 +1,4 @@ -name: fmriprep +name: petprep channels: - https://fsl.fmrib.ox.ac.uk/fsldownloads/fslconda/public/ - conda-forge diff --git a/notebooks/01 - Figure 2 and Figure SF3.ipynb b/notebooks/01 - Figure 2 and Figure SF3.ipynb index ba13df9b..8b6f3e7e 100644 --- a/notebooks/01 - Figure 2 and Figure SF3.ipynb +++ b/notebooks/01 - Figure 2 and Figure SF3.ipynb @@ -9,7 +9,8 @@ "outputs": [], "source": [ "import warnings\n", - "warnings.filterwarnings(\"ignore\")" + "\n", + "warnings.filterwarnings('ignore')" ] }, { @@ -33,12 +34,12 @@ "source": [ "#%matplotlib inline\n", "import os\n", - "import json\n", - "import pandas as pd\n", - "import glob\n", - "import numpy as np\n", "from pathlib import Path\n", + "\n", "import matplotlib as mpl\n", + "import numpy as np\n", + "import pandas as pd\n", + "\n", "mpl.use('pgf')\n", "\n", "import matplotlib.pyplot as plt\n", @@ -234,17 +235,17 @@ ], "source": [ "from collections import Counter\n", - "from matplotlib.colors import LinearSegmentedColormap, Normalize\n", + "\n", "import seaborn as sns\n", - "from scipy import stats\n", + "from matplotlib.colors import LinearSegmentedColormap, Normalize\n", "\n", "plt.clf()\n", "\n", - "sns.set_style(\"whitegrid\", {\n", + "sns.set_style('whitegrid', {\n", " 'ytick.major.size': 5,\n", " 'xtick.major.size': 5,\n", "})\n", - "sns.set_context(\"notebook\", font_scale=1)\n", + "sns.set_context('notebook', font_scale=1)\n", "\n", "pgf_with_custom_preamble = {\n", "# 'font.sans-serif': ['Helvetica Light'],\n", @@ -253,7 +254,7 @@ " 'pgf.rcfonts': False, # don't setup fonts from rc parameters\n", " 'pgf.texsystem': 'xelatex',\n", " 'verbose.level': 'debug-annoying',\n", - " \"pgf.preamble\": [\n", + " 'pgf.preamble': [\n", "# r'\\renewcommand{\\sfdefault}{phv}',\n", "# r'\\usepackage[scaled=.92]{helvet}',\n", " r'\\usepackage{fontspec}',\n", @@ -301,20 +302,20 @@ "# Plot qc1\n", "df0 = df[df.version.str.contains('1.0.0')]\n", "unique, counts = 
np.unique(df0[['overall']].values.ravel(), return_counts=True)\n", - "qc1_counts = dict(zip(unique, counts))\n", + "qc1_counts = dict(zip(unique, counts, strict=False))\n", "ax.scatter([1] * len(unique), unique, c='w', s=counts * factor, zorder=2)\n", "qc1_scatter = ax.scatter([1] * len(unique), unique, c=cm(norm(unique)), alpha=0.7, s=counts * factor, zorder=3)\n", "\n", - "print(dict(zip(unique, counts)))\n", + "print(dict(zip(unique, counts, strict=False)))\n", "\n", "# Plot qc2\n", "df1 = df[df.version.str.contains('1.0.7')]\n", "unique, counts = np.unique(df1[['overall']].values.ravel(), return_counts=True)\n", - "qc2_counts = dict(zip(unique, counts))\n", + "qc2_counts = dict(zip(unique, counts, strict=False))\n", "ax.scatter([2] * len(unique), unique, c='w', s=counts * factor, zorder=2)\n", "qc2_scatter = ax.scatter([2] * len(unique), unique, c=cm(norm(unique)), alpha=0.7, s=counts * factor, zorder=3)\n", "\n", - "print(dict(zip(unique, counts)))\n", + "print(dict(zip(unique, counts, strict=False)))\n", "\n", "ax.set(yticks=[0.0, 1.0, 2.0, 3.0])\n", "ax.set(yticklabels=['Critical', 'Poor', 'Acceptable', 'Excellent'])\n", @@ -329,14 +330,14 @@ " plt.scatter([],[], s=factor * 30, color='gray', edgecolors='none', alpha=.3)\n", "]\n", "\n", - "labels = [\"1 dataset\", \"4 datasets\", \"10 datasets\", \"30 datasets\"]\n", + "labels = ['1 dataset', '4 datasets', '10 datasets', '30 datasets']\n", "\n", "leg = plt.legend(sc, labels, ncol=1, frameon=False, fontsize=12,\n", " handlelength=2, loc=1, borderpad = 1.2,\n", " handletextpad=1, scatterpoints = 1,\n", " bbox_to_anchor=(2.0, 1.0))\n", "\n", - "sns.despine(offset=60, trim=True);\n", + "sns.despine(offset=60, trim=True)\n", "sns.despine(bottom=True)\n", "\n", "\n", @@ -360,7 +361,7 @@ "DS000108 and DS000148 \\\\textbf{improved the most} after addressing particular \\\n", "issues\"\"\"\n", "\n", - "annotfmt = r\"\\noindent\\parbox{{{0:.1f}cm}}{{\\raggedright \\textit{{{1}}}}}\".format\n", + "annotfmt = r'\\noindent\\parbox{{{0:.1f}cm}}{{\\raggedright \\textit{{{1}}}}}'.format\n", "# ax.annotate(\n", "# annotfmt(5.0, annot1),\n", "# xy=(2.05, 2), xycoords='data', xytext=(2.5, 1.9),\n", @@ -407,11 +408,8 @@ }, "outputs": [], "source": [ - "from scipy import stats\n", - "from seaborn.categorical import _CategoricalScatterPlotter\n", - "from seaborn.axisgrid import PairGrid, utils\n", "from matplotlib.colors import LinearSegmentedColormap, Normalize\n", - "from matplotlib import gridspec\n", + "from seaborn.axisgrid import PairGrid, utils\n", "\n", "# class PairGrid(Grid):\n", "# def __init__(self, data, hue=None, hue_order=None, palette=None,\n", @@ -420,7 +418,7 @@ "# despine=True, dropna=True, size=None):\n", "\n", "\n", - " \n", + "\n", "\n", "class MyPairGrid(PairGrid):\n", " def __init__(self, data, data0, hue=None, hue_order=None, palette=None,\n", @@ -438,7 +436,7 @@ " y_vars = list(vars)\n", " elif (x_vars is not None) or (y_vars is not None):\n", " if (x_vars is None) or (y_vars is None):\n", - " raise ValueError(\"Must specify `x_vars` and `y_vars`\")\n", + " raise ValueError('Must specify `x_vars` and `y_vars`')\n", " else:\n", " numeric_cols = self._find_numeric_cols(data)\n", " x_vars = numeric_cols\n", @@ -464,9 +462,9 @@ "\n", " fig, axes = plt.subplots(len(y_vars), len(x_vars),\n", " figsize=figsize,\n", - " sharex=\"col\", sharey=\"row\",\n", + " sharex='col', sharey='row',\n", " squeeze=False)\n", - " \n", + "\n", " self.fig = fig\n", " self.axes = axes\n", " self.data = data\n", @@ -481,8 +479,8 @@ " # Sort out the hue 
variable\n", " self._hue_var = hue\n", " if hue is None:\n", - " self.hue_names = [\"_nolegend_\"]\n", - " self.hue_vals = pd.Series([\"_nolegend_\"] * len(data),\n", + " self.hue_names = ['_nolegend_']\n", + " self.hue_vals = pd.Series(['_nolegend_'] * len(data),\n", " index=data.index)\n", " else:\n", " hue_names = utils.categorical_order(data[hue], hue_order)\n", @@ -502,14 +500,14 @@ " if despine:\n", " utils.despine(fig=fig)\n", " fig.tight_layout()\n", - " \n", - " \n", + "\n", + "\n", " data.index = data.dataset\n", " data0.index = data0.dataset\n", " self.data0 = data0.reindex(data.index)\n", " self.hue_names = None\n", " self.plots = None\n", - " \n", + "\n", " def map(self, func, **kwargs):\n", " \"\"\"Plot with the same function in every subplot.\n", " Parameters\n", @@ -518,7 +516,7 @@ " Must take x, y arrays as positional arguments and draw onto the\n", " \"currently active\" matplotlib Axes.\n", " \"\"\"\n", - " kw_color = kwargs.pop(\"color\", None)\n", + " kw_color = kwargs.pop('color', None)\n", "\n", " self.plots = []\n", " for j, x_var in enumerate(self.x_vars):\n", @@ -531,21 +529,21 @@ "\n", " self._clean_axis(ax)\n", " self._update_legend_data(ax)\n", - " \n", - " \n", + "\n", + "\n", "def stripplot(x=None, y=None, hue=None, data=None, order=None, hue_order=None,\n", " jitter=False, dodge=False, orient=None, color=None, palette=None,\n", - " size=5, edgecolor=\"gray\", linewidth=0, ax=None, **kwargs):\n", + " size=5, edgecolor='gray', linewidth=0, ax=None, **kwargs):\n", " plotter = _StripPlotter(x, y, hue, data, order, hue_order,\n", " jitter, dodge, orient, color, palette)\n", " if ax is None:\n", " ax = plt.gca()\n", "\n", - " kwargs.setdefault(\"zorder\", 3)\n", - " size = kwargs.get(\"s\", size)\n", + " kwargs.setdefault('zorder', 3)\n", + " size = kwargs.get('s', size)\n", " if linewidth is None:\n", " linewidth = size / 10\n", - " if edgecolor == \"gray\":\n", + " if edgecolor == 'gray':\n", " edgecolor = plotter.gray\n", " kwargs.update(dict(s=size ** 2,\n", " edgecolor=edgecolor,\n", @@ -557,7 +555,7 @@ "\n", "def mystripplot(x=None, x0=None, y=None, hue=None, data=None, order=None, hue_order=None,\n", " jitter=False, dodge=False, orient=None, color=None, palette=None,\n", - " size=5, edgecolor=\"gray\", linewidth=0, ax=None, **kwargs):\n", + " size=5, edgecolor='gray', linewidth=0, ax=None, **kwargs):\n", "\n", " if ax is None:\n", " ax = plt.gca()\n", @@ -566,7 +564,7 @@ " colors = ['red', 'goldenrod', 'green']\n", " cm = LinearSegmentedColormap.from_list('ratings', colors, N=50)\n", " norm = Normalize(vmin=0.5, vmax=3.0)\n", - " \n", + "\n", " y = range(len(y))[::-1]\n", " ecs = [cm(norm(v)) for v in x0.values.ravel()]\n", " ax.scatter(x0, y, c='w', linewidth=2, edgecolors=ecs, s=120, zorder=2, clip_on=False)\n", @@ -594,29 +592,30 @@ "import matplotlib.patches as mpatches\n", "from matplotlib.collections import PatchCollection\n", "\n", + "\n", "def gradient_patch(xy, width, height, cmap='viridis', colors=None, N=20, zorder=10):\n", " x0, y0 = xy\n", " if isinstance(colors, (list, tuple)):\n", " cmap = LinearSegmentedColormap.from_list('ratings', colors, N=50)\n", - " \n", + "\n", " patches = []\n", " levels = np.linspace(0.0, 1.0, N)\n", " elw = width / N\n", " for i, l in enumerate(levels):\n", - " rect = mpatches.Rectangle((x0 + i * elw, y0), elw, height, ec=\"none\", fc=cm(l), clip_on=False)\n", + " rect = mpatches.Rectangle((x0 + i * elw, y0), elw, height, ec='none', fc=cm(l), clip_on=False)\n", " patches.append(rect)\n", "\n", " return 
PatchCollection(patches, match_original=True, zorder=zorder, clip_on=False)\n", - " \n", "\n", - "class AnyObject(object):\n", + "\n", + "class AnyObject:\n", " def __init__(self, label):\n", " self.label = label\n", - " \n", + "\n", " def get_label(self):\n", " return self.label\n", "\n", - "class AnyObjectHandler(object):\n", + "class AnyObjectHandler:\n", " def legend_artist(self, legend, orig_handle, fontsize, handlebox):\n", " x0, y0 = handlebox.xdescent, handlebox.ydescent\n", " width, height = handlebox.width, handlebox.height\n", @@ -642,7 +641,7 @@ } ], "source": [ - "sns.set_context(\"notebook\", font_scale=2)\n", + "sns.set_context('notebook', font_scale=2)\n", "\n", "pgf_with_custom_preamble = {\n", " 'ytick.major.size': 0,\n", @@ -663,11 +662,11 @@ " height=25, aspect=.10)\n", "\n", "# Draw a dot plot using the stripplot function\n", - "g.map(mystripplot, size=15, orient=\"h\")\n", + "g.map(mystripplot, size=15, orient='h')\n", "\n", "# Use semantically meaningful titles for the columns\n", - "titles = [\"Overall\", \"Surf. recon.\", \"T1w ROIs\", \n", - " \"T1w to MNI\", \"BOLD ROIs\", \"BOLD to T1w\", \"SyN SDC\"]\n", + "titles = ['Overall', 'Surf. recon.', 'T1w ROIs',\n", + " 'T1w to MNI', 'BOLD ROIs', 'BOLD to T1w', 'SyN SDC']\n", "\n", "# Use the same x axis limits on all columns and add better labels\n", "g.set(xlim=(-0.2, 3.2), xlabel='', ylabel='')\n", @@ -676,12 +675,12 @@ "g.axes.flat[0].set(yticks=range(len(datasets))[::-1])\n", "g.axes.flat[0].set(yticklabels=datasets)\n", "g.axes.flat[0].tick_params(axis='y', which='major', pad=45)\n", - "for ax, title in zip(g.axes.flat, titles):\n", + "for ax, title in zip(g.axes.flat, titles, strict=False):\n", " # Set a different title for each axes\n", " ax.set(title=title)\n", "# ax.set(xlabel=title)\n", " ax.set(xticks=[])\n", - " \n", + "\n", " # Make the grid horizontal instead of vertical\n", " ax.xaxis.grid(False)\n", " ax.yaxis.grid(True)\n", diff --git a/notebooks/02 - Figure 3.ipynb b/notebooks/02 - Figure 3.ipynb index 53688c8f..1841fdc8 100644 --- a/notebooks/02 - Figure 3.ipynb +++ b/notebooks/02 - Figure 3.ipynb @@ -74,24 +74,22 @@ "# %autoreload 2\n", "# %matplotlib inline\n", "import os\n", - "from pathlib import Path\n", "import warnings\n", + "from pathlib import Path\n", "\n", - "import numpy as np\n", + "import matplotlib as mpl\n", "import nibabel as nb\n", + "import numpy as np\n", "import pandas as pd\n", "\n", - "import matplotlib as mpl\n", "mpl.use('pgf')\n", - "from matplotlib import pyplot as plt\n", - "from matplotlib import gridspec\n", - "import seaborn as sn\n", "import palettable\n", - "\n", - "from niworkflows.data import get_template\n", - "\n", - "from nilearn.image import concat_imgs, mean_img\n", + "import seaborn as sn\n", + "from matplotlib import gridspec\n", + "from matplotlib import pyplot as plt\n", "from nilearn import plotting\n", + "from nilearn.image import concat_imgs, mean_img\n", + "from niworkflows.data import get_template\n", "\n", "warnings.simplefilter('ignore')\n", "\n", @@ -145,7 +143,7 @@ " 'pgf.rcfonts': False, # don't setup fonts from rc parameters\n", " 'pgf.texsystem': 'xelatex',\n", " 'verbose.level': 'debug-annoying',\n", - " \"pgf.preamble\": [\n", + " 'pgf.preamble': [\n", " r\"\"\"\\usepackage{fontspec}\n", "\\setsansfont{HelveticaLTStd-Light}[\n", "Extension=.otf,\n", @@ -189,7 +187,7 @@ " if lazy:\n", " all_mus = [nb.load(str(f)) for f in pipe_home.glob(\n", " 'sub-*/func/sub-*_task-stopsignal_bold_space-MNI152NLin2009cAsym_avgpreproc.nii.gz')]\n", - " \n", + 
"\n", " if not all_mus:\n", " print('Generating means file')\n", " pipe_files = list(pipe_home.glob(\n", @@ -201,7 +199,7 @@ " sigma = np.percentile(data[meanmask], 50) / maskval\n", " data /= sigma\n", " all_mus.append(nb.Nifti1Image(data, mean.affine, mean.header))\n", - " \n", + "\n", " meannii = concat_imgs(all_mus, auto_resample=False)\n", " meannii.to_filename(str(pipe_mean))\n", " force = True\n", @@ -210,7 +208,7 @@ " print('Generating standard deviation map')\n", " meannii = nb.load(str(pipe_mean))\n", " nb.Nifti1Image(meannii.get_data().std(3), meannii.affine, meannii.header).to_filename(str(pipe_std))\n", - " \n", + "\n", " return pipe_mean, pipe_std" ] }, @@ -408,8 +406,8 @@ "# b_ax4.axis('off')\n", "\n", "\n", - "a_ax1.set_title('A', fontdict={'fontsize': 24}, loc='left', x=-0.2);\n", - "b_ax1.set_title('B', fontdict={'fontsize': 24}, loc='left');\n", + "a_ax1.set_title('A', fontdict={'fontsize': 24}, loc='left', x=-0.2)\n", + "b_ax1.set_title('B', fontdict={'fontsize': 24}, loc='left')\n", "\n", "plt.savefig(str(out_folder / 'figure03.pdf'),\n", " format='pdf', bbox_inches='tight', pad_inches=0.2, dpi=300)" @@ -437,13 +435,13 @@ "plotting.plot_anat('newfeat.nii.gz', cut_coords=coords, colorbar=True, cmap='cividis',\n", " threshold=thres, vmin=vmin, vmax=vmax, title='feat',\n", " axes=plt.subplot(2,2,1)\n", - ");\n", + ")\n", "plotting.plot_anat(str(fprep_std), cut_coords=coords, colorbar=True, cmap='cividis',\n", " threshold=thres, vmin=vmin, vmax=vmax, title='fmriprep',\n", " axes=plt.subplot(2,2,3)\n", - ");\n", + ")\n", "plotting.plot_glass_brain(str(feat_std), threshold=200, colorbar=True, title='feat',\n", - " axes=plt.subplot(2,2,2));\n", + " axes=plt.subplot(2,2,2))\n", "plotting.plot_glass_brain(str(fprep_std), threshold=200, colorbar=True, title='fmriprep',\n", " axes=plt.subplot(2,2,4));" ] diff --git a/notebooks/02 - Figure 4.ipynb b/notebooks/02 - Figure 4.ipynb index 0fcf36b7..36786b0e 100644 --- a/notebooks/02 - Figure 4.ipynb +++ b/notebooks/02 - Figure 4.ipynb @@ -74,24 +74,18 @@ "%autoreload 2\n", "# %matplotlib inline\n", "import os\n", - "from pathlib import Path\n", "import warnings\n", + "from pathlib import Path\n", "\n", - "import numpy as np\n", + "import matplotlib as mpl\n", "import nibabel as nb\n", - "import pandas as pd\n", + "import numpy as np\n", "\n", - "import matplotlib as mpl\n", "mpl.use('pgf')\n", + "from matplotlib import colors, gridspec\n", "from matplotlib import pyplot as plt\n", - "from matplotlib import gridspec, colors\n", - "import seaborn as sn\n", - "import palettable\n", - "\n", - "from niworkflows.data import get_template\n", - "\n", - "from nilearn.image import concat_imgs, mean_img\n", "from nilearn import plotting\n", + "from niworkflows.data import get_template\n", "\n", "warnings.simplefilter('ignore')\n", "\n", @@ -145,7 +139,7 @@ " 'pgf.rcfonts': False, # don't setup fonts from rc parameters\n", " 'pgf.texsystem': 'xelatex',\n", " 'verbose.level': 'debug-annoying',\n", - " \"pgf.preamble\": [\n", + " 'pgf.preamble': [\n", " r\"\"\"\\usepackage{fontspec}\n", "\\setsansfont{HelveticaLTStd-Light}[\n", "Extension=.otf,\n", @@ -241,13 +235,13 @@ " 'fMRIPrep',\n", " xy=(0., .5), xycoords='axes fraction', xytext=(-40, .0),\n", " textcoords='offset points', va='center', color='k', size=24,\n", - " rotation=90);\n", + " rotation=90)\n", "\n", "ax2.annotate(\n", " r'\\texttt{feat}',\n", " xy=(0., .5), xycoords='axes fraction', xytext=(-40, .0),\n", " textcoords='offset points', va='center', color='k', size=24,\n", - " 
rotation=90);\n", + " rotation=90)\n", "\n", "ax3 = fig.add_subplot(gs[3, 2])\n", "\n", @@ -269,7 +263,7 @@ "ax3.imshow(gradient, aspect='auto', cmap=cmap)\n", "ax3.set_title(r'\\noindent\\parbox{7.5cm}{\\centering\\textbf{Fraction of participants} \\\\ with significant response}',\n", " size=18, position=(0.5, 3.0))\n", - "ax3.xaxis.set_ticklabels(['80\\%', '25\\%', '25\\%', '80\\%'], size=20)\n", + "ax3.xaxis.set_ticklabels([r'80\\%', r'25\\%', r'25\\%', r'80\\%'], size=20)\n", "ax3.xaxis.set_ticks([0, th_index, cmap.N - th_index - 1, cmap.N])\n", "ax3.yaxis.set_ticklabels([])\n", "ax3.yaxis.set_ticks([])\n", diff --git a/notebooks/02 - Figure SF4.ipynb b/notebooks/02 - Figure SF4.ipynb index b1a4fa0d..930d1b96 100644 --- a/notebooks/02 - Figure SF4.ipynb +++ b/notebooks/02 - Figure SF4.ipynb @@ -12,22 +12,19 @@ "%autoreload 2\n", "# %matplotlib inline\n", "import os\n", - "from pathlib import Path\n", "import warnings\n", + "from pathlib import Path\n", "\n", - "import numpy as np\n", + "import matplotlib as mpl\n", "import nibabel as nb\n", - "import pandas as pd\n", + "import numpy as np\n", "\n", - "import matplotlib as mpl\n", "mpl.use('pgf')\n", - "from matplotlib import pyplot as plt\n", - "from matplotlib import gridspec\n", "import seaborn as sn\n", - "import palettable\n", - "\n", - "from nilearn.image import concat_imgs, mean_img\n", + "from matplotlib import gridspec\n", + "from matplotlib import pyplot as plt\n", "from nilearn import plotting\n", + "from nilearn.image import concat_imgs, mean_img\n", "\n", "warnings.simplefilter('ignore')\n", "\n", @@ -64,7 +61,7 @@ " 'pgf.rcfonts': False, # don't setup fonts from rc parameters\n", " 'pgf.texsystem': 'xelatex',\n", " 'verbose.level': 'debug-annoying',\n", - " \"pgf.preamble\": [\n", + " 'pgf.preamble': [\n", " r\"\"\"\\usepackage{fontspec}\n", "\\setsansfont{HelveticaLTStd-Light}[\n", "Extension=.otf,\n", @@ -107,7 +104,7 @@ " if lazy:\n", " all_mus = [nb.load(str(f)) for f in pipe_home.glob(\n", " 'sub-*/func/sub-*_task-stopsignal_bold_space-MNI152NLin2009cAsym_avgpreproc.nii.gz')]\n", - " \n", + "\n", " if not all_mus:\n", " print('Generating means file')\n", " pipe_files = list(pipe_home.glob(\n", @@ -119,7 +116,7 @@ " sigma = np.percentile(data[meanmask], 50) / maskval\n", " data /= sigma\n", " all_mus.append(nb.Nifti1Image(data, mean.affine, mean.header))\n", - " \n", + "\n", " meannii = concat_imgs(all_mus, auto_resample=False)\n", " meannii.to_filename(str(pipe_mean))\n", " force = True\n", @@ -128,7 +125,7 @@ " print('Generating standard deviation map')\n", " meannii = nb.load(str(pipe_mean))\n", " nb.Nifti1Image(meannii.get_data().std(3), meannii.affine, meannii.header).to_filename(str(pipe_std))\n", - " \n", + "\n", " return pipe_mean, pipe_std\n", "\n", "# Use the WM mask to normalize intensities of EPI means\n", @@ -405,19 +402,19 @@ " disp.add_contours('wm_clip.nii.gz', colors=['w'], levels=[0.8], linewidths=[1], alpha=0.7)\n", " disp.add_contours('bm_clip.nii.gz', colors=['k'], levels=[0.8], linewidths=[3], alpha=.7)\n", "# disp.annotate(size=42, left_right=False, positions=True, scalebar=False)\n", - " \n", + "\n", " ax1.axis('on')\n", " ax1.set_xticklabels([])\n", " ax1.set_xticks([])\n", " ax1.set_yticklabels([])\n", " ax1.set_yticks([])\n", - " ax1.set_ylabel(\"z = %d\" % coord, size=fs)\n", + " ax1.set_ylabel('z = %d' % coord, size=fs)\n", " for pos in ['top', 'bottom', 'left', 'right']:\n", " ax1.spines[pos].set_color(colorboxes[i])\n", " ax1.spines[pos].set_visible(True)\n", " 
ax1.spines[pos].set_linewidth(4)\n", " ax1.spines[pos].set_position(('outward', 2))\n", - " \n", + "\n", " ax2 = fig.add_subplot(inner_grid[i, 1])\n", " disp = plotting.plot_anat(\n", " 'feat_clip.nii.gz', display_mode='z', annotate=False,\n", @@ -426,7 +423,7 @@ " disp.add_contours('csf_clip.nii.gz', colors=['k'], levels=[0.8])\n", " disp.add_contours('wm_clip.nii.gz', colors=['w'], levels=[0.8], linewidths=[1], alpha=0.7)\n", " disp.add_contours('bm_clip.nii.gz', colors=['k'], levels=[0.8], linewidths=[3], alpha=.7)\n", - " \n", + "\n", " ax2.axis('on')\n", " ax2.set_xticklabels([])\n", " ax2.set_xticks([])\n", @@ -437,7 +434,7 @@ " ax2.spines[pos].set_visible(True)\n", " ax2.spines[pos].set_linewidth(4)\n", " ax2.spines[pos].set_position(('outward', 2))\n", - " \n", + "\n", " if i == 0:\n", " ax1.set_title('fMRIPrep', size=fs+2, position=(0.5, 1.02))\n", " ax2.set_title(r'\\texttt{feat}', size=fs+2, position=(0.5, 1.02))\n", diff --git a/notebooks/03 - Evaluation - FEAT Comparison (no smoothing).ipynb b/notebooks/03 - Evaluation - FEAT Comparison (no smoothing).ipynb index a8269e7e..0f5e468a 100644 --- a/notebooks/03 - Evaluation - FEAT Comparison (no smoothing).ipynb +++ b/notebooks/03 - Evaluation - FEAT Comparison (no smoothing).ipynb @@ -35,21 +35,17 @@ "source": [ "%matplotlib inline\n", "import os\n", - "from pathlib import Path\n", "import warnings\n", + "from pathlib import Path\n", "\n", - "import numpy as np\n", "import nibabel as nb\n", + "import numpy as np\n", + "import palettable\n", "import pandas as pd\n", - "\n", - "from nilearn.image import concat_imgs, mean_img\n", - "from nilearn import plotting\n", - "\n", - "import matplotlib as mpl\n", - "from matplotlib import pyplot as plt\n", - "from matplotlib import gridspec\n", "import seaborn as sn\n", - "import palettable\n", + "from matplotlib import pyplot as plt\n", + "from nilearn import plotting\n", + "from nilearn.image import concat_imgs, mean_img\n", "\n", "warnings.simplefilter('ignore')\n", "\n", @@ -90,7 +86,7 @@ " if lazy:\n", " all_mus = [nb.load(str(f)) for f in pipe_home.glob(\n", " 'sub-*/func/sub-*_task-stopsignal_bold_space-MNI152NLin2009cAsym_avgpreproc.nii.gz')]\n", - " \n", + "\n", " if not all_mus:\n", " print('Generating means file')\n", " pipe_files = list(pipe_home.glob(\n", @@ -102,7 +98,7 @@ " sigma = np.percentile(data[meanmask], 50) / maskval\n", " data /= sigma\n", " all_mus.append(nb.Nifti1Image(data, mean.affine, mean.header))\n", - " \n", + "\n", " meannii = concat_imgs(all_mus, auto_resample=False)\n", " meannii.to_filename(str(pipe_mean))\n", " force = True\n", @@ -111,7 +107,7 @@ " print('Generating standard deviation map')\n", " meannii = nb.load(str(pipe_mean))\n", " nb.Nifti1Image(meannii.get_data().std(3), meannii.affine, meannii.header).to_filename(str(pipe_std))\n", - " \n", + "\n", " return pipe_mean, pipe_std\n", "\n", "# Use the WM mask to normalize intensities of EPI means\n", @@ -171,13 +167,13 @@ "plotting.plot_anat(str(feat_std), cut_coords=coords, colorbar=True, cmap='cividis',\n", " threshold=thres, vmin=vmin, vmax=vmax, title='feat',\n", " axes=plt.subplot(2,2,1)\n", - ");\n", + ")\n", "plotting.plot_anat(str(fprep_std), cut_coords=coords, colorbar=True, cmap='cividis',\n", " threshold=thres, vmin=vmin, vmax=vmax, title='fmriprep',\n", " axes=plt.subplot(2,2,3)\n", - ");\n", + ")\n", "plotting.plot_glass_brain(str(feat_std), threshold=200, colorbar=True, title='feat',\n", - " axes=plt.subplot(2,2,2));\n", + " axes=plt.subplot(2,2,2))\n", 
"plotting.plot_glass_brain(str(fprep_std), threshold=200, colorbar=True, title='fmriprep',\n", " axes=plt.subplot(2,2,4));" ] @@ -246,13 +242,13 @@ "bg_images = {}\n", "preptpl = '{0}_task-stopsignal_bold_space-MNI152NLin2009cAsym_preproc.nii.gz'.format\n", "for pipeline in pipelines:\n", - " z11 = ANALYSIS_HOME / subject / 'func' / '{}_task-stopsignal_variant-{}_zstat11.nii.gz'.format(subject, pipeline)\n", + " z11 = ANALYSIS_HOME / subject / 'func' / f'{subject}_task-stopsignal_variant-{pipeline}_zstat11.nii.gz'\n", " images[pipeline] = str(z11)\n", - " \n", + "\n", " im = nb.load(str(home[pipeline] / subject / 'func' / preptpl(subject)))\n", " bg_images[pipeline] = nb.Nifti1Image(im.get_data().mean(3), im.affine, im.header)\n", "\n", - " \n", + "\n", "# Plot\n", "plt.clf()\n", "fig = plt.figure(figsize=(20,10))\n", @@ -312,7 +308,7 @@ "feat_vals = nb.load(images['fslfeat']).get_data()[mask2mm]\n", "sn.distplot(fprep_vals[np.abs(fprep_vals) > 1.68], label='fmriprep', kde=False, norm_hist=True)\n", "sn.distplot(feat_vals[np.abs(feat_vals) > 1.68], label='feat', kde=False, norm_hist=True)\n", - "plt.title(\"Distribution of Z-values of 1st level analysis - GO-StopSuccess contrast\")\n", + "plt.title('Distribution of Z-values of 1st level analysis - GO-StopSuccess contrast')\n", "plt.legend()" ] }, @@ -412,7 +408,7 @@ "plt.figure(figsize=(10,7))\n", "sn.distplot(fmriprep_vals,label='fmriprep')\n", "sn.distplot(feat_vals,label='feat')\n", - "plt.title(\"Distribution of Z-values of 1st level analysis - GO-StopSuccess contrast\")\n", + "plt.title('Distribution of Z-values of 1st level analysis - GO-StopSuccess contrast')\n", "plt.legend()" ] }, @@ -458,7 +454,7 @@ "sn.distplot(fmriprep_vals[mask2mm4d], label='fmriprep')\n", "mask2mm4d = np.repeat(mask2mm[..., np.newaxis], feat_vals.shape[-1], -1)\n", "sn.distplot(feat_vals[mask2mm4d], label='feat')\n", - "plt.title(\"Distribution of T-values of 1st level analysis - GO-StopSuccess contrast\")\n", + "plt.title('Distribution of T-values of 1st level analysis - GO-StopSuccess contrast')\n", "plt.legend()" ] }, @@ -479,8 +475,9 @@ }, "outputs": [], "source": [ - "from tempfile import TemporaryDirectory\n", "from shutil import copy\n", + "from tempfile import TemporaryDirectory\n", + "\n", "from nipype.algorithms.stats import ActivationCount as ACM\n", "\n", "cwd = os.getcwd()\n", @@ -520,10 +517,10 @@ } ], "source": [ - "plotting.plot_glass_brain(str(ANALYSIS_HOME / 'acm_feat.nii.gz'), \n", + "plotting.plot_glass_brain(str(ANALYSIS_HOME / 'acm_feat.nii.gz'),\n", " title='feat', vmin=-0.8, vmax=0.8, colorbar=True,\n", " cmap='RdYlBu_r', symmetric_cbar=True, plot_abs=False)\n", - "plotting.plot_glass_brain(str(ANALYSIS_HOME / 'acm_fpre.nii.gz'), \n", + "plotting.plot_glass_brain(str(ANALYSIS_HOME / 'acm_fpre.nii.gz'),\n", " title='fmriprep', vmin=-0.8, vmax=0.8, colorbar=True,\n", " cmap='RdYlBu_r', symmetric_cbar=True, plot_abs=False)\n", "plotting.show()" @@ -683,7 +680,7 @@ "source": [ "cut_coords = [-15, -8, 6, 30, 46, 62]\n", "plotting.plot_stat_map(str(group_dir / 'fslfeat_stopsignal_N120_R101_S0' / 'zstat1.nii.gz'),\n", - " title='feat-S0', threshold=2.98, bg_img=atlas, \n", + " title='feat-S0', threshold=2.98, bg_img=atlas,\n", " display_mode='z', cut_coords=cut_coords, vmax=14)\n", "plotting.plot_stat_map(str(group_dir / 'fslfeat_stopsignal_N120_R101_S1' / 'zstat1.nii.gz'),\n", " title='feat-S1', threshold=2.98, bg_img=atlas,\n", @@ -692,7 +689,7 @@ " title='fmriprep-S0', threshold=2.98, bg_img=atlas,\n", " display_mode='z', 
cut_coords=cut_coords, vmax=14)\n", "plotting.plot_stat_map(str(group_dir / 'fmriprep_stopsignal_N120_R101_S1' / 'zstat1.nii.gz'),\n", - " title='fmriprep-S1', threshold=2.98, bg_img=atlas, \n", + " title='fmriprep-S1', threshold=2.98, bg_img=atlas,\n", " display_mode='z', cut_coords=cut_coords, vmax=14)" ] }, @@ -813,27 +810,27 @@ ], "source": [ "cols = palettable.tableau.ColorBlind_10.hex_colors\n", - "sn.set_style(\"whitegrid\")\n", + "sn.set_style('whitegrid')\n", "\n", "plt.clf()\n", "fig = plt.figure(figsize=(20,8))\n", "plt.subplot(1,3,1)\n", - "sn.boxplot(x=\"N\", y=\"bdice\", hue='pipeline', hue_order=['fmriprep', 'fslfeat'],\n", + "sn.boxplot(x='N', y='bdice', hue='pipeline', hue_order=['fmriprep', 'fslfeat'],\n", " data=dataframe, palette=cols, linewidth=0.6)\n", "\n", - "plt.ylabel(\"Binary Dice\")\n", - "plt.xlabel(\"Sample size $N$\")\n", + "plt.ylabel('Binary Dice')\n", + "plt.xlabel('Sample size $N$')\n", "\n", "plt.subplot(1,3,2)\n", - "sn.boxplot(x=\"N\", y=\"fdice\", hue=\"pipeline\", hue_order=['fmriprep', 'fslfeat'],\n", + "sn.boxplot(x='N', y='fdice', hue='pipeline', hue_order=['fmriprep', 'fslfeat'],\n", " data=dataframe, palette=cols, linewidth=.6)\n", - "plt.ylabel(\"Fuzzy Dice\")\n", - "plt.xlabel(\"Sample size $N$\")\n", + "plt.ylabel('Fuzzy Dice')\n", + "plt.xlabel('Sample size $N$')\n", "plt.subplot(1,3,3)\n", - "sn.boxplot(x=\"N\", y=\"correlation\", hue=\"pipeline\", hue_order=['fmriprep', 'fslfeat'],\n", + "sn.boxplot(x='N', y='correlation', hue='pipeline', hue_order=['fmriprep', 'fslfeat'],\n", " data=dataframe, palette=cols, linewidth=.6)\n", - "plt.ylabel(\"Correlation\")\n", - "plt.xlabel(\"Sample size $N$\")" + "plt.ylabel('Correlation')\n", + "plt.xlabel('Sample size $N$')" ] }, { @@ -868,13 +865,13 @@ "for i in range(2, 201):\n", " feat0 = np.append(feat0, nb.load(str(group_dir / ('fslfeat_stopsignal_N120_R%03d_S0' % i) / 'tstat1.nii.gz')).get_data()[mask2mm])\n", " feat1 = np.append(feat1, nb.load(str(group_dir / ('fslfeat_stopsignal_N120_R%03d_S1' % i) / 'tstat1.nii.gz')).get_data()[mask2mm])\n", - " \n", + "\n", " fprep0 = np.append(fprep0, nb.load(str(group_dir / ('fmriprep_stopsignal_N120_R%03d_S0' % i) / 'tstat1.nii.gz')).get_data()[mask2mm])\n", " fprep1 = np.append(fprep1, nb.load(str(group_dir / ('fmriprep_stopsignal_N120_R%03d_S1' % i) / 'tstat1.nii.gz')).get_data()[mask2mm])\n", "\n", " feat2 = np.append(feat2, nb.load(str(group_dir / ('fslfeat_stopsignal_N010_R%03d_S0' % i) / 'tstat1.nii.gz')).get_data()[mask2mm])\n", " feat3 = np.append(feat3, nb.load(str(group_dir / ('fslfeat_stopsignal_N010_R%03d_S1' % i) / 'tstat1.nii.gz')).get_data()[mask2mm])\n", - " \n", + "\n", " fprep2 = np.append(fprep2, nb.load(str(group_dir / ('fmriprep_stopsignal_N010_R%03d_S0' % i) / 'tstat1.nii.gz')).get_data()[mask2mm])\n", " fprep3 = np.append(fprep3, nb.load(str(group_dir / ('fmriprep_stopsignal_N010_R%03d_S1' % i) / 'tstat1.nii.gz')).get_data()[mask2mm])" ] @@ -927,7 +924,7 @@ "feat_mask = np.abs(y) > 2\n", "print('feat: ', feat_mask.sum())\n", "\n", - "g = sn.jointplot(x[feat_mask], y[feat_mask], kind=\"hex\", stat_func=None, xlim=(-5, 5), ylim=(-5, 5))\n", + "g = sn.jointplot(x[feat_mask], y[feat_mask], kind='hex', stat_func=None, xlim=(-5, 5), ylim=(-5, 5))\n", "\n", "x = 0.5 * (fprep2 + fprep3)\n", "y = fprep2 - fprep3\n", @@ -935,7 +932,7 @@ "fprep_mask = np.abs(y) > 2\n", "print('fmriprep: ', fprep_mask.sum())\n", "\n", - "g = sn.jointplot(x[fprep_mask], y[fprep_mask], kind=\"hex\", stat_func=None, xlim=(-5, 5), ylim=(-5, 5))" + "g = 
sn.jointplot(x[fprep_mask], y[fprep_mask], kind='hex', stat_func=None, xlim=(-5, 5), ylim=(-5, 5))" ] }, { @@ -986,7 +983,7 @@ "feat_mask = np.abs(y) > 2\n", "print('feat: ', feat_mask.sum())\n", "\n", - "g = sn.jointplot(x[feat_mask], y[feat_mask], kind=\"hex\", stat_func=None, xlim=(-5, 5), ylim=(-5, 5))\n", + "g = sn.jointplot(x[feat_mask], y[feat_mask], kind='hex', stat_func=None, xlim=(-5, 5), ylim=(-5, 5))\n", "\n", "x = 0.5 * (fprep0 + fprep1)\n", "y = fprep0 - fprep1\n", @@ -994,7 +991,7 @@ "fprep_mask = np.abs(y) > 2\n", "print('fmriprep: ', fprep_mask.sum())\n", "\n", - "g = sn.jointplot(x[fprep_mask], y[fprep_mask], kind=\"hex\", stat_func=None, xlim=(-5, 5), ylim=(-5, 5))\n", + "g = sn.jointplot(x[fprep_mask], y[fprep_mask], kind='hex', stat_func=None, xlim=(-5, 5), ylim=(-5, 5))\n", "\n", "# plt.scatter(x, y)" ] @@ -1119,7 +1116,7 @@ "\n", "cut_coords = [-2,-4,-8,-10,-40,-45,-50,-55]\n", "plotting.plot_roi(str(ATLAS_HOME / 'l2-jd' / '2mm_atlas_rois.nii.gz'),\n", - " display_mode='x', cut_coords=cut_coords, \n", + " display_mode='x', cut_coords=cut_coords,\n", " cmap='rainbow', alpha=1, bg_img=atlas)" ] }, @@ -1258,7 +1255,7 @@ "for k, v in labels.items():\n", " zs_rois['feat'][v] = np.mean(feat_zs[atlas_data == k]) / sqrtN\n", " zs_rois['fmriprep'][v] = np.mean(fmriprep_zs[atlas_data == k]) / sqrtN\n", - " \n", + "\n", "pd.DataFrame(zs_rois)[['fmriprep', 'feat']]" ] }, @@ -1281,12 +1278,12 @@ "source": [ "import json\n", "\n", - "results = pd.read_csv(str(group_dir / \"group.csv\"), index_col=0)[\n", + "results = pd.read_csv(str(group_dir / 'group.csv'), index_col=0)[\n", " ['N', 'pipeline', 'IFG', 'PCG', 'STN', 'preSMA', 'repetition']]\n", "results.N = results.N.astype(int)\n", "results.repetition = results.repetition.astype(int)\n", "\n", - "with open(str(group_dir / \"tvals.json\")) as json_data:\n", + "with open(str(group_dir / 'tvals.json')) as json_data:\n", " allT = json.load(json_data)\n", " json_data.close()" ] @@ -1425,7 +1422,7 @@ } ], "source": [ - "sn.set_style(\"whitegrid\")\n", + "sn.set_style('whitegrid')\n", "\n", "plt.clf()\n", "fig = plt.figure(figsize=(20,8))\n", @@ -1433,30 +1430,30 @@ "xlim=[-0.6,0.6]\n", "\n", "reslong = pd.melt(results, id_vars=['pipeline','N'],\n", - " value_vars=labels.values(), var_name=\"ROI\", value_name=\"Cohen's D\")\n", + " value_vars=labels.values(), var_name='ROI', value_name=\"Cohen's D\")\n", "\n", "plt.subplot(1, 3, 1)\n", "samplesize = 120\n", - "sn.violinplot(x=\"Cohen's D\", y='ROI', hue='pipeline', \n", + "sn.violinplot(x=\"Cohen's D\", y='ROI', hue='pipeline',\n", " data=reslong[reslong.N==samplesize],\n", " split=True, inner='quartile')\n", - "plt.title(\"Distribution of effect sizes with samplesize %d\" % samplesize)\n", + "plt.title('Distribution of effect sizes with samplesize %d' % samplesize)\n", "plt.xlim(xlim)\n", "\n", "plt.subplot(1, 3, 2)\n", "samplesize = 50\n", - "sn.violinplot(x=\"Cohen's D\", y='ROI', hue='pipeline', \n", + "sn.violinplot(x=\"Cohen's D\", y='ROI', hue='pipeline',\n", " data=reslong[reslong.N==samplesize],\n", " split=True, inner='quartile')\n", - "plt.title(\"Distribution of effect sizes with samplesize %d\" % samplesize)\n", + "plt.title('Distribution of effect sizes with samplesize %d' % samplesize)\n", "plt.xlim(xlim)\n", "\n", "plt.subplot(1, 3, 3)\n", "samplesize = 10\n", - "sn.violinplot(x=\"Cohen's D\", y='ROI', hue='pipeline', \n", + "sn.violinplot(x=\"Cohen's D\", y='ROI', hue='pipeline',\n", " data=reslong[reslong.N==samplesize],\n", " split=True, inner='quartile')\n", - 
"plt.title(\"Distribution of effect sizes with samplesize %d\" % samplesize)\n", + "plt.title('Distribution of effect sizes with samplesize %d' % samplesize)\n", "plt.xlim(xlim);" ] }, diff --git a/notebooks/04 - Group_t_map_distributions.ipynb b/notebooks/04 - Group_t_map_distributions.ipynb index 9c8acbf0..962660af 100644 --- a/notebooks/04 - Group_t_map_distributions.ipynb +++ b/notebooks/04 - Group_t_map_distributions.ipynb @@ -9,7 +9,8 @@ "outputs": [], "source": [ "import warnings\n", - "warnings.filterwarnings(\"ignore\")" + "\n", + "warnings.filterwarnings('ignore')" ] }, { @@ -35,23 +36,16 @@ "source": [ "%matplotlib inline\n", "import os\n", - "import json\n", "from pathlib import Path\n", "\n", - "import numpy as np\n", + "import matplotlib as mpl\n", "import nibabel as nb\n", - "import pandas as pd\n", + "import numpy as np\n", "\n", - "import matplotlib as mpl\n", "mpl.use('pgf')\n", "\n", "import matplotlib.pyplot as plt\n", - "from matplotlib import gridspec\n", - "import seaborn as sn\n", - "import palettable\n", - "\n", - "from nilearn.image import concat_imgs, mean_img\n", - "from nilearn import plotting" + "import seaborn as sn" ] }, { @@ -93,11 +87,11 @@ "outputs": [], "source": [ "if 'inline' not in mpl.get_backend():\n", - " sn.set_style(\"whitegrid\", {\n", + " sn.set_style('whitegrid', {\n", " 'ytick.major.size': 0,\n", " 'xtick.major.size': 5,\n", " })\n", - " sn.set_context(\"notebook\", font_scale=1)\n", + " sn.set_context('notebook', font_scale=1)\n", "\n", " pgf_with_custom_preamble = {\n", " 'ytick.major.size': 0,\n", @@ -117,7 +111,7 @@ " 'pgf.rcfonts': False, # don't setup fonts from rc parameters\n", " 'pgf.texsystem': 'xelatex',\n", " 'verbose.level': 'debug-annoying',\n", - " \"pgf.preamble\": [\n", + " 'pgf.preamble': [\n", " # r'\\renewcommand{\\sfdefault}{phv}',\n", " # r'\\usepackage[scaled=.92]{helvet}',\n", " r\"\"\"\\usepackage{fontspec}\n", @@ -194,25 +188,25 @@ " for sample in list(range(2)):\n", " fprep_path = 'fmriprep_stopsignal_N050_R%03d_S%d' % (rep, sample)\n", " feat_path = 'fslfeat_stopsignal_N050_R%03d_S%d' % (rep, sample)\n", - " \n", + "\n", " group_dir = ANALYSIS_SMOOTHED_HOME / 'l2-jd'\n", " fprep_tvals, fprep_loc = extract_file(group_dir / fprep_path / 'tstat1.nii.gz', mask2mm)\n", " if fprep_tvals:\n", " means_t_fprep['smoothed'].append(fprep_loc)\n", " all_t_fprep['smoothed'] += fprep_tvals\n", - " \n", + "\n", " feat_tvals, feat_loc = extract_file(group_dir / feat_path / 'tstat1.nii.gz', mask2mm)\n", " if feat_tvals is not None:\n", " means_t_feat['smoothed'].append(feat_loc)\n", " all_t_feat['smoothed'] += feat_tvals\n", - " \n", + "\n", " group_dir = ANALYSIS_NOSMOOTH_HOME / 'l2-jd'\n", " fprep_tvals, fprep_loc = extract_file(group_dir / fprep_path / 'tstat1.nii.gz', mask2mm)\n", " if fprep_tvals is not None:\n", " means_t_fprep['nosmooth'].append(fprep_loc)\n", " all_t_fprep['nosmooth'] += fprep_tvals\n", - " \n", - " \n", + "\n", + "\n", " feat_tvals, feat_loc = extract_file(group_dir / feat_path / 'tstat1.nii.gz', mask2mm)\n", " if feat_tvals:\n", " means_t_feat['nosmooth'].append(feat_loc)\n", @@ -293,7 +287,7 @@ "sn.distplot(all_t_feat['nosmooth'][all_t_feat['nosmooth'] != 0],\n", "# sn.distplot(np.random.choice(all_t_feat['nosmooth'][all_t_feat['nosmooth'] != 0], replace=False, size=1000),\n", " label=r'\\texttt{feat}', ax=ax1, norm_hist=True, kde=True)\n", - "ax1.set_title('No smoothing');\n", + "ax1.set_title('No smoothing')\n", "ax1.set_xlim((-7.5, 7.5))\n", "ax1.set_ylim((0, 0.35))\n", "ax1.set_xticks([-2.98, 0, 
2.98])\n", @@ -311,10 +305,10 @@ "sn.distplot(all_t_feat['smoothed'][all_t_feat['smoothed'] != 0],\n", "# sn.distplot(np.random.choice(all_t_feat['smoothed'][all_t_feat['smoothed'] != 0], replace=False, size=1000),\n", " label=r'\\texttt{feat}', ax=ax2, norm_hist=True, kde=True)\n", - "ax2.set_title('FWHM = 5.0mm');\n", + "ax2.set_title('FWHM = 5.0mm')\n", "ax2.set_xlim((-7.5, 7.5))\n", "ax2.set_ylim((0, 0.35))\n", - "ax2.set_yticklabels(['20\\%', '30\\%'])\n", + "ax2.set_yticklabels([r'20\\%', r'30\\%'])\n", "ax2.set_yticks([0.20, 0.30])\n", "ax2.set_xticks([-2.98, 0, 2.98])\n", "ax2.set_xticklabels(['-2.98', '0', '2.98'])\n", @@ -385,7 +379,7 @@ "diff = all_t_feat['nosmooth'][nonzero] - all_t_fprep['nosmooth'][nonzero]\n", "plt.scatter(avgs[nonzero][idxs], diff[idxs], alpha=0.05)\n", "\n", - "g = sn.jointplot(avgs[nonzero][idxs], diff[idxs], kind=\"kde\", height=7, space=0)" + "g = sn.jointplot(avgs[nonzero][idxs], diff[idxs], kind='kde', height=7, space=0)" ] }, { @@ -423,7 +417,7 @@ "nonzero = np.abs(avgs) > 2.0\n", "idxs = np.random.choice(list(range(len(avgs[nonzero]))), replace=False, size=5000)\n", "diff = all_t_feat['nosmooth'][nonzero] - all_t_fprep['nosmooth'][nonzero]\n", - "g = sn.jointplot(avgs[nonzero][idxs], diff[idxs], kind=\"kde\", height=7, space=0)" + "g = sn.jointplot(avgs[nonzero][idxs], diff[idxs], kind='kde', height=7, space=0)" ] }, { @@ -474,7 +468,7 @@ "diff = all_t_feat['smoothed'][nonzero] - all_t_fprep['smoothed'][nonzero]\n", "plt.scatter(avgs[nonzero][idxs], diff[idxs], alpha=0.05)\n", "\n", - "g = sn.jointplot(avgs[nonzero][idxs], diff[idxs], kind=\"kde\", height=7, space=0)" + "g = sn.jointplot(avgs[nonzero][idxs], diff[idxs], kind='kde', height=7, space=0)" ] }, { @@ -505,7 +499,7 @@ "nonzero = np.abs(avgs) > 2.0\n", "idxs = np.random.choice(list(range(len(avgs[nonzero]))), replace=False, size=5000)\n", "diff = all_t_feat['smoothed'][nonzero] - all_t_fprep['smoothed'][nonzero]\n", - "g = sn.jointplot(avgs[nonzero][idxs], diff[idxs], kind=\"kde\", height=7, space=0)" + "g = sn.jointplot(avgs[nonzero][idxs], diff[idxs], kind='kde', height=7, space=0)" ] }, { @@ -561,13 +555,13 @@ "ax1 = plt.subplot(1, 2, 1)\n", "sn.distplot(means_t_fprep['nosmooth'], label='fmriprep', ax=ax1)\n", "sn.distplot(means_t_feat['nosmooth'], label=r'\\texttt{feat}', ax=ax1)\n", - "ax1.set_title('Without smoothing');\n", + "ax1.set_title('Without smoothing')\n", "\n", "\n", "ax2 = plt.subplot(1, 2, 2)\n", "sn.distplot(means_t_fprep['smoothed'], label='fmriprep', ax=ax2)\n", "sn.distplot(means_t_feat['smoothed'], label=r'\\texttt{feat}', ax=ax2)\n", - "ax2.set_title('With smoothing');\n", + "ax2.set_title('With smoothing')\n", "plt.legend()" ] }, diff --git a/notebooks/05 - Discussion AROMA confounds - issue-817 [J. Kent].ipynb b/notebooks/05 - Discussion AROMA confounds - issue-817 [J. Kent].ipynb index 9755b999..7eb20aca 100644 --- a/notebooks/05 - Discussion AROMA confounds - issue-817 [J. Kent].ipynb +++ b/notebooks/05 - Discussion AROMA confounds - issue-817 [J. 
Kent].ipynb @@ -24,8 +24,9 @@ "outputs": [], "source": [ "%matplotlib inline\n", - "import numpy as np\n", "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "\n", "plt.style.use('ggplot')\n", "plt.rcParams['figure.figsize'] = [15, 5]\n", "import seaborn as sns\n", @@ -44,7 +45,7 @@ "tp = 200\n", "# create array that contains 200 elements\n", "ix = np.arange(tp)\n", - " \n", + "\n", "# 0.2 hz\n", "signal1 = np.sin(2*np.pi*ix*(4/tp))\n", "# 0.035 hz\n", @@ -455,15 +456,15 @@ "for _ in range(1000):\n", " random_signal1 = np.random.rand(200)\n", " random_signal2 = np.random.rand(200)\n", - " \n", + "\n", " random_motion1 = np.random.rand(200)\n", " random_motion2 = np.random.rand(200)\n", - " \n", + "\n", " random_csf = (np.random.rand(200) + random_motion1) / 2\n", - " \n", - " \n", + "\n", + "\n", " random_bold = random_signal1 + random_signal2 + random_motion1 + random_motion2 + random_csf\n", - " \n", + "\n", " # non-aggresive denoising\n", " Rdesign = np.ones((200, 5))\n", " Rdesign[:, 1] = random_signal1\n", @@ -478,7 +479,7 @@ "\n", " Rnonagg_residual = np.dot(np.stack((random_motion1, random_motion2), axis=1), Rnoisemap)\n", " random_bold_denoised = random_bold - Rnonagg_residual\n", - " \n", + "\n", " # apply the same regression to the csf component\n", " # same as extracting csf after nonaggressive denoising\n", " random_csf_clean = random_csf - Rnonagg_residual\n", @@ -487,25 +488,25 @@ " Rcsf_clean_design = np.stack((np.ones(200), random_csf_clean), axis=1)\n", "\n", " Rcsf_design = np.stack((np.ones(200), random_csf), axis=1)\n", - " \n", - " Rcsf_clean_residual = np.dot(Rcsf_clean_design, \n", + "\n", + " Rcsf_clean_residual = np.dot(Rcsf_clean_design,\n", " np.dot(np.linalg.pinv(Rcsf_clean_design), random_bold_denoised))\n", - " \n", + "\n", " # remove the denoised csf residual\n", " random_bold_denoised_nuisance = random_bold_denoised - Rcsf_clean_residual\n", "\n", - " Rcsf_residual = np.dot(Rcsf_design, \n", + " Rcsf_residual = np.dot(Rcsf_design,\n", " np.dot(np.linalg.pinv(Rcsf_design), random_bold_denoised))\n", - " \n", + "\n", " # remove the raw csf residual\n", " random_bold_nuisance_denoised = random_bold_denoised - Rcsf_residual\n", - " \n", + "\n", " signal_total = random_signal1 + random_signal2\n", - " \n", + "\n", " denoised_nuisance_collector.append(np.corrcoef(signal_total, random_bold_denoised_nuisance))\n", - " \n", + "\n", " nuisance_denoised_collector.append(np.corrcoef(signal_total, random_bold_nuisance_denoised))\n", - " \n", + "\n", "mean_denoised_nuisance_correlation = np.mean(np.array(denoised_nuisance_collector), axis=0)\n", "mean_nuisance_denoised_correlation = np.mean(np.array(nuisance_denoised_collector), axis=0)\n", "\n", diff --git a/petprep/cli/tests/test_parser.py b/petprep/cli/tests/test_parser.py index cf080cd9..8525ca0a 100644 --- a/petprep/cli/tests/test_parser.py +++ b/petprep/cli/tests/test_parser.py @@ -23,7 +23,6 @@ """Test parser.""" from argparse import ArgumentError -from contextlib import nullcontext import pytest from packaging.version import Version diff --git a/petprep/interfaces/__init__.py b/petprep/interfaces/__init__.py index b30f3b29..60edf01b 100644 --- a/petprep/interfaces/__init__.py +++ b/petprep/interfaces/__init__.py @@ -4,6 +4,7 @@ from .cifti import GeneratePetCifti + class DerivativesDataSink(_DDSink): out_path_base = '' diff --git a/petprep/interfaces/cifti.py b/petprep/interfaces/cifti.py index bef7bee5..179ff2ef 100644 --- a/petprep/interfaces/cifti.py +++ b/petprep/interfaces/cifti.py @@ -1,33 
+1,38 @@ -from pathlib import Path import json +from pathlib import Path +from nipype.interfaces.base import ( + BaseInterfaceInputSpec, + File, + SimpleInterface, + traits, +) from niworkflows.interfaces.cifti import ( + _create_cifti_image, _GenerateCiftiOutputSpec, _prepare_cifti, - _create_cifti_image, ) -from nipype.interfaces.base import BaseInterfaceInputSpec, File, SimpleInterface, TraitedSpec, traits class _GeneratePetCiftiInputSpec(BaseInterfaceInputSpec): - pet_file = File(mandatory=True, exists=True, desc="input PET file") + pet_file = File(mandatory=True, exists=True, desc='input PET file') volume_target = traits.Enum( - "MNI152NLin6Asym", + 'MNI152NLin6Asym', usedefault=True, - desc="CIFTI volumetric output space", + desc='CIFTI volumetric output space', ) surface_target = traits.Enum( - "fsLR", + 'fsLR', usedefault=True, - desc="CIFTI surface target space", + desc='CIFTI surface target space', ) grayordinates = traits.Enum( - "91k", "170k", usedefault=True, desc="Final CIFTI grayordinates" + '91k', '170k', usedefault=True, desc='Final CIFTI grayordinates' ) surface_pets = traits.List( File(exists=True), mandatory=True, - desc="list of surface PET GIFTI files (length 2 with order [L,R])", + desc='list of surface PET GIFTI files (length 2 with order [L,R])', ) @@ -39,7 +44,7 @@ class GeneratePetCifti(SimpleInterface): def _run_interface(self, runtime): surface_labels, volume_labels, metadata = _prepare_cifti(self.inputs.grayordinates) - self._results["out_file"] = _create_cifti_image( + self._results['out_file'] = _create_cifti_image( self.inputs.pet_file, volume_labels, self.inputs.surface_pets, @@ -47,10 +52,10 @@ def _run_interface(self, runtime): 1.0, metadata, ) - metadata_file = Path("pet.dtseries.json").absolute() + metadata_file = Path('pet.dtseries.json').absolute() metadata_file.write_text(json.dumps(metadata, indent=2)) - self._results["out_metadata"] = str(metadata_file) + self._results['out_metadata'] = str(metadata_file) return runtime -__all__ = ("GeneratePetCifti",) \ No newline at end of file +__all__ = ('GeneratePetCifti',) diff --git a/petprep/interfaces/reports.py b/petprep/interfaces/reports.py index 37d7438c..137929c5 100644 --- a/petprep/interfaces/reports.py +++ b/petprep/interfaces/reports.py @@ -77,7 +77,7 @@ def get_world_pedir(orientation: str, pe_dir: str) -> str: start = _OPPOSITE[letter] end = letter - return f"{_ORI_TO_NAME[start]}-{_ORI_TO_NAME[end]}" + return f'{_ORI_TO_NAME[start]}-{_ORI_TO_NAME[end]}' SUBJECT_TEMPLATE = """\ \t
      diff --git a/petprep/interfaces/tests/test_reports.py b/petprep/interfaces/tests/test_reports.py index 0d7bf3a8..f02d146b 100644 --- a/petprep/interfaces/tests/test_reports.py +++ b/petprep/interfaces/tests/test_reports.py @@ -71,4 +71,4 @@ def test_subject_summary_handles_missing_task(tmp_path): segment = summary._generate_segment() assert 'Task: rest (1 run)' in segment - assert 'Task: (1 run)' in segment \ No newline at end of file + assert 'Task: (1 run)' in segment diff --git a/petprep/reports/core.py b/petprep/reports/core.py index 6898f193..942456a1 100644 --- a/petprep/reports/core.py +++ b/petprep/reports/core.py @@ -119,7 +119,7 @@ def generate_reports( # we separate the functional reports per session if session_list is None: all_filters = config.execution.bids_filters or {} - filters = all_filters.get("pet", all_filters.get("bold", {})) + filters = all_filters.get('pet', all_filters.get('bold', {})) session_list = config.execution.layout.get_sessions( subject=subject_label, **filters ) diff --git a/petprep/reports/tests/test_reports.py b/petprep/reports/tests/test_reports.py index 26fca42d..98c3befb 100644 --- a/petprep/reports/tests/test_reports.py +++ b/petprep/reports/tests/test_reports.py @@ -139,4 +139,4 @@ def test_pet_report(tmp_path, monkeypatch): html_file = tmp_path / 'sub-01.html' assert html_file.is_file() html_content = html_file.read_text() - assert '
      Path: - bids_dir = tmp_path / "bids" + bids_dir = tmp_path / 'bids' generate_bids_skeleton(bids_dir, BASE_LAYOUT) img = nb.Nifti1Image(np.zeros((2, 2, 2)), np.eye(4)) - for p in bids_dir.rglob("*.nii.gz"): + for p in bids_dir.rglob('*.nii.gz'): img.to_filename(p) return bids_dir def test_datasink_datatype(tmp_path: Path): bids_dir = _prep_bids(tmp_path) - out_dir = tmp_path / "out" + out_dir = tmp_path / 'out' with mock_config(bids_dir=bids_dir): - wf = init_ds_petref_wf(bids_root=bids_dir, output_dir=out_dir, desc="hmc") - assert wf.get_node("ds_petref").inputs.datatype == "pet" - wf = init_ds_petmask_wf(output_dir=out_dir, desc="brain") - assert wf.get_node("ds_petmask").inputs.datatype == "pet" + wf = init_ds_petref_wf(bids_root=bids_dir, output_dir=out_dir, desc='hmc') + assert wf.get_node('ds_petref').inputs.datatype == 'pet' + wf = init_ds_petmask_wf(output_dir=out_dir, desc='brain') + assert wf.get_node('ds_petmask').inputs.datatype == 'pet' wf = init_ds_pet_native_wf( bids_root=bids_dir, output_dir=out_dir, pet_output=True, all_metadata=[{}], ) - assert wf.get_node("ds_pet").inputs.datatype == "pet" + assert wf.get_node('ds_pet').inputs.datatype == 'pet' wf = init_ds_volumes_wf( bids_root=bids_dir, output_dir=out_dir, metadata={}, ) - assert wf.get_node("ds_pet").inputs.datatype == "pet" - assert wf.get_node("ds_ref").inputs.datatype == "pet" - assert wf.get_node("ds_mask").inputs.datatype == "pet" \ No newline at end of file + assert wf.get_node('ds_pet').inputs.datatype == 'pet' + assert wf.get_node('ds_ref').inputs.datatype == 'pet' + assert wf.get_node('ds_mask').inputs.datatype == 'pet' diff --git a/petprep/workflows/pet/tests/test_pet_mask.py b/petprep/workflows/pet/tests/test_pet_mask.py index 019ffa9c..e8706a65 100644 --- a/petprep/workflows/pet/tests/test_pet_mask.py +++ b/petprep/workflows/pet/tests/test_pet_mask.py @@ -2,13 +2,14 @@ import nibabel as nb import numpy as np -from niworkflows.utils.testing import generate_bids_skeleton import pytest +from niworkflows.utils.testing import generate_bids_skeleton from ...tests import mock_config from ...tests.test_base import BASE_LAYOUT from ..base import init_pet_wf + @pytest.fixture(scope='module') def bids_root(tmp_path_factory): base = tmp_path_factory.mktemp('petfit') @@ -22,32 +23,32 @@ def test_pet_mask_flow(bids_root: Path, tmp_path: Path): str(bids_root / 'sub-01' / 'pet' / 'sub-01_task-rest_run-1_pet.nii.gz') ] img = nb.Nifti1Image(np.zeros((2, 2, 2, 10)), np.eye(4)) - + for path in pet_series: img.to_filename(path) with mock_config(bids_dir=bids_root): wf = init_pet_wf( - pet_series=pet_series, + pet_series=pet_series, precomputed={} ) - assert wf is not None, "Workflow was not initialized." + assert wf is not None, 'Workflow was not initialized.' 
pet_fit_node = wf.get_node('pet_fit_wf') pet_confounds_node = wf.get_node('pet_confounds_wf') - assert pet_fit_node is not None, "pet_fit_wf node missing" - assert pet_confounds_node is not None, "pet_confounds_wf node missing" + assert pet_fit_node is not None, 'pet_fit_wf node missing' + assert pet_confounds_node is not None, 'pet_confounds_wf node missing' edge = wf._graph.get_edge_data(pet_fit_node, pet_confounds_node) - assert edge is not None, "Edge missing between pet_fit_wf and pet_confounds_wf" - + assert edge is not None, 'Edge missing between pet_fit_wf and pet_confounds_wf' + # Correct assertion: assert ('outputnode.pet_mask', 'inputnode.pet_mask') in edge['connect'] conf_edge = pet_confounds_node._graph.get_edge_data( pet_confounds_node.get_node('inputnode'), pet_confounds_node.get_node('dvars') ) - assert conf_edge is not None, "Confound edge is missing." - assert ('pet_mask', 'in_mask') in conf_edge['connect'] \ No newline at end of file + assert conf_edge is not None, 'Confound edge is missing.' + assert ('pet_mask', 'in_mask') in conf_edge['connect'] diff --git a/petprep/workflows/pet/tests/test_reference.py b/petprep/workflows/pet/tests/test_reference.py index 66c45784..abe36c22 100644 --- a/petprep/workflows/pet/tests/test_reference.py +++ b/petprep/workflows/pet/tests/test_reference.py @@ -24,4 +24,4 @@ def test_reference_frame_average(tmp_path): wf = init_raw_petref_wf(pet_file=str(pet_file), reference_frame='average') node_names = [n.name for n in wf._get_all_nodes()] - assert 'gen_avg' in node_names \ No newline at end of file + assert 'gen_avg' in node_names diff --git a/petprep/workflows/pet/tests/test_smooth_binarize.py b/petprep/workflows/pet/tests/test_smooth_binarize.py index 1d4073a6..a6df8209 100644 --- a/petprep/workflows/pet/tests/test_smooth_binarize.py +++ b/petprep/workflows/pet/tests/test_smooth_binarize.py @@ -16,4 +16,4 @@ def test_smooth_binarize_largest(tmp_path): out = _smooth_binarize(str(src), fwhm=0.0, thresh=0.5) result = nb.load(out).get_fdata() _, num = label(result > 0) - assert num == 1 \ No newline at end of file + assert num == 1 diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index 0198f130..32338769 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -1,21 +1,20 @@ +import copy +import json from pathlib import Path from unittest.mock import patch -import json import nibabel as nb import numpy as np import pytest from nipype.pipeline.engine.utils import generate_expanded_graph +from niworkflows.utils.bids import DEFAULT_BIDS_QUERIES +from niworkflows.utils.bids import collect_data as original_collect_data from niworkflows.utils.testing import generate_bids_skeleton from ... 
import config from ..base import init_petprep_wf from ..tests import mock_config -from niworkflows.utils.bids import DEFAULT_BIDS_QUERIES -from niworkflows.utils.bids import collect_data as original_collect_data -import copy - BASE_LAYOUT = { '01': { 'anat': [ @@ -65,30 +64,30 @@ def bids_root(tmp_path_factory): img = nb.Nifti1Image(np.zeros((10, 10, 10, 10)), np.eye(4)) # anat files - anat_dir = bids_dir / "sub-01" / "anat" + anat_dir = bids_dir / 'sub-01' / 'anat' anat_dir.mkdir(parents=True, exist_ok=True) - img.to_filename(anat_dir / "sub-01_T1w.nii.gz") - img.to_filename(anat_dir / "sub-01_inplaneT2.nii.gz") + img.to_filename(anat_dir / 'sub-01_T1w.nii.gz') + img.to_filename(anat_dir / 'sub-01_inplaneT2.nii.gz') # pet file - pet_dir = bids_dir / "sub-01" / "pet" + pet_dir = bids_dir / 'sub-01' / 'pet' pet_dir.mkdir(parents=True, exist_ok=True) - pet_path = pet_dir / "sub-01_pet.nii.gz" + pet_path = pet_dir / 'sub-01_pet.nii.gz' img.to_filename(pet_path) - + # Add metadata explicitly metadata = {} - json_path = pet_dir / "sub-01_pet.json" + json_path = pet_dir / 'sub-01_pet.json' json_path.write_text(json.dumps(metadata)) # func files (optional for PET workflow but included for consistency) - func_dir = bids_dir / "sub-01" / "func" + func_dir = bids_dir / 'sub-01' / 'func' func_dir.mkdir(parents=True, exist_ok=True) for run in range(1, 4): - func_path = func_dir / f"sub-01_task-mixedgamblestask_run-0{run}_bold.nii.gz" + func_path = func_dir / f'sub-01_task-mixedgamblestask_run-0{run}_bold.nii.gz' img.to_filename(func_path) - events_path = func_dir / f"sub-01_task-mixedgamblestask_run-0{run}_events.tsv" - events_path.write_text("onset\tduration\ttrial_type\n") + events_path = func_dir / f'sub-01_task-mixedgamblestask_run-0{run}_events.tsv' + events_path.write_text('onset\tduration\ttrial_type\n') return bids_dir @@ -195,4 +194,4 @@ def test_init_petprep_wf( wf = init_petprep_wf() - generate_expanded_graph(wf._create_flat_graph()) \ No newline at end of file + generate_expanded_graph(wf._create_flat_graph()) From 2386e087bdc13cfd42ace266adcf2fc9dcafd1d3 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Fri, 6 Jun 2025 09:32:51 +0200 Subject: [PATCH 79/82] FIX: bump niworkflows version to 1.13.4 --- pyproject.toml | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7ecd23ff..4fe4ff23 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ dependencies = [ "nireports >= 24.1.0", "nitime >= 0.9", "nitransforms >= 24.1.1", - "niworkflows >= 1.13.3", + "niworkflows >= 1.13.4", "numpy >= 1.24", "packaging >= 24", "pandas >= 1.2", diff --git a/requirements.txt b/requirements.txt index 0e95f865..dc171a51 100644 --- a/requirements.txt +++ b/requirements.txt @@ -245,7 +245,7 @@ nitransforms==24.1.1 # via # fmriprep (pyproject.toml) # niworkflows -niworkflows==1.13.3 +niworkflows==1.13.4 # via # fmriprep (pyproject.toml) # smriprep From 8e81cf9030b70bec679c2621b5bcf0b1304fab62 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Fri, 6 Jun 2025 11:40:54 +0200 Subject: [PATCH 80/82] FIX: update fmriprep naming to petprep --- petprep/cli/run.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/petprep/cli/run.py b/petprep/cli/run.py index 3509515c..5fd745a6 100644 --- a/petprep/cli/run.py +++ b/petprep/cli/run.py @@ -54,7 +54,7 @@ def main(): tracker.start() if 'pdb' in config.execution.debug: - from fmriprep.utils.debug import setup_exceptionhook + from petprep.utils.debug import 
setup_exceptionhook setup_exceptionhook() config.nipype.plugin = 'Linear' @@ -97,7 +97,7 @@ def main(): # CRITICAL Load the config from the file. This is necessary because the ``build_workflow`` # function executed constrained in a process may change the config (and thus the global - # state of fMRIPrep). + # state of PETPrep). config.load(config_file) if config.execution.reports_only: @@ -169,7 +169,7 @@ def main(): if config.environment.exec_env in ( 'singularity', 'docker', - 'fmriprep-docker', + 'petprep-docker', ): boiler_file = Path('') / boiler_file.relative_to( config.execution.output_dir @@ -205,7 +205,7 @@ def main(): config.loggers.workflow.log(25, f'Saving logs at: {config.execution.log_dir}') config.loggers.workflow.log(25, f'Carbon emissions: {emissions} kg') - from fmriprep.reports.core import generate_reports + from petprep.reports.core import generate_reports # Generate reports phase session_list = ( From 5ff1c0adbc0c59d3ba30ea241f7d715a26a55386 Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Fri, 6 Jun 2025 21:06:20 +0200 Subject: [PATCH 81/82] FIX: add require_pet to BIDSDataGrabber --- petprep/workflows/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/petprep/workflows/base.py b/petprep/workflows/base.py index 4b8fcf41..8347ad00 100644 --- a/petprep/workflows/base.py +++ b/petprep/workflows/base.py @@ -259,6 +259,7 @@ def init_single_subject_wf(subject_id: str): BIDSDataGrabber( subject_data=subject_data, anat_only=config.workflow.anat_only, + require_pet=bool(subject_data['pet']), subject_id=subject_id, anat_derivatives=anatomical_cache if anatomical_cache else None, ), From 2017d454dd05e3992f7f7b4d019e267f70a39a0b Mon Sep 17 00:00:00 2001 From: mnoergaard Date: Fri, 6 Jun 2025 21:26:51 +0200 Subject: [PATCH 82/82] FIX: add require_pet to test_base --- petprep/workflows/tests/test_base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/petprep/workflows/tests/test_base.py b/petprep/workflows/tests/test_base.py index 32338769..a223d881 100644 --- a/petprep/workflows/tests/test_base.py +++ b/petprep/workflows/tests/test_base.py @@ -188,6 +188,7 @@ def test_init_petprep_wf( mock_collect_data.return_value = original_collect_data( bids_root, '01', + require_pet=True, bids_filters=bids_filters, queries=custom_queries, )