diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index f60a0f2..af98f52 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -71,7 +71,7 @@ jobs: needs: [download-test-data, docker-build] strategy: matrix: - atlas: ['Schaefer20187Networks', 'MIST', 'DiFuMo', 'HarvardOxfordCortical', 'HarvardOxfordCorticalSymmetricSplit', 'HarvardOxfordSubcortical'] + atlas: ['Schaefer2018', 'MIST', 'DiFuMo', 'HarvardOxfordCortical', 'HarvardOxfordCorticalSymmetricSplit', 'HarvardOxfordSubcortical'] steps: - uses: actions/checkout@v4 with: @@ -92,12 +92,12 @@ jobs: docker run --rm \ -v ${{ env.DATA }}:/test_data \ -v ./outputs:/outputs \ - -v ./outputs/working_dir:/work \ + -v ./outputs/atlases:/atlases \ ${{env.USER_NAME}}/${{env.REPO_NAME}} \ /test_data/ds000017-fmriprep22.0.1-downsampled-nosurface \ /outputs \ participant \ - -w /work \ + -a /atlases \ --atlas ${{ matrix.atlas }} \ --participant_label 1 \ --reindex-bids diff --git a/docs/source/changes.md b/docs/source/changes.md index e7a2b54..6fc2728 100644 --- a/docs/source/changes.md +++ b/docs/source/changes.md @@ -1,11 +1,13 @@ # What’s new -## 0.5.1.dev +## 0.6.0.dev **Released MONTH YEAR** ### New +- [EHN] Default atlas `Schaefer20187Networks` is renamed to `Schaefer2018`. `Schaefer20187Networks` will be deprecated in 0.7.0. (@htwangtw) +- [EHN] `--work-dir` is now renamed to `--atlases-dir`. `--work-dir` will be deprecated in 0.7.0. (@htwangtw) - [EHN] Add details of denoising strategy to the meta data of the time series extraction. (@htwangtw) [#144](https://github.com/bids-apps/giga_connectome/issues/144) ### Fixes @@ -17,6 +19,9 @@ ### Changes +- [EHN] Merge `atlas-` and the atlas description `desc-` into one field `seg-` defined under 'Derivatives-Image data type' in BIDS. (@htwangtw) [#143](https://github.com/bids-apps/giga_connectome/issues/143) +- [EHN] Working directory is now renamed as `atlases/` to reflect the atlases directory mentioned in BEP017. 
+ ## 0.5.0 Released April 2024 diff --git a/docs/source/outputs.md b/docs/source/outputs.md index 33fe34a..8248f65 100644 --- a/docs/source/outputs.md +++ b/docs/source/outputs.md @@ -16,12 +16,12 @@ the output will be save in `sub-/[ses-]/func`. For each input image (that is, preprocessed BOLD time series) and each atlas the following data files will be generated -- a `[matches]_atlas-{atlas}_meas-PearsonCorrelation_desc-{atlas_description}{denoise_strategy}_relmat.tsv` +- a `[matches]_seg-{atlas}{atlas_description}_meas-PearsonCorrelation_desc-denoise{denoise_strategy}_relmat.tsv` file that contains the correlation matrix between all the regions of the atlas -- a `[matches]_atlas-{atlas}_desc-{atlas description}{denoise_strategy}_timeseries.tsv` +- a `[matches]_seg-{atlas}{atlas_description}_desc-denoise{denoise_strategy}_timeseries.tsv` file that contains the extracted timeseries for each region of the atlas -- `{atlas}` refers to the name of the atlas used (for example, `Schaefer20187Networks`) +- `{atlas}` refers to the name of the atlas used (for example, `Schaefer2018`) - `{atlas_description}` refers to the sub type of atlas used (for example, `100Parcels7Networks`) - `{denoise_strategy}` refers to the denoise strategy passed to the command line @@ -31,7 +31,7 @@ A JSON file is generated in the root of the output dataset (`meas-PearsonCorrela that contains metadata applicable to all `relmat.tsv` files. 
For each input image (that is, preprocessed BOLD time series) -a `[matches]_atlas-{atlas}_timeseries.json` +a `[matches]_desc-denoise{denoise_strategy}_timeseries.json` ### Example @@ -43,51 +43,100 @@ a `[matches]_atlas-{atlas}_timeseries.json` ├── sub-1 │   ├── ses-timepoint1 │   │   └── func -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv -│   │   └── 
sub-1_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_desc-denoiseSimple_timeseries.json +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_desc-denoiseSimple_timeseries.json +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│   │   └── 
sub-1_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv │   └── ses-timepoint2 │   └── func -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv -│   └── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json +│      ├── 
sub-1_ses-timepoint2_task-probabilisticclassification_run-01_desc-denoiseSimple_timeseries.json +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_desc-denoiseSimple_timeseries.json +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│      └── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv └── sub-2 - 
├── ses-timepoint1 - │   └── func - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv - │   └── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json - └── ses-timepoint2 - └── func - ├── 
sub-2_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv - └── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json +    ├── ses-timepoint1 +    │   └── func +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_desc-denoiseSimple_timeseries.json +    │   ├── 
sub-2_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_desc-denoiseSimple_timeseries.json +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +    │   └── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +    └── ses-timepoint2 +    └── func +       ├── 
sub-2_ses-timepoint2_task-probabilisticclassification_run-01_desc-denoiseSimple_timeseries.json +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_desc-denoiseSimple_timeseries.json +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +       └── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +``` + + 
+## Atlases + +The merged grey matter masks per subject and the atlases resampled to the individual EPI data are in the directory specified at `--atlases-dir`. + +For each subject and each atlas the following data files will be generated + +- a `sub-_space-MNI152NLin2009cAsym_res-2_label-GM_mask.nii.gz` + Grey matter mask in the dedicated space for a given subject, + created from merging all the EPI brain masks of a given subject, + and combined with the grey matter mask of the given space. +- `sub-_seg-{atlas}{atlas_description}_[dseg|probseg].nii.gz` + files where the atlas was resampled to `sub-_space-MNI152NLin2009cAsym_res-2_label-GM_mask.nii.gz` + for each individual. + +- `{atlas}` refers to the name of the atlas used (for example, `Schaefer2018`) +- `{atlas_description}` refers to the sub type of atlas used (for example, `100Parcels7Networks`) + +### Example + +``` +└── sub-1 + └── func + ├── sub-1_seg-Schaefer2018100Parcels7Networks_dseg.nii.gz + ├── sub-1_seg-Schaefer2018200Parcels7Networks_dseg.nii.gz + ├── sub-1_seg-Schaefer2018300Parcels7Networks_dseg.nii.gz + ├── sub-1_seg-Schaefer2018400Parcels7Networks_dseg.nii.gz + ├── sub-1_seg-Schaefer2018500Parcels7Networks_dseg.nii.gz + ├── sub-1_seg-Schaefer2018600Parcels7Networks_dseg.nii.gz + ├── sub-1_seg-Schaefer2018800Parcels7Networks_dseg.nii.gz + └── sub-1_space-MNI152NLin2009cAsym_res-2_label-GM_mask.nii.gz ``` diff --git a/docs/source/usage.md b/docs/source/usage.md index 93843f1..a08eec1 100644 --- a/docs/source/usage.md +++ b/docs/source/usage.md @@ -47,7 +47,7 @@ An example using Apptainer (formerly known as Singularity): ```bash FMRIPREP_DIR=/path/to/fmriprep_output OUTPUT_DIR=/path/to/connectom_output -WORKING_DIR=/path/to/working_dir +ATLASES_DIR=/path/to/atlases DENOISE_CONFIG=/path/to/denoise_config.json GIGA_CONNECTOME=/path/to/giga-connectome.simg @@ -55,10 +55,10 @@ GIGA_CONNECTOME=/path/to/giga-connectome.simg apptainer run \ --bind ${FMRIPREP_DIR}:/data/input \ --bind 
${OUTPUT_DIR}:/data/output \ - --bind ${WORKING_DIR}:/data/working \ + --bind ${ATLASES_DIR}:/data/atlases \ --bind ${DENOISE_CONFIG}:/data/denoise_config.json \ ${GIGA_CONNECTOME} \ - -w /data/working \ + -a /data/atlases \ --denoise-strategy /data/denoise_config.json \ /data/input \ /data/output \ @@ -128,7 +128,7 @@ An example using Apptainer (formerly known as Singularity): ```bash FMRIPREP_DIR=/path/to/fmriprep_output OUTPUT_DIR=/path/to/connectom_output -WORKING_DIR=/path/to/working_dir +ATLASES_DIR=/path/to/atlases ATLAS_CONFIG=/path/to/atlas_config.json GIGA_CONNECTOME=/path/to/giga-connectome.simg @@ -138,10 +138,10 @@ export APPTAINERENV_TEMPLATEFLOW_HOME=/data/atlas apptainer run \ --bind ${FMRIPREP_DIR}:/data/input \ --bind ${OUTPUT_DIR}:/data/output \ - --bind ${WORKING_DIR}:/data/working \ + --bind ${ATLASES_DIR}:/data/atlases \ --bind ${ATLAS_CONFIG}:/data/atlas_config.json \ ${GIGA_CONNECTOME} \ - -w /data/working \ + -a /data/atlases \ --atlas /data/atlas_config.json \ /data/input \ /data/output \ diff --git a/giga_connectome/atlas.py b/giga_connectome/atlas.py index f8cbae0..106f663 100644 --- a/giga_connectome/atlas.py +++ b/giga_connectome/atlas.py @@ -30,6 +30,12 @@ {"name": str, "file_paths": Dict[str, List[Path]], "type": str}, ) +deprecations = { + # atlas name: + # (replacement, version slated to be removed in) + "Schaefer20187Networks": ("Schaefer2018", "0.7.0"), +} + def load_atlas_setting( atlas: str | Path | dict[str, Any], @@ -94,60 +100,55 @@ def load_atlas_setting( def resample_atlas_collection( - template: str, + subject_seg_file_names: list[str], atlas_config: ATLAS_SETTING_TYPE, - group_mask_dir: Path, - group_mask: Nifti1Image, + subject_mask_dir: Path, + subject_mask: Nifti1Image, ) -> list[Path]: """Resample a atlas collection to group grey matter mask. Parameters ---------- - template: str - Templateflow template name. This template should match the template of - `all_masks`. 
+ subject_seg_file_names: list of str + File names of subject atlas segmentations. atlas_config: dict Atlas name. Currently support Schaefer20187Networks, MIST, DiFuMo. - group_mask_dir: pathlib.Path + subject_mask_dir: pathlib.Path Path to where the outputs are saved. - group_mask : nibabel.nifti1.Nifti1Image - EPI (grey matter) mask for the current group of subjects. + subject_mask : nibabel.nifti1.Nifti1Image + EPI (grey matter) mask for the subject. Returns ------- list of pathlib.Path - Paths to atlases sampled to group level grey matter mask. + Paths to subject specific segmentations created from atlases sampled + to individual grey matter mask. """ gc_log.info("Resample atlas to group grey matter mask.") - resampled_atlases = [] + subject_seg = [] with progress_bar(text="Resampling atlases") as progress: task = progress.add_task( description="resampling", total=len(atlas_config["file_paths"]) ) - for desc in atlas_config["file_paths"]: + for seg_file, desc in zip( + subject_seg_file_names, atlas_config["file_paths"] + ): parcellation = atlas_config["file_paths"][desc] parcellation_resampled = resample_to_img( - parcellation, group_mask, interpolation="nearest" - ) - filename = ( - f"tpl-{template}_" - f"atlas-{atlas_config['name']}_" - "res-dataset_" - f"desc-{desc}_" - f"{atlas_config['type']}.nii.gz" + parcellation, subject_mask, interpolation="nearest" ) - save_path = group_mask_dir / filename + save_path = subject_mask_dir / seg_file nib.save(parcellation_resampled, save_path) - resampled_atlases.append(save_path) + subject_seg.append(save_path) progress.update(task, advance=1) - return resampled_atlases + return subject_seg def get_atlas_labels() -> List[str]: @@ -176,9 +177,18 @@ def _check_altas_config( KeyError atlas configuration not containing the correct keys. 
""" + if isinstance(atlas, str) and atlas in deprecations: + new_name, version = deprecations[atlas] + gc_log.warning( + f"{atlas} has been deprecated and will be removed in " + f"{version}. Please use {new_name} instead." + ) + atlas = new_name + # load the file first if the input is not already a dictionary atlas_dir = resource_filename("giga_connectome", "data/atlas") preset_atlas = [p.stem for p in Path(atlas_dir).glob("*.json")] + if isinstance(atlas, (str, Path)): if atlas in preset_atlas: config_path = Path( @@ -188,6 +198,10 @@ def _check_altas_config( ) elif Path(atlas).exists(): config_path = Path(atlas) + else: + raise FileNotFoundError( + f"Atlas configuration file {atlas} not found." + ) with open(config_path, "r") as file: atlas_config = json.load(file) diff --git a/giga_connectome/data/atlas/Schaefer20187Networks.json b/giga_connectome/data/atlas/Schaefer2018.json similarity index 91% rename from giga_connectome/data/atlas/Schaefer20187Networks.json rename to giga_connectome/data/atlas/Schaefer2018.json index c3211b4..e90b6e6 100644 --- a/giga_connectome/data/atlas/Schaefer20187Networks.json +++ b/giga_connectome/data/atlas/Schaefer2018.json @@ -1,5 +1,5 @@ { - "name": "Schaefer20187Networks", + "name": "Schaefer2018", "parameters": { "atlas": "Schaefer2018", "template": "MNI152NLin2009cAsym", diff --git a/giga_connectome/mask.py b/giga_connectome/mask.py index bcac2b2..db0dbbf 100644 --- a/giga_connectome/mask.py +++ b/giga_connectome/mask.py @@ -21,12 +21,13 @@ from giga_connectome.atlas import ATLAS_SETTING_TYPE, resample_atlas_collection from giga_connectome.logger import gc_logger +from giga_connectome import utils gc_log = gc_logger() def generate_gm_mask_atlas( - working_dir: Path, + atlases_dir: Path, atlas: ATLAS_SETTING_TYPE, template: str, masks: list[BIDSImageFile], @@ -34,46 +35,65 @@ def generate_gm_mask_atlas( """ """ # check masks; isolate this part and make sure to make it a validate # templateflow template with a config file - - 
group_mask_dir = working_dir / "groupmasks" / f"tpl-{template}" - group_mask_dir.mkdir(exist_ok=True, parents=True) - - group_mask, resampled_atlases = None, None - if group_mask_dir.exists(): - group_mask, resampled_atlases = _check_pregenerated_masks( - template, working_dir, atlas + subject, _, _ = utils.parse_bids_name(masks[0].path) + subject_mask_dir = atlases_dir / subject / "func" + subject_mask_dir.mkdir(exist_ok=True, parents=True) + target_subject_mask_file_name: str = utils.output_filename( + source_file=masks[0].path, + atlas="", + suffix="mask", + extension="nii.gz", + strategy="", + atlas_desc="", + ) + target_subject_seg_file_names: list[str] = [ + utils.output_filename( + source_file=masks[0].path, + atlas=atlas["name"], + suffix=atlas["type"], + extension="nii.gz", + strategy="", + atlas_desc=atlas_desc, ) + for atlas_desc in atlas["file_paths"] + ] + target_subject_mask, target_subject_seg = _check_pregenerated_masks( + subject_mask_dir, + target_subject_mask_file_name, + target_subject_seg_file_names, + ) - if not group_mask: + if not target_subject_mask: # grey matter group mask is only supplied in MNI152NLin2009c(A)sym - group_mask_nii = generate_group_mask( + subject_mask_nii = generate_subject_gm_mask( [m.path for m in masks], "MNI152NLin2009cAsym" ) - current_file_name = ( - f"tpl-{template}_res-dataset_label-GM_desc-group_mask.nii.gz" + nib.save( + subject_mask_nii, subject_mask_dir / target_subject_mask_file_name ) - group_mask = group_mask_dir / current_file_name - nib.save(group_mask_nii, group_mask) - if not resampled_atlases: - resampled_atlases = resample_atlas_collection( - template, atlas, group_mask_dir, group_mask + if not target_subject_seg: + subject_seg_niis = resample_atlas_collection( + target_subject_seg_file_names, + atlas, + subject_mask_dir, + subject_mask_nii, ) - return group_mask, resampled_atlases + return subject_mask_nii, subject_seg_niis -def generate_group_mask( +def generate_subject_gm_mask( imgs: Sequence[Path 
| str | Nifti1Image], template: str = "MNI152NLin2009cAsym", templateflow_dir: Path | None = None, n_iter: int = 2, ) -> Nifti1Image: """ - Generate a group EPI grey matter mask, and overlaid with a MNI grey + Generate a subject EPI grey matter mask, and overlaid with a MNI grey matter template. - The Group EPI mask will ensure the signal extraction is from the most - overlapping voxels. + The subject EPI mask will ensure the signal extraction is from the most + overlapping voxels for all scans of the subject. Parameters ---------- @@ -267,38 +287,30 @@ def _check_mask_affine( def _check_pregenerated_masks( - template: str, working_dir: Path, atlas: ATLAS_SETTING_TYPE -) -> tuple[Path | None, list[Path] | None]: + subject_mask_dir: Path, + subject_mask_file_name: str, + subject_seg_file_names: list[str], +) -> tuple[bool, bool]: """Check if the working directory is populated with needed files.""" - output_dir = working_dir / "groupmasks" / f"tpl-{template}" - group_mask: Path | None = ( - output_dir - / f"tpl-{template}_res-dataset_label-GM_desc-group_mask.nii.gz" - ) - if group_mask and not group_mask.exists(): - group_mask = None - else: + # subject grey matter mask + if target_subject_mask := ( + subject_mask_dir / subject_mask_file_name ).exists(): gc_log.info( - f"Found pregenerated group level grey matter mask: {group_mask}" + "Found pregenerated subject level grey matter mask: " + f"{subject_mask_dir / subject_mask_file_name}" ) # atlas - resampled_atlases: list[Path] = [] - for desc in atlas["file_paths"]: - filename = ( - f"tpl-{template}_" - f"atlas-{atlas['name']}_" - "res-dataset_" - f"desc-{desc}_" - f"{atlas['type']}.nii.gz" - ) - resampled_atlases.append(output_dir / filename) - all_exist = [file_path.exists() for file_path in resampled_atlases] - if not all(all_exist): - return group_mask, None - else: + all_exist = [ + (subject_mask_dir / file_path).exists() + for file_path in subject_seg_file_names + ] + if target_subject_seg := all(all_exist): 
gc_log.info( - f"Found resampled atlases:\n{[str(x) for x in resampled_atlases]}." - "\nSkipping group level mask generation step." + "Found resampled atlases:\n" + f"{[filepath for filepath in subject_seg_file_names]} " + f"in {subject_mask_dir}." + "\nSkipping individual segmentation generation step." ) - return group_mask, resampled_atlases + return target_subject_mask, target_subject_seg diff --git a/giga_connectome/postprocess.py b/giga_connectome/postprocess.py index 37ceecc..82b07d7 100644 --- a/giga_connectome/postprocess.py +++ b/giga_connectome/postprocess.py @@ -102,9 +102,9 @@ def run_postprocessing_dataset( for atlas_path in resampled_atlases: if isinstance(atlas_path, str): atlas_path = Path(atlas_path) - desc = atlas_path.name.split("desc-")[-1].split("_")[0] - atlas_maskers[desc] = _get_masker(atlas_path) - connectomes[desc] = [] + seg = atlas_path.name.split("seg-")[-1].split("_")[0] + atlas_maskers[seg] = _get_masker(atlas_path) + connectomes[seg] = [] correlation_measure = ConnectivityMeasure( kind="correlation", vectorize=False, discard_diagonal=False @@ -136,12 +136,14 @@ def run_postprocessing_dataset( connectome_path = connectome_path / session connectome_path = connectome_path / "func" - # All timeseries derivatives have the same metadata - # so one json file for them all. + # All timeseries derivatives of the same scan have the same + # metadata so one json file for them all. 
# see https://bids.neuroimaging.io/bep012 json_filename = connectome_path / utils.output_filename( source_file=Path(img.filename).stem, atlas=atlas["name"], + atlas_desc="", + strategy=strategy["name"], suffix="timeseries", extension="json", ) @@ -154,14 +156,11 @@ def run_postprocessing_dataset( with open(json_filename, "w") as f: json.dump(meta_data, f, indent=4) - for desc, masker in atlas_maskers.items(): + for seg, masker in atlas_maskers.items(): if not denoised_img: time_series_atlas, correlation_matrix = None, None - attribute_name = ( - f"{subject}_{specifier}" - f"_atlas-{atlas['name']}_desc-{desc}" - ) + attribute_name = f"{subject}_{specifier}" f"_seg-{seg}" gc_log.info(f"{attribute_name}: no volume after scrubbing") progress.update(task, advance=1) continue @@ -177,6 +176,8 @@ def run_postprocessing_dataset( ) ) + # reverse engineer atlas_desc + desc = seg.split(atlas["name"])[-1] # dump correlation_matrix to tsv relmat_filename = connectome_path / utils.output_filename( source_file=Path(img.filename).stem, @@ -184,7 +185,7 @@ def run_postprocessing_dataset( suffix="relmat", extension="tsv", strategy=strategy["name"], - desc=desc, + atlas_desc=desc, ) utils.check_path(relmat_filename) df = pd.DataFrame(correlation_matrix) @@ -197,7 +198,7 @@ def run_postprocessing_dataset( suffix="timeseries", extension="tsv", strategy=strategy["name"], - desc=desc, + atlas_desc=desc, ) utils.check_path(timeseries_filename) df = pd.DataFrame(time_series_atlas) @@ -210,7 +211,7 @@ def run_postprocessing_dataset( suffix="report", extension="html", strategy=strategy["name"], - desc=desc, + atlas_desc=desc, ) report.save_as_html(report_filename) diff --git a/giga_connectome/run.py b/giga_connectome/run.py index e9bc83f..156594e 100644 --- a/giga_connectome/run.py +++ b/giga_connectome/run.py @@ -1,5 +1,5 @@ from __future__ import annotations - +from typing import Any import argparse from pathlib import Path from typing import Sequence @@ -7,8 +7,35 @@ from giga_connectome 
import __version__ from giga_connectome.workflow import workflow from giga_connectome.atlas import get_atlas_labels +from giga_connectome.logger import gc_logger + +gc_log = gc_logger() preset_atlas = get_atlas_labels() +deprecations = { + # parser attribute name: + # (replacement flag, version slated to be removed in) + "work_dir": ("--atlases-dir", "0.7.0"), +} + + +class DeprecatedAction(argparse.Action): + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + values: str | Sequence[Any] | None, + option_string: str | None = None, + ) -> None: + new_opt, rem_vers = deprecations.get(self.dest, (None, None)) + msg = ( + f"{self.option_strings} has been deprecated and will be removed " + f"in {rem_vers or 'a later version'}." + ) + if new_opt: + msg += f" Please use `{new_opt}` instead." + gc_log.warning(msg) + delattr(namespace, self.dest) def global_parser() -> argparse.ArgumentParser: @@ -51,20 +78,26 @@ def global_parser() -> argparse.ArgumentParser: nargs="+", ) parser.add_argument( - "-w", - "--work-dir", + "-a", + "--atlases-dir", action="store", type=Path, - default=Path("work").absolute(), - help="Path where intermediate results should be stored.", + default=Path("atlases").absolute(), + help="Path where subject specific segmentations are stored.", + ) + parser.add_argument( + "-w", + "--work-dir", + action=DeprecatedAction, + help="This argument is deprecated. Please use --atlases-dir instead.", ) parser.add_argument( "--atlas", help="The choice of atlas for time series extraction. Default atlas " f"choices are: {preset_atlas}. User can pass " "a path to a json file containing configuration for their own choice " - "of atlas. The default is 'Schaefer20187Networks'.", - default="Schaefer20187Networks", + "of atlas. 
The default is 'Schaefer2018'.", + default="Schaefer2018", ) parser.add_argument( "--denoise-strategy", diff --git a/giga_connectome/tests/test_atlas.py b/giga_connectome/tests/test_atlas.py index 62c494f..572d8cd 100644 --- a/giga_connectome/tests/test_atlas.py +++ b/giga_connectome/tests/test_atlas.py @@ -1,8 +1,17 @@ from giga_connectome.atlas import load_atlas_setting +import pytest +from pkg_resources import resource_filename def test_load_atlas_setting(): + # use Schaefer2018 when updating 0.7.0 atlas_config = load_atlas_setting("Schaefer20187Networks") - assert atlas_config["name"] == "Schaefer20187Networks" + assert atlas_config["name"] == "Schaefer2018" + atlas_config = load_atlas_setting("Schaefer2018") + assert atlas_config["name"] == "Schaefer2018" atlas_config = load_atlas_setting("HarvardOxfordCortical") assert atlas_config["name"] == "HarvardOxfordCortical" + pytest.raises(FileNotFoundError, load_atlas_setting, "blah") + json_path = resource_filename("giga_connectome", "data/atlas/DiFuMo.json") + atlas_config = load_atlas_setting(json_path) + assert atlas_config["name"] == "DiFuMo" diff --git a/giga_connectome/tests/test_cli.py b/giga_connectome/tests/test_cli.py index 85c79ea..7956c72 100644 --- a/giga_connectome/tests/test_cli.py +++ b/giga_connectome/tests/test_cli.py @@ -33,13 +33,14 @@ def test_help(capsys): @pytest.mark.smoke -def test_smoke(tmp_path, capsys): +def test_smoke(tmp_path, caplog): bids_dir = resource_filename( "giga_connectome", "data/test_data/ds000017-fmriprep22.0.1-downsampled-nosurface", ) output_dir = tmp_path / "output" - work_dir = tmp_path / "output/work" + atlases_dir = tmp_path / "atlases" + work_dir = tmp_path / "work" if not Path(output_dir).exists: Path(output_dir).mkdir() @@ -50,8 +51,10 @@ def test_smoke(tmp_path, capsys): "1", "-w", str(work_dir), + "-a", + str(atlases_dir), "--atlas", - "Schaefer20187Networks", + "Schaefer2018", "--denoise-strategy", "simple", "--reindex-bids", @@ -61,32 +64,32 @@ def 
test_smoke(tmp_path, capsys): "participant", ] ) + assert "has been deprecated" in caplog.text.splitlines()[0] output_folder = output_dir / "sub-1" / "ses-timepoint1" / "func" base = ( "sub-1_ses-timepoint1_task-probabilisticclassification" - "_run-01_space-MNI152NLin2009cAsym_res-2" - "_atlas-Schaefer20187Networks" + "_run-01_seg-Schaefer2018100Parcels7Networks" + ) + ts_base = ( + "sub-1_ses-timepoint1_task-probabilisticclassification" + "_run-01_desc-denoiseSimple" ) - relmat_file = output_folder / ( - base - + "_meas-PearsonCorrelation" - + "_desc-100Parcels7NetworksSimple_relmat.tsv" + base + "_meas-PearsonCorrelation" + "_desc-denoiseSimple_relmat.tsv" ) assert relmat_file.exists() relmat = pd.read_csv(relmat_file, sep="\t") assert len(relmat) == 100 - - json_file = relmat_file = output_folder / (base + "_timeseries.json") + json_file = relmat_file = output_folder / (ts_base + "_timeseries.json") assert json_file.exists() with open(json_file, "r") as f: content = json.load(f) assert content.get("SamplingFrequency") == 0.5 timeseries_file = relmat_file = output_folder / ( - base + "_desc-100Parcels7NetworksSimple_timeseries.tsv" + base + "_desc-denoiseSimple_timeseries.tsv" ) assert timeseries_file.exists() timeseries = pd.read_csv(timeseries_file, sep="\t") diff --git a/giga_connectome/tests/test_mask.py b/giga_connectome/tests/test_mask.py index 9bd11f6..e6e55a3 100644 --- a/giga_connectome/tests/test_mask.py +++ b/giga_connectome/tests/test_mask.py @@ -6,22 +6,25 @@ from giga_connectome import mask -def test_generate_group_mask(): +def test_generate_subject_gm_mask(): """Generate group epi grey matter mask and resample atlas.""" + # use different subject in the test, should work the same data = datasets.fetch_development_fmri(n_subjects=3) imgs = data.func - group_epi_mask = mask.generate_group_mask(imgs) + group_epi_mask = mask.generate_subject_gm_mask(imgs) # match the post processing details: https://osf.io/wjtyq assert group_epi_mask.shape == (50, 59, 
50) - diff_tpl = mask.generate_group_mask(imgs, template="MNI152NLin2009aAsym") + diff_tpl = mask.generate_subject_gm_mask( + imgs, template="MNI152NLin2009aAsym" + ) assert diff_tpl.shape == (50, 59, 50) # test bad inputs with pytest.raises( ValueError, match="TemplateFlow does not supply template blah" ): - mask.generate_group_mask(imgs, template="blah") + mask.generate_subject_gm_mask(imgs, template="blah") def test_check_mask_affine(): diff --git a/giga_connectome/tests/test_utils.py b/giga_connectome/tests/test_utils.py index b3b924f..59940b4 100644 --- a/giga_connectome/tests/test_utils.py +++ b/giga_connectome/tests/test_utils.py @@ -33,10 +33,15 @@ def test_check_check_filter(): assert "dseg" in str(msg.value) -def test_parse_bids_name(): - subject, session, specifier = utils.parse_bids_name( - "sub-01_ses-ah_task-rest_run-1_space-MNIfake_res-2_desc-preproc_bold.nii.gz" - ) +@pytest.mark.parametrize( + "source_file", + [ + "sub-01_ses-ah_task-rest_run-1_space-MNIfake_res-2_desc-preproc_bold.nii.gz", + "sub-01_ses-ah_task-rest_run-1_space-MNIfake_res-2_desc-brain_mask.nii.gz", + ], +) +def test_parse_bids_name(source_file): + subject, session, specifier = utils.parse_bids_name(source_file) assert subject == "sub-01" assert session == "ses-ah" assert specifier == "ses-ah_task-rest_run-1" @@ -53,3 +58,62 @@ def test_get_subject_lists(): ) assert len(subjects) == 1 assert subjects[0] == "01" + + +@pytest.mark.parametrize( + "suffix,extension,target", + [ + ( + "timeseries", + "tsv", + "sub-01_ses-ah_task-rest_run-1_seg-fake100_desc-denoiseSimple_timeseries.tsv", + ), + ( + "timeseries", + "json", + "sub-01_ses-ah_task-rest_run-1_desc-denoiseSimple_timeseries.json", + ), + ( + "relmat", + "tsv", + "sub-01_ses-ah_task-rest_run-1_seg-fake100_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv", + ), + ( + "report", + "html", + "sub-01_ses-ah_task-rest_run-1_seg-fake100_desc-denoiseSimple_report.html", + ), + ], +) +def test_output_filename(suffix, extension, 
target): + source_file = "sub-01_ses-ah_task-rest_run-1_space-MNIfake_res-2_desc-preproc_bold.nii.gz" + + generated_target = utils.output_filename( + source_file=source_file, + atlas="fake", + suffix=suffix, + extension=extension, + strategy="simple", + atlas_desc="100", + ) + assert target == generated_target + + +@pytest.mark.parametrize( + "atlas,atlas_desc,suffix,target", + [ + ("fake", "100", "dseg", "sub-01_seg-fake100_dseg.nii.gz"), + ("", "", "mask", "sub-01_space-MNIfake_res-2_label-GM_mask.nii.gz"), + ], +) +def test_output_filename_seg(atlas, atlas_desc, suffix, target): + source_file = "sub-01_ses-ah_task-rest_run-1_space-MNIfake_res-2_desc-brain_mask.nii.gz" + generated_target = utils.output_filename( + source_file=source_file, + atlas=atlas, + suffix=suffix, + extension="nii.gz", + strategy="", + atlas_desc=atlas_desc, + ) + assert target == generated_target diff --git a/giga_connectome/utils.py b/giga_connectome/utils.py index 908d5bf..1ff0d6e 100644 --- a/giga_connectome/utils.py +++ b/giga_connectome/utils.py @@ -255,32 +255,33 @@ def output_filename( suffix: str, extension: str, strategy: str | None = None, - desc: str | None = None, + atlas_desc: str | None = None, ) -> str: """Generate output filneme.""" - root: str | list[str] = source_file.split("_")[:-1] + subject, session, specifier = parse_bids_name(source_file) + seg = f"seg-{atlas}{atlas_desc}" + if extension != "nii.gz": + root: str = f"{subject}_{specifier}" - # drop entities - # that are redundant or - # to make sure we get a single file across - root = [x for x in root if "desc" not in x] + if extension != "json": + root += f"_{seg}" - root = "_".join(root) - if root != "": - root += "_" + if suffix == "relmat": + root += "_meas-PearsonCorrelation" - root += f"atlas-{atlas}" + if strategy is None: + strategy = "" - if suffix == "relmat": - root += "_meas-PearsonCorrelation" - - if suffix == "timeseries" and extension == "json": - return f"{root}_timeseries.json" - - if strategy is 
None: - strategy = "" + return ( + f"{root}_desc-denoise{strategy.capitalize()}_{suffix}.{extension}" + ) - return f"{root}_desc-{desc}{strategy.capitalize()}_{suffix}.{extension}" + elif suffix == "mask": + reference = parse_bids_filename(source_file) + tpl: str = f"space-{reference['space']}_res-{reference['res']}" + return f"{subject}_{tpl}_label-GM_{suffix}.{extension}" + else: + return f"{subject}_{seg}_{suffix}.{extension}" def progress_bar(text: str, color: str = "green") -> Progress: diff --git a/giga_connectome/workflow.py b/giga_connectome/workflow.py index ac3ae8a..8724100 100644 --- a/giga_connectome/workflow.py +++ b/giga_connectome/workflow.py @@ -37,7 +37,7 @@ def workflow(args: argparse.Namespace) -> None: # set file paths bids_dir = args.bids_dir output_dir = args.output_dir - working_dir = args.work_dir + atlases_dir = args.atlases_dir standardize = True # always standardising the time series smoothing_fwhm = args.smoothing_fwhm calculate_average_correlation = ( @@ -54,7 +54,7 @@ def workflow(args: argparse.Namespace) -> None: # check output path output_dir.mkdir(parents=True, exist_ok=True) - working_dir.mkdir(parents=True, exist_ok=True) + atlases_dir.mkdir(parents=True, exist_ok=True) # get template information; currently we only support the fmriprep defaults template = ( @@ -79,8 +79,8 @@ def workflow(args: argparse.Namespace) -> None: subj_data, _ = utils.get_bids_images( [subject], template, bids_dir, args.reindex_bids, bids_filters ) - group_mask, resampled_atlases = generate_gm_mask_atlas( - working_dir, atlas, template, subj_data["mask"] + subject_mask_nii, subject_seg_niis = generate_gm_mask_atlas( + atlases_dir, atlas, template, subj_data["mask"] ) gc_log.info(f"Generate subject level connectomes: sub-{subject}") @@ -88,9 +88,9 @@ def workflow(args: argparse.Namespace) -> None: run_postprocessing_dataset( strategy, atlas, - resampled_atlases, + subject_seg_niis, subj_data["bold"], - group_mask, + subject_mask_nii, standardize, 
smoothing_fwhm, output_dir,