From 22037190af61a3deab5fce4f490760d3703d7222 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Fri, 17 May 2024 12:16:33 -0400 Subject: [PATCH 01/15] Update subject segmentation outputs --- giga_connectome/atlas.py | 41 +++++----- giga_connectome/mask.py | 112 +++++++++++++++------------- giga_connectome/tests/test_mask.py | 11 ++- giga_connectome/tests/test_utils.py | 96 +++++++++++++++++++++++- giga_connectome/utils.py | 39 +++++----- 5 files changed, 199 insertions(+), 100 deletions(-) diff --git a/giga_connectome/atlas.py b/giga_connectome/atlas.py index f8cbae0..2cd5d13 100644 --- a/giga_connectome/atlas.py +++ b/giga_connectome/atlas.py @@ -94,60 +94,55 @@ def load_atlas_setting( def resample_atlas_collection( - template: str, + subject_seg_file_names: list[str], atlas_config: ATLAS_SETTING_TYPE, - group_mask_dir: Path, - group_mask: Nifti1Image, + subject_mask_dir: Path, + subject_mask: Nifti1Image, ) -> list[Path]: """Resample a atlas collection to group grey matter mask. Parameters ---------- - template: str - Templateflow template name. This template should match the template of - `all_masks`. + subject_atlas_file_names: list of str + File names of subject atlas segmentations. atlas_config: dict Atlas name. Currently support Schaefer20187Networks, MIST, DiFuMo. - group_mask_dir: pathlib.Path + subject_mask_dir: pathlib.Path Path to where the outputs are saved. - group_mask : nibabel.nifti1.Nifti1Image - EPI (grey matter) mask for the current group of subjects. + subject_mask : nibabel.nifti1.Nifti1Image + EPI (grey matter) mask for the subject. Returns ------- list of pathlib.Path - Paths to atlases sampled to group level grey matter mask. + Paths to subject specific segmentations created from atlases sampled + to individual grey matter mask. 
""" gc_log.info("Resample atlas to group grey matter mask.") - resampled_atlases = [] + subject_seg = [] with progress_bar(text="Resampling atlases") as progress: task = progress.add_task( description="resampling", total=len(atlas_config["file_paths"]) ) - for desc in atlas_config["file_paths"]: + for seg_file, desc in zip( + subject_seg_file_names, atlas_config["file_paths"] + ): parcellation = atlas_config["file_paths"][desc] parcellation_resampled = resample_to_img( - parcellation, group_mask, interpolation="nearest" + parcellation, subject_mask, interpolation="nearest" ) - filename = ( - f"tpl-{template}_" - f"atlas-{atlas_config['name']}_" - "res-dataset_" - f"desc-{desc}_" - f"{atlas_config['type']}.nii.gz" - ) - save_path = group_mask_dir / filename + save_path = subject_mask_dir / seg_file nib.save(parcellation_resampled, save_path) - resampled_atlases.append(save_path) + subject_seg.append(save_path) progress.update(task, advance=1) - return resampled_atlases + return subject_seg def get_atlas_labels() -> List[str]: diff --git a/giga_connectome/mask.py b/giga_connectome/mask.py index bcac2b2..8b7c81d 100644 --- a/giga_connectome/mask.py +++ b/giga_connectome/mask.py @@ -21,6 +21,7 @@ from giga_connectome.atlas import ATLAS_SETTING_TYPE, resample_atlas_collection from giga_connectome.logger import gc_logger +from giga_connectome import utils gc_log = gc_logger() @@ -34,46 +35,65 @@ def generate_gm_mask_atlas( """ """ # check masks; isolate this part and make sure to make it a validate # templateflow template with a config file - - group_mask_dir = working_dir / "groupmasks" / f"tpl-{template}" - group_mask_dir.mkdir(exist_ok=True, parents=True) - - group_mask, resampled_atlases = None, None - if group_mask_dir.exists(): - group_mask, resampled_atlases = _check_pregenerated_masks( - template, working_dir, atlas + subject, _, _ = utils.parse_bids_name(masks[0].path) + subject_mask_dir = working_dir / subject / "func" + subject_mask_dir.mkdir(exist_ok=True, 
parents=True) + target_subject_mask_file_name: str = utils.output_filename( + source_file=masks[0].path, + atlas="", + suffix="mask", + extension="nii.gz", + strategy="", + atlas_desc="", + ) + target_subject_seg_file_names: list[str] = [ + utils.output_filename( + source_file=masks[0].path, + atlas=atlas["name"], + suffix=atlas["type"], + extension="nii.gz", + strategy="", + atlas_desc=atlas_desc, ) + for atlas_desc in atlas["file_paths"] + ] + target_subject_mask, target_subject_seg = _check_pregenerated_masks( + subject_mask_dir, + target_subject_mask_file_name, + target_subject_seg_file_names, + ) - if not group_mask: + if not target_subject_mask: # grey matter group mask is only supplied in MNI152NLin2009c(A)sym - group_mask_nii = generate_group_mask( + subject_mask_nii = generate_subject_gm_mask( [m.path for m in masks], "MNI152NLin2009cAsym" ) - current_file_name = ( - f"tpl-{template}_res-dataset_label-GM_desc-group_mask.nii.gz" + nib.save( + subject_mask_nii, subject_mask_dir / target_subject_mask_file_name ) - group_mask = group_mask_dir / current_file_name - nib.save(group_mask_nii, group_mask) - if not resampled_atlases: - resampled_atlases = resample_atlas_collection( - template, atlas, group_mask_dir, group_mask + if not target_subject_seg: + subject_seg_niis = resample_atlas_collection( + target_subject_seg_file_names, + atlas, + subject_mask_dir, + subject_mask_nii, ) - return group_mask, resampled_atlases + return subject_mask_nii, subject_seg_niis -def generate_group_mask( +def generate_subject_gm_mask( imgs: Sequence[Path | str | Nifti1Image], template: str = "MNI152NLin2009cAsym", templateflow_dir: Path | None = None, n_iter: int = 2, ) -> Nifti1Image: """ - Generate a group EPI grey matter mask, and overlaid with a MNI grey + Generate a subject EPI grey matter mask, and overlaid with a MNI grey matter template. - The Group EPI mask will ensure the signal extraction is from the most - overlapping voxels. 
+ The subject EPI mask will ensure the signal extraction is from the most + overlapping voxels for all scans of the subject. Parameters ---------- @@ -267,38 +287,30 @@ def _check_mask_affine( def _check_pregenerated_masks( - template: str, working_dir: Path, atlas: ATLAS_SETTING_TYPE -) -> tuple[Path | None, list[Path] | None]: + subject_mask_dir: Path, + subject_mask_file_name: str, + subject_seg_file_names: list[str], +) -> tuple[bool, bool]: """Check if the working directory is populated with needed files.""" - output_dir = working_dir / "groupmasks" / f"tpl-{template}" - group_mask: Path | None = ( - output_dir - / f"tpl-{template}_res-dataset_label-GM_desc-group_mask.nii.gz" - ) - if group_mask and not group_mask.exists(): - group_mask = None - else: + # subject grey matter mask + if target_subject_mask := ( + subject_mask_dir / subject_mask_file_name + ).exists(): gc_log.info( - f"Found pregenerated group level grey matter mask: {group_mask}" + "Found pregenerated group level grey matter mask: " + f"{subject_mask_dir / subject_mask_file_name}" ) # atlas - resampled_atlases: list[Path] = [] - for desc in atlas["file_paths"]: - filename = ( - f"tpl-{template}_" - f"atlas-{atlas['name']}_" - "res-dataset_" - f"desc-{desc}_" - f"{atlas['type']}.nii.gz" - ) - resampled_atlases.append(output_dir / filename) - all_exist = [file_path.exists() for file_path in resampled_atlases] - if not all(all_exist): - return group_mask, None - else: + all_exist = [ + (subject_mask_dir / file_path).exists() + for file_path in subject_seg_file_names + ] + if target_subject_seg := all(all_exist): gc_log.info( - f"Found resampled atlases:\n{[str(x) for x in resampled_atlases]}." - "\nSkipping group level mask generation step." + "Found resampled atlases:\n" + f"{[filepath for filepath in subject_seg_file_names]} " + f"in {subject_mask_dir}." + "\nSkipping individual segmentation generation step." 
) - return group_mask, resampled_atlases + return target_subject_mask, target_subject_seg diff --git a/giga_connectome/tests/test_mask.py b/giga_connectome/tests/test_mask.py index 9bd11f6..e6e55a3 100644 --- a/giga_connectome/tests/test_mask.py +++ b/giga_connectome/tests/test_mask.py @@ -6,22 +6,25 @@ from giga_connectome import mask -def test_generate_group_mask(): +def test_generate_subject_gm_mask(): """Generate group epi grey matter mask and resample atlas.""" + # use different subject in the test, should work the same data = datasets.fetch_development_fmri(n_subjects=3) imgs = data.func - group_epi_mask = mask.generate_group_mask(imgs) + group_epi_mask = mask.generate_subject_gm_mask(imgs) # match the post processing details: https://osf.io/wjtyq assert group_epi_mask.shape == (50, 59, 50) - diff_tpl = mask.generate_group_mask(imgs, template="MNI152NLin2009aAsym") + diff_tpl = mask.generate_subject_gm_mask( + imgs, template="MNI152NLin2009aAsym" + ) assert diff_tpl.shape == (50, 59, 50) # test bad inputs with pytest.raises( ValueError, match="TemplateFlow does not supply template blah" ): - mask.generate_group_mask(imgs, template="blah") + mask.generate_subject_gm_mask(imgs, template="blah") def test_check_mask_affine(): diff --git a/giga_connectome/tests/test_utils.py b/giga_connectome/tests/test_utils.py index b3b924f..a641b28 100644 --- a/giga_connectome/tests/test_utils.py +++ b/giga_connectome/tests/test_utils.py @@ -33,10 +33,15 @@ def test_check_check_filter(): assert "dseg" in str(msg.value) -def test_parse_bids_name(): - subject, session, specifier = utils.parse_bids_name( - "sub-01_ses-ah_task-rest_run-1_space-MNIfake_res-2_desc-preproc_bold.nii.gz" - ) +@pytest.mark.parametrize( + "source_file", + [ + "sub-01_ses-ah_task-rest_run-1_space-MNIfake_res-2_desc-preproc_bold.nii.gz", + "sub-01_ses-ah_task-rest_run-1_space-MNIfake_res-2_desc-brain_mask.nii.gz", + ], +) +def test_parse_bids_name(source_file): + subject, session, specifier = 
utils.parse_bids_name(source_file) assert subject == "sub-01" assert session == "ses-ah" assert specifier == "ses-ah_task-rest_run-1" @@ -53,3 +58,86 @@ def test_get_subject_lists(): ) assert len(subjects) == 1 assert subjects[0] == "01" + + +def test_output_filename(): + source_file = "sub-01_ses-ah_task-rest_run-1_space-MNIfake_res-2_desc-preproc_bold.nii.gz" + atlas = "fake" + atlas_desc = "100" + strategy = "simple" + + generated_target = utils.output_filename( + source_file=source_file, + atlas=atlas, + suffix="timeseries", + extension="tsv", + strategy=strategy, + atlas_desc=atlas_desc, + ) + assert ( + "sub-01_ses-ah_task-rest_run-1_seg-fake100_desc-denoiseSimple_timeseries.tsv" + == generated_target + ) + + generated_target = utils.output_filename( + source_file=source_file, + atlas=atlas, + suffix="timeseries", + extension="json", + strategy=strategy, + atlas_desc=atlas_desc, + ) + assert ( + "sub-01_ses-ah_task-rest_run-1_desc-denoiseSimple_timeseries.json" + == generated_target + ) + + generated_target = utils.output_filename( + source_file=source_file, + atlas=atlas, + suffix="relmat", + extension="tsv", + strategy=strategy, + atlas_desc=atlas_desc, + ) + assert ( + "sub-01_ses-ah_task-rest_run-1_seg-fake100_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv" + == generated_target + ) + + generated_target = utils.output_filename( + source_file=source_file, + atlas=atlas, + suffix="report", + extension="html", + strategy=strategy, + atlas_desc=atlas_desc, + ) + assert ( + "sub-01_ses-ah_task-rest_run-1_seg-fake100_desc-denoiseSimple_report.html" + == generated_target + ) + + source_file = "sub-01_ses-ah_task-rest_run-1_space-MNIfake_res-2_desc-brain_mask.nii.gz" + + generated_target = utils.output_filename( + source_file=source_file, + atlas=atlas, + suffix="dseg", + extension="nii.gz", + strategy="", + atlas_desc=atlas_desc, + ) + assert "sub-01_seg-fake100_dseg.nii.gz" == generated_target + + generated_target = utils.output_filename( + 
source_file=source_file, + atlas="", + suffix="mask", + extension="nii.gz", + strategy="", + atlas_desc="", + ) + assert ( + "sub-01_space-MNIfake_res-2_label-GM_mask.nii.gz" == generated_target + ) diff --git a/giga_connectome/utils.py b/giga_connectome/utils.py index 908d5bf..1ff0d6e 100644 --- a/giga_connectome/utils.py +++ b/giga_connectome/utils.py @@ -255,32 +255,33 @@ def output_filename( suffix: str, extension: str, strategy: str | None = None, - desc: str | None = None, + atlas_desc: str | None = None, ) -> str: """Generate output filneme.""" - root: str | list[str] = source_file.split("_")[:-1] + subject, session, specifier = parse_bids_name(source_file) + seg = f"seg-{atlas}{atlas_desc}" + if extension != "nii.gz": + root: str = f"{subject}_{specifier}" - # drop entities - # that are redundant or - # to make sure we get a single file across - root = [x for x in root if "desc" not in x] + if extension != "json": + root += f"_{seg}" - root = "_".join(root) - if root != "": - root += "_" + if suffix == "relmat": + root += "_meas-PearsonCorrelation" - root += f"atlas-{atlas}" + if strategy is None: + strategy = "" - if suffix == "relmat": - root += "_meas-PearsonCorrelation" - - if suffix == "timeseries" and extension == "json": - return f"{root}_timeseries.json" - - if strategy is None: - strategy = "" + return ( + f"{root}_desc-denoise{strategy.capitalize()}_{suffix}.{extension}" + ) - return f"{root}_desc-{desc}{strategy.capitalize()}_{suffix}.{extension}" + elif suffix == "mask": + reference = parse_bids_filename(source_file) + tpl: str = f"space-{reference['space']}_res-{reference['res']}" + return f"{subject}_{tpl}_label-GM_{suffix}.{extension}" + else: + return f"{subject}_{seg}_{suffix}.{extension}" def progress_bar(text: str, color: str = "green") -> Progress: From 8c4145bbb28d29ec67659c8787898e9556218d5c Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Fri, 17 May 2024 12:26:04 -0400 Subject: [PATCH 02/15] desc -> atlas_desc to make the source 
explicit --- giga_connectome/postprocess.py | 6 +++--- giga_connectome/workflow.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/giga_connectome/postprocess.py b/giga_connectome/postprocess.py index 37ceecc..5846433 100644 --- a/giga_connectome/postprocess.py +++ b/giga_connectome/postprocess.py @@ -184,7 +184,7 @@ def run_postprocessing_dataset( suffix="relmat", extension="tsv", strategy=strategy["name"], - desc=desc, + atlas_desc=desc, ) utils.check_path(relmat_filename) df = pd.DataFrame(correlation_matrix) @@ -197,7 +197,7 @@ def run_postprocessing_dataset( suffix="timeseries", extension="tsv", strategy=strategy["name"], - desc=desc, + atlas_desc=desc, ) utils.check_path(timeseries_filename) df = pd.DataFrame(time_series_atlas) @@ -210,7 +210,7 @@ def run_postprocessing_dataset( suffix="report", extension="html", strategy=strategy["name"], - desc=desc, + atlas_desc=desc, ) report.save_as_html(report_filename) diff --git a/giga_connectome/workflow.py b/giga_connectome/workflow.py index ac3ae8a..32cb77d 100644 --- a/giga_connectome/workflow.py +++ b/giga_connectome/workflow.py @@ -79,7 +79,7 @@ def workflow(args: argparse.Namespace) -> None: subj_data, _ = utils.get_bids_images( [subject], template, bids_dir, args.reindex_bids, bids_filters ) - group_mask, resampled_atlases = generate_gm_mask_atlas( + subject_mask_nii, subject_seg_niis = generate_gm_mask_atlas( working_dir, atlas, template, subj_data["mask"] ) @@ -88,9 +88,9 @@ def workflow(args: argparse.Namespace) -> None: run_postprocessing_dataset( strategy, atlas, - resampled_atlases, + subject_seg_niis, subj_data["bold"], - group_mask, + subject_mask_nii, standardize, smoothing_fwhm, output_dir, From 04b0be2c196bd1cdfff9e46da33d650f4f29deda Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Fri, 17 May 2024 12:39:57 -0400 Subject: [PATCH 03/15] parametise the output name test --- giga_connectome/tests/test_utils.py | 108 +++++++++++----------------- 1 file changed, 42 
insertions(+), 66 deletions(-) diff --git a/giga_connectome/tests/test_utils.py b/giga_connectome/tests/test_utils.py index a641b28..59940b4 100644 --- a/giga_connectome/tests/test_utils.py +++ b/giga_connectome/tests/test_utils.py @@ -60,84 +60,60 @@ def test_get_subject_lists(): assert subjects[0] == "01" -def test_output_filename(): +@pytest.mark.parametrize( + "suffix,extension,target", + [ + ( + "timeseries", + "tsv", + "sub-01_ses-ah_task-rest_run-1_seg-fake100_desc-denoiseSimple_timeseries.tsv", + ), + ( + "timeseries", + "json", + "sub-01_ses-ah_task-rest_run-1_desc-denoiseSimple_timeseries.json", + ), + ( + "relmat", + "tsv", + "sub-01_ses-ah_task-rest_run-1_seg-fake100_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv", + ), + ( + "report", + "html", + "sub-01_ses-ah_task-rest_run-1_seg-fake100_desc-denoiseSimple_report.html", + ), + ], +) +def test_output_filename(suffix, extension, target): source_file = "sub-01_ses-ah_task-rest_run-1_space-MNIfake_res-2_desc-preproc_bold.nii.gz" - atlas = "fake" - atlas_desc = "100" - strategy = "simple" - - generated_target = utils.output_filename( - source_file=source_file, - atlas=atlas, - suffix="timeseries", - extension="tsv", - strategy=strategy, - atlas_desc=atlas_desc, - ) - assert ( - "sub-01_ses-ah_task-rest_run-1_seg-fake100_desc-denoiseSimple_timeseries.tsv" - == generated_target - ) generated_target = utils.output_filename( source_file=source_file, - atlas=atlas, - suffix="timeseries", - extension="json", - strategy=strategy, - atlas_desc=atlas_desc, - ) - assert ( - "sub-01_ses-ah_task-rest_run-1_desc-denoiseSimple_timeseries.json" - == generated_target + atlas="fake", + suffix=suffix, + extension=extension, + strategy="simple", + atlas_desc="100", ) + assert target == generated_target - generated_target = utils.output_filename( - source_file=source_file, - atlas=atlas, - suffix="relmat", - extension="tsv", - strategy=strategy, - atlas_desc=atlas_desc, - ) - assert ( - 
"sub-01_ses-ah_task-rest_run-1_seg-fake100_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv" - == generated_target - ) - - generated_target = utils.output_filename( - source_file=source_file, - atlas=atlas, - suffix="report", - extension="html", - strategy=strategy, - atlas_desc=atlas_desc, - ) - assert ( - "sub-01_ses-ah_task-rest_run-1_seg-fake100_desc-denoiseSimple_report.html" - == generated_target - ) +@pytest.mark.parametrize( + "atlas,atlas_desc,suffix,target", + [ + ("fake", "100", "dseg", "sub-01_seg-fake100_dseg.nii.gz"), + ("", "", "mask", "sub-01_space-MNIfake_res-2_label-GM_mask.nii.gz"), + ], +) +def test_output_filename_seg(atlas, atlas_desc, suffix, target): source_file = "sub-01_ses-ah_task-rest_run-1_space-MNIfake_res-2_desc-brain_mask.nii.gz" - generated_target = utils.output_filename( source_file=source_file, atlas=atlas, - suffix="dseg", + suffix=suffix, extension="nii.gz", strategy="", atlas_desc=atlas_desc, ) - assert "sub-01_seg-fake100_dseg.nii.gz" == generated_target - - generated_target = utils.output_filename( - source_file=source_file, - atlas="", - suffix="mask", - extension="nii.gz", - strategy="", - atlas_desc="", - ) - assert ( - "sub-01_space-MNIfake_res-2_label-GM_mask.nii.gz" == generated_target - ) + assert target == generated_target From 57499885ba1a92577bd047f5e5b9ad5a4160bb3a Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Fri, 17 May 2024 14:33:58 -0400 Subject: [PATCH 04/15] ensure the file name changes are reflected in the workflow --- ...er20187Networks.json => Schaefer2018.json} | 2 +- giga_connectome/mask.py | 4 ++-- giga_connectome/postprocess.py | 21 +++++++++-------- giga_connectome/run.py | 12 +++++----- giga_connectome/tests/test_cli.py | 23 +++++++++---------- giga_connectome/workflow.py | 6 ++--- 6 files changed, 34 insertions(+), 34 deletions(-) rename giga_connectome/data/atlas/{Schaefer20187Networks.json => Schaefer2018.json} (91%) diff --git a/giga_connectome/data/atlas/Schaefer20187Networks.json 
b/giga_connectome/data/atlas/Schaefer2018.json similarity index 91% rename from giga_connectome/data/atlas/Schaefer20187Networks.json rename to giga_connectome/data/atlas/Schaefer2018.json index c3211b4..e90b6e6 100644 --- a/giga_connectome/data/atlas/Schaefer20187Networks.json +++ b/giga_connectome/data/atlas/Schaefer2018.json @@ -1,5 +1,5 @@ { - "name": "Schaefer20187Networks", + "name": "Schaefer2018", "parameters": { "atlas": "Schaefer2018", "template": "MNI152NLin2009cAsym", diff --git a/giga_connectome/mask.py b/giga_connectome/mask.py index 8b7c81d..db0dbbf 100644 --- a/giga_connectome/mask.py +++ b/giga_connectome/mask.py @@ -27,7 +27,7 @@ def generate_gm_mask_atlas( - working_dir: Path, + atlases_dir: Path, atlas: ATLAS_SETTING_TYPE, template: str, masks: list[BIDSImageFile], @@ -36,7 +36,7 @@ def generate_gm_mask_atlas( # check masks; isolate this part and make sure to make it a validate # templateflow template with a config file subject, _, _ = utils.parse_bids_name(masks[0].path) - subject_mask_dir = working_dir / subject / "func" + subject_mask_dir = atlases_dir / subject / "func" subject_mask_dir.mkdir(exist_ok=True, parents=True) target_subject_mask_file_name: str = utils.output_filename( source_file=masks[0].path, diff --git a/giga_connectome/postprocess.py b/giga_connectome/postprocess.py index 5846433..82b07d7 100644 --- a/giga_connectome/postprocess.py +++ b/giga_connectome/postprocess.py @@ -102,9 +102,9 @@ def run_postprocessing_dataset( for atlas_path in resampled_atlases: if isinstance(atlas_path, str): atlas_path = Path(atlas_path) - desc = atlas_path.name.split("desc-")[-1].split("_")[0] - atlas_maskers[desc] = _get_masker(atlas_path) - connectomes[desc] = [] + seg = atlas_path.name.split("seg-")[-1].split("_")[0] + atlas_maskers[seg] = _get_masker(atlas_path) + connectomes[seg] = [] correlation_measure = ConnectivityMeasure( kind="correlation", vectorize=False, discard_diagonal=False @@ -136,12 +136,14 @@ def run_postprocessing_dataset( 
connectome_path = connectome_path / session connectome_path = connectome_path / "func" - # All timeseries derivatives have the same metadata - # so one json file for them all. + # All timeseries derivatives of the same scan have the same + # metadata so one json file for them all. # see https://bids.neuroimaging.io/bep012 json_filename = connectome_path / utils.output_filename( source_file=Path(img.filename).stem, atlas=atlas["name"], + atlas_desc="", + strategy=strategy["name"], suffix="timeseries", extension="json", ) @@ -154,14 +156,11 @@ def run_postprocessing_dataset( with open(json_filename, "w") as f: json.dump(meta_data, f, indent=4) - for desc, masker in atlas_maskers.items(): + for seg, masker in atlas_maskers.items(): if not denoised_img: time_series_atlas, correlation_matrix = None, None - attribute_name = ( - f"{subject}_{specifier}" - f"_atlas-{atlas['name']}_desc-{desc}" - ) + attribute_name = f"{subject}_{specifier}" f"_seg-{seg}" gc_log.info(f"{attribute_name}: no volume after scrubbing") progress.update(task, advance=1) continue @@ -177,6 +176,8 @@ def run_postprocessing_dataset( ) ) + # reverse engineer atlas_desc + desc = seg.split(atlas["name"])[-1] # dump correlation_matrix to tsv relmat_filename = connectome_path / utils.output_filename( source_file=Path(img.filename).stem, diff --git a/giga_connectome/run.py b/giga_connectome/run.py index e9bc83f..d7cc36e 100644 --- a/giga_connectome/run.py +++ b/giga_connectome/run.py @@ -51,20 +51,20 @@ def global_parser() -> argparse.ArgumentParser: nargs="+", ) parser.add_argument( - "-w", - "--work-dir", + "-a", + "--atlases-dir", action="store", type=Path, - default=Path("work").absolute(), - help="Path where intermediate results should be stored.", + default=Path("atlases").absolute(), + help="Path where subject specific segmentations are stored.", ) parser.add_argument( "--atlas", help="The choice of atlas for time series extraction. Default atlas " f"choices are: {preset_atlas}. 
User can pass " "a path to a json file containing configuration for their own choice " - "of atlas. The default is 'Schaefer20187Networks'.", - default="Schaefer20187Networks", + "of atlas. The default is 'Schaefer2018'.", + default="Schaefer2018", ) parser.add_argument( "--denoise-strategy", diff --git a/giga_connectome/tests/test_cli.py b/giga_connectome/tests/test_cli.py index 85c79ea..f19ec7e 100644 --- a/giga_connectome/tests/test_cli.py +++ b/giga_connectome/tests/test_cli.py @@ -39,7 +39,7 @@ def test_smoke(tmp_path, capsys): "data/test_data/ds000017-fmriprep22.0.1-downsampled-nosurface", ) output_dir = tmp_path / "output" - work_dir = tmp_path / "output/work" + atlases_dir = tmp_path / "atlases" if not Path(output_dir).exists: Path(output_dir).mkdir() @@ -49,9 +49,9 @@ def test_smoke(tmp_path, capsys): "--participant_label", "1", "-w", - str(work_dir), + str(atlases_dir), "--atlas", - "Schaefer20187Networks", + "Schaefer2018", "--denoise-strategy", "simple", "--reindex-bids", @@ -66,27 +66,26 @@ def test_smoke(tmp_path, capsys): base = ( "sub-1_ses-timepoint1_task-probabilisticclassification" - "_run-01_space-MNI152NLin2009cAsym_res-2" - "_atlas-Schaefer20187Networks" + "_run-01_seg-Schaefer2018100Parcels7Networks" + ) + ts_base = ( + "sub-1_ses-timepoint1_task-probabilisticclassification" + "_run-01_desc-denoiseSimple" ) - relmat_file = output_folder / ( - base - + "_meas-PearsonCorrelation" - + "_desc-100Parcels7NetworksSimple_relmat.tsv" + base + "_meas-PearsonCorrelation" + "_desc-denoiseSimple_relmat.tsv" ) assert relmat_file.exists() relmat = pd.read_csv(relmat_file, sep="\t") assert len(relmat) == 100 - - json_file = relmat_file = output_folder / (base + "_timeseries.json") + json_file = relmat_file = output_folder / (ts_base + "_timeseries.json") assert json_file.exists() with open(json_file, "r") as f: content = json.load(f) assert content.get("SamplingFrequency") == 0.5 timeseries_file = relmat_file = output_folder / ( - base + 
"_desc-100Parcels7NetworksSimple_timeseries.tsv" + base + "_desc-denoiseSimple_timeseries.tsv" ) assert timeseries_file.exists() timeseries = pd.read_csv(timeseries_file, sep="\t") diff --git a/giga_connectome/workflow.py b/giga_connectome/workflow.py index 32cb77d..8724100 100644 --- a/giga_connectome/workflow.py +++ b/giga_connectome/workflow.py @@ -37,7 +37,7 @@ def workflow(args: argparse.Namespace) -> None: # set file paths bids_dir = args.bids_dir output_dir = args.output_dir - working_dir = args.work_dir + atlases_dir = args.atlases_dir standardize = True # always standardising the time series smoothing_fwhm = args.smoothing_fwhm calculate_average_correlation = ( @@ -54,7 +54,7 @@ def workflow(args: argparse.Namespace) -> None: # check output path output_dir.mkdir(parents=True, exist_ok=True) - working_dir.mkdir(parents=True, exist_ok=True) + atlases_dir.mkdir(parents=True, exist_ok=True) # get template information; currently we only support the fmriprep defaults template = ( @@ -80,7 +80,7 @@ def workflow(args: argparse.Namespace) -> None: [subject], template, bids_dir, args.reindex_bids, bids_filters ) subject_mask_nii, subject_seg_niis = generate_gm_mask_atlas( - working_dir, atlas, template, subj_data["mask"] + atlases_dir, atlas, template, subj_data["mask"] ) gc_log.info(f"Generate subject level connectomes: sub-{subject}") From b2dac1d55723e2e3b6708db5866516c30d96f044 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Fri, 17 May 2024 14:47:11 -0400 Subject: [PATCH 05/15] FIX test should reflect new changw --- giga_connectome/tests/test_atlas.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/giga_connectome/tests/test_atlas.py b/giga_connectome/tests/test_atlas.py index 62c494f..d2ee8e3 100644 --- a/giga_connectome/tests/test_atlas.py +++ b/giga_connectome/tests/test_atlas.py @@ -2,7 +2,7 @@ def test_load_atlas_setting(): - atlas_config = load_atlas_setting("Schaefer20187Networks") - assert atlas_config["name"] == 
"Schaefer20187Networks" + atlas_config = load_atlas_setting("Schaefer2018") + assert atlas_config["name"] == "Schaefer2018" atlas_config = load_atlas_setting("HarvardOxfordCortical") assert atlas_config["name"] == "HarvardOxfordCortical" From c77bc007196611b279d73ec6fb49e701854a6fcc Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Fri, 17 May 2024 15:31:27 -0400 Subject: [PATCH 06/15] Update documentations related to introducing seg entity --- .github/workflows/docker.yml | 4 +- docs/source/changes.md | 6 +- docs/source/outputs.md | 145 ++++++++++++++++++++---------- docs/source/usage.md | 12 +-- giga_connectome/tests/test_cli.py | 2 +- 5 files changed, 111 insertions(+), 58 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index f60a0f2..769a310 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -92,12 +92,12 @@ jobs: docker run --rm \ -v ${{ env.DATA }}:/test_data \ -v ./outputs:/outputs \ - -v ./outputs/working_dir:/work \ + -v ./outputs/atlases:/atlases \ ${{env.USER_NAME}}/${{env.REPO_NAME}} \ /test_data/ds000017-fmriprep22.0.1-downsampled-nosurface \ /outputs \ participant \ - -w /work \ + -a /atlases \ --atlas ${{ matrix.atlas }} \ --participant_label 1 \ --reindex-bids diff --git a/docs/source/changes.md b/docs/source/changes.md index e7a2b54..3f3e9e4 100644 --- a/docs/source/changes.md +++ b/docs/source/changes.md @@ -1,11 +1,12 @@ # What’s new -## 0.5.1.dev +## 0.6.0.dev **Released MONTH YEAR** ### New +- [EHN] `--work-dir` is now renamed to `--atlases-dir` - [EHN] Add details of denoising strategy to the meta data of the time series extraction. (@htwangtw) [#144](https://github.com/bids-apps/giga_connectome/issues/144) ### Fixes @@ -17,6 +18,9 @@ ### Changes +- [EHN] Merge `atlas-` and the atlas description `desc-` into one filed `seg-` defined under 'Derivatives-Image data type' in BIDS. 
(@htwangtw) [#143](https://github.com/bids-apps/giga_connectome/issues/143) +- [EHN] Working directory is now renamed as `atlases/` to reflect on the atlases directory mentioned in BEP017. + ## 0.5.0 Released April 2024 diff --git a/docs/source/outputs.md b/docs/source/outputs.md index 33fe34a..8248f65 100644 --- a/docs/source/outputs.md +++ b/docs/source/outputs.md @@ -16,12 +16,12 @@ the output will be save in `sub-/[ses-]/func`. For each input image (that is, preprocessed BOLD time series) and each atlas the following data files will be generated -- a `[matches]_atlas-{atlas}_meas-PearsonCorrelation_desc-{atlas_description}{denoise_strategy}_relmat.tsv` +- a `[matches]_seg-{atlas}{atlas_description}_meas-PearsonCorrelation_desc-denoise{denoise_strategy}_relmat.tsv` file that contains the correlation matrix between all the regions of the atlas -- a `[matches]_atlas-{atlas}_desc-{atlas description}{denoise_strategy}_timeseries.tsv` +- a `[matches]_seg-{atlas}{atlas_description}_desc-denoise{denoise_strategy}_timeseries.tsv` file that contains the extracted timeseries for each region of the atlas -- `{atlas}` refers to the name of the atlas used (for example, `Schaefer20187Networks`) +- `{atlas}` refers to the name of the atlas used (for example, `Schaefer2018`) - `{atlas_description}` refers to the sub type of atlas used (for example, `100Parcels7Networks`) - `{denoise_strategy}` refers to the denoise strategy passed to the command line @@ -31,7 +31,7 @@ A JSON file is generated in the root of the output dataset (`meas-PearsonCorrela that contains metadata applicable to all `relmat.tsv` files. 
For each input image (that is, preprocessed BOLD time series) -a `[matches]_atlas-{atlas}_timeseries.json` +a `[matches]_desc-denoise{denoise_strategy}_timeseries.json` ### Example @@ -43,51 +43,100 @@ a `[matches]_atlas-{atlas}_timeseries.json` ├── sub-1 │   ├── ses-timepoint1 │   │   └── func -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv -│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv -│   │   └── 
sub-1_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_desc-denoiseSimple_timeseries.json +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_desc-denoiseSimple_timeseries.json +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +│   │   ├── sub-1_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│   │   └── 
sub-1_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv │   └── ses-timepoint2 │   └── func -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv -│   ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv -│   └── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json +│      ├── 
sub-1_ses-timepoint2_task-probabilisticclassification_run-01_desc-denoiseSimple_timeseries.json +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_desc-denoiseSimple_timeseries.json +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +│      ├── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +│      └── sub-1_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv └── sub-2 - 
├── ses-timepoint1 - │   └── func - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv - │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv - │   └── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json - └── ses-timepoint2 - └── func - ├── 
sub-2_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-100Parcels7NetworksSimple_timeseries.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_desc-200Parcels7NetworksSimple_timeseries.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-100Parcels7NetworksSimple_relmat.tsv - ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_meas-PearsonCorrelation_desc-200Parcels7NetworksSimple_relmat.tsv - └── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_space-MNI152NLin2009cAsym_res-2_atlas-Schaefer20187Networks_timeseries.json +    ├── ses-timepoint1 +    │   └── func +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_desc-denoiseSimple_timeseries.json +    │   ├── 
sub-2_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_desc-denoiseSimple_timeseries.json +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +    │   ├── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +    │   └── sub-2_ses-timepoint1_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +    └── ses-timepoint2 +    └── func +       ├── 
sub-2_ses-timepoint2_task-probabilisticclassification_run-01_desc-denoiseSimple_timeseries.json +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-01_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_desc-denoiseSimple_timeseries.json +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_report.html +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_desc-denoiseSimple_timeseries.tsv +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018100Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_report.html +       ├── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_desc-denoiseSimple_timeseries.tsv +       └── sub-2_ses-timepoint2_task-probabilisticclassification_run-02_seg-Schaefer2018200Parcels7Networks_meas-PearsonCorrelation_desc-denoiseSimple_relmat.tsv +``` + + 
+## Atlases + +The merged grey matter masks per subject and the atlases resampled to the individual EPI data are in the directory specified at `--atlases_dir`. + +For each subject and each atlas the following data files will be generated + +- a `sub-_space-MNI152NLin2009cAsym_res-2_label-GM_mask.nii.gz` + Grey matter mask in the dedicated space for a given subject, + created from merging all the EPI brain masks of a given subject, + and converges with the grey matter mask of the given space. +- `sub-_seg-{atlas}{atlas_description}_[dseg|probseg].nii.gz` + files where the atlas were sampled to `sub-_space-MNI152NLin2009cAsym_res-2_label-GM_mask.nii.gz` + for each individual. + +- `{atlas}` refers to the name of the atlas used (for example, `Schaefer2018`) +- `{atlas_description}` refers to the sub type of atlas used (for example, `100Parcels7Networks`) + +### Example + +``` +└── sub-1 + └── func + ├── sub-1_seg-Schaefer2018100Parcels7Networks_dseg.nii.gz + ├── sub-1_seg-Schaefer2018200Parcels7Networks_dseg.nii.gz + ├── sub-1_seg-Schaefer2018300Parcels7Networks_dseg.nii.gz + ├── sub-1_seg-Schaefer2018400Parcels7Networks_dseg.nii.gz + ├── sub-1_seg-Schaefer2018500Parcels7Networks_dseg.nii.gz + ├── sub-1_seg-Schaefer2018600Parcels7Networks_dseg.nii.gz + ├── sub-1_seg-Schaefer2018800Parcels7Networks_dseg.nii.gz + └── sub-1_space-MNI152NLin2009cAsym_res-2_label-GM_mask.nii.gz ``` diff --git a/docs/source/usage.md b/docs/source/usage.md index 93843f1..a08eec1 100644 --- a/docs/source/usage.md +++ b/docs/source/usage.md @@ -47,7 +47,7 @@ An example using Apptainer (formerly known as Singularity): ```bash FMRIPREP_DIR=/path/to/fmriprep_output OUTPUT_DIR=/path/to/connectom_output -WORKING_DIR=/path/to/working_dir +ATLASES_DIR=/path/to/atlases DENOISE_CONFIG=/path/to/denoise_config.json GIGA_CONNECTOME=/path/to/giga-connectome.simg @@ -55,10 +55,10 @@ GIGA_CONNECTOME=/path/to/giga-connectome.simg apptainer run \ --bind ${FMRIPREP_DIR}:/data/input \ --bind 
${OUTPUT_DIR}:/data/output \ - --bind ${WORKING_DIR}:/data/working \ + --bind ${ATLASES_DIR}:/data/atlases \ --bind ${DENOISE_CONFIG}:/data/denoise_config.json \ ${GIGA_CONNECTOME} \ - -w /data/working \ + -a /data/atlases \ --denoise-strategy /data/denoise_config.json \ /data/input \ /data/output \ @@ -128,7 +128,7 @@ An example using Apptainer (formerly known as Singularity): ```bash FMRIPREP_DIR=/path/to/fmriprep_output OUTPUT_DIR=/path/to/connectom_output -WORKING_DIR=/path/to/working_dir +ATLASES_DIR=/path/to/atlases ATLAS_CONFIG=/path/to/atlas_config.json GIGA_CONNECTOME=/path/to/giga-connectome.simg @@ -138,10 +138,10 @@ export APPTAINERENV_TEMPLATEFLOW_HOME=/data/atlas apptainer run \ --bind ${FMRIPREP_DIR}:/data/input \ --bind ${OUTPUT_DIR}:/data/output \ - --bind ${WORKING_DIR}:/data/working \ + --bind ${ATLASES_DIR}:/data/atlases \ --bind ${ATLAS_CONFIG}:/data/atlas_config.json \ ${GIGA_CONNECTOME} \ - -w /data/working \ + -s /data/atlases \ --atlas /data/atlas_config.json \ /data/input \ /data/output \ diff --git a/giga_connectome/tests/test_cli.py b/giga_connectome/tests/test_cli.py index f19ec7e..4d10879 100644 --- a/giga_connectome/tests/test_cli.py +++ b/giga_connectome/tests/test_cli.py @@ -48,7 +48,7 @@ def test_smoke(tmp_path, capsys): [ "--participant_label", "1", - "-w", + "-a", str(atlases_dir), "--atlas", "Schaefer2018", From acc0c11bf827a6a9daf94b4a94f816a9c9e541a2 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Fri, 17 May 2024 15:36:23 -0400 Subject: [PATCH 07/15] Reflect the change of Schaefer atlas name in github workflow --- .github/workflows/docker.yml | 2 +- docs/source/changes.md | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 769a310..af98f52 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -71,7 +71,7 @@ jobs: needs: [download-test-data, docker-build] strategy: matrix: - atlas: ['Schaefer20187Networks', 'MIST', 
'DiFuMo', 'HarvardOxfordCortical', 'HarvardOxfordCorticalSymmetricSplit', 'HarvardOxfordSubcortical'] + atlas: ['Schaefer2018', 'MIST', 'DiFuMo', 'HarvardOxfordCortical', 'HarvardOxfordCorticalSymmetricSplit', 'HarvardOxfordSubcortical'] steps: - uses: actions/checkout@v4 with: diff --git a/docs/source/changes.md b/docs/source/changes.md index 3f3e9e4..f2a94eb 100644 --- a/docs/source/changes.md +++ b/docs/source/changes.md @@ -6,7 +6,8 @@ ### New -- [EHN] `--work-dir` is now renamed to `--atlases-dir` +- [EHN] Default atlas `Schaefer20187Networks` is renamed to `Schaefer2018`. (@htwangtw) +- [EHN] `--work-dir` is now renamed to `--atlases-dir`. (@htwangtw) - [EHN] Add details of denoising strategy to the meta data of the time series extraction. (@htwangtw) [#144](https://github.com/bids-apps/giga_connectome/issues/144) ### Fixes From 13dbd3090a5f417b9f76c652bea1455e9cbb5dec Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Tue, 28 May 2024 15:56:27 -0400 Subject: [PATCH 08/15] [full_test] test deprication --- giga_connectome/run.py | 25 +++++++++++++++++++++++++ giga_connectome/tests/test_cli.py | 5 +++++ 2 files changed, 30 insertions(+) diff --git a/giga_connectome/run.py b/giga_connectome/run.py index d7cc36e..1b1c150 100644 --- a/giga_connectome/run.py +++ b/giga_connectome/run.py @@ -7,8 +7,27 @@ from giga_connectome import __version__ from giga_connectome.workflow import workflow from giga_connectome.atlas import get_atlas_labels +from giga_connectome.logger import gc_logger + +gc_log = gc_logger() preset_atlas = get_atlas_labels() +deprecations = { + # parser attribute name: (replacement flag, version slated to be removed in) + 'work-dir': ('--atlases-dir', '0.7.0'), +} + +class DeprecatedAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + new_opt, rem_vers = deprecations.get(self.dest, (None, None)) + msg = ( + f"{self.option_strings} has been deprecated and will be removed in " + f"{rem_vers or 'a later 
version'}."
+        )
+        if new_opt:
+            msg += f' Please use `{new_opt}` instead.'
+        gc_log.warning(msg)
+        delattr(namespace, self.dest)


 def global_parser() -> argparse.ArgumentParser:
@@ -58,6 +77,12 @@ def global_parser() -> argparse.ArgumentParser:
         default=Path("atlases").absolute(),
         help="Path where subject specific segmentations are stored.",
     )
+    parser.add_argument(
+        "-w",
+        "--work-dir",
+        action=DeprecatedAction,
+        help="This argument is deprecated. Please use --atlases-dir instead.",
+    )
     parser.add_argument(
         "--atlas",
         help="The choice of atlas for time series extraction. Default atlas "
diff --git a/giga_connectome/tests/test_cli.py b/giga_connectome/tests/test_cli.py
index 4d10879..86d6cfc 100644
--- a/giga_connectome/tests/test_cli.py
+++ b/giga_connectome/tests/test_cli.py
@@ -40,6 +40,7 @@ def test_smoke(tmp_path, capsys):
     )
     output_dir = tmp_path / "output"
     atlases_dir = tmp_path / "atlases"
+    work_dir = tmp_path / "work"

     if not Path(output_dir).exists:
         Path(output_dir).mkdir()
@@ -48,6 +49,8 @@ def test_smoke(tmp_path, capsys):
         [
             "--participant_label",
             "1",
+            "-w",
+            str(work_dir),
             "-a",
             str(atlases_dir),
             "--atlas",
@@ -61,6 +64,8 @@ def test_smoke(tmp_path, capsys):
             "participant",
         ]
     )
+    captured = capsys.readouterr()
+    assert "has been deprecated" in captured.out

     output_folder = output_dir / "sub-1" / "ses-timepoint1" / "func"


From 70f9ddc87905a85ea566315c682a21dd8387a047 Mon Sep 17 00:00:00 2001
From: Hao-Ting Wang
Date: Tue, 28 May 2024 16:13:12 -0400
Subject: [PATCH 09/15] FIX type of the new deprecation action class

---
 giga_connectome/run.py | 22 +++++++++++++++-------
 1 file changed, 15 insertions(+), 7 deletions(-)

diff --git a/giga_connectome/run.py b/giga_connectome/run.py
index 1b1c150..156594e 100644
--- a/giga_connectome/run.py
+++ b/giga_connectome/run.py
@@ -1,5 +1,5 @@
 from __future__ import annotations
-
+from typing import Any
 import argparse
 from pathlib import Path
 from typing import Sequence
@@ -13,19 +13,27 @@ preset_atlas = 
get_atlas_labels() deprecations = { - # parser attribute name: (replacement flag, version slated to be removed in) - 'work-dir': ('--atlases-dir', '0.7.0'), + # parser attribute name: + # (replacement flag, version slated to be removed in) + "work-dir": ("--atlases-dir", "0.7.0"), } + class DeprecatedAction(argparse.Action): - def __call__(self, parser, namespace, values, option_string=None): + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + values: str | Sequence[Any] | None, + option_string: str | None = None, + ) -> None: new_opt, rem_vers = deprecations.get(self.dest, (None, None)) msg = ( - f"{self.option_strings} has been deprecated and will be removed in " - f"{rem_vers or 'a later version'}." + f"{self.option_strings} has been deprecated and will be removed " + f"in {rem_vers or 'a later version'}." ) if new_opt: - msg += f' Please use `{new_opt}` instead.' + msg += f" Please use `{new_opt}` instead." gc_log.warning(msg) delattr(namespace, self.dest) From 04f06691ae4a65414def75500ff642d52647b72a Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Tue, 28 May 2024 16:36:36 -0400 Subject: [PATCH 10/15] [full_test] deprecation warning for atlas name --- giga_connectome/atlas.py | 18 ++++++++++++++++++ giga_connectome/tests/test_cli.py | 4 ++-- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/giga_connectome/atlas.py b/giga_connectome/atlas.py index 2cd5d13..7968c53 100644 --- a/giga_connectome/atlas.py +++ b/giga_connectome/atlas.py @@ -30,6 +30,11 @@ {"name": str, "file_paths": Dict[str, List[Path]], "type": str}, ) +deprecations = { + # parser attribute name: + # (replacement, version slated to be removed in) + "Schaefer20187Networks": ("Schaefer2018", "0.7.0"), +} def load_atlas_setting( atlas: str | Path | dict[str, Any], @@ -171,9 +176,18 @@ def _check_altas_config( KeyError atlas configuration not containing the correct keys. 
""" + if atlas in deprecations: + new_name, version = deprecations[atlas] + gc_log.warning( + f"{atlas} has been deprecated and will be removed in " + f"{version}. Please use {new_name} instead." + ) + atlas = new_name + # load the file first if the input is not already a dictionary atlas_dir = resource_filename("giga_connectome", "data/atlas") preset_atlas = [p.stem for p in Path(atlas_dir).glob("*.json")] + if isinstance(atlas, (str, Path)): if atlas in preset_atlas: config_path = Path( @@ -183,6 +197,10 @@ def _check_altas_config( ) elif Path(atlas).exists(): config_path = Path(atlas) + else: + raise FileNotFoundError( + f"Atlas configuration file {atlas} not found." + ) with open(config_path, "r") as file: atlas_config = json.load(file) diff --git a/giga_connectome/tests/test_cli.py b/giga_connectome/tests/test_cli.py index 86d6cfc..ffc5569 100644 --- a/giga_connectome/tests/test_cli.py +++ b/giga_connectome/tests/test_cli.py @@ -54,7 +54,7 @@ def test_smoke(tmp_path, capsys): "-a", str(atlases_dir), "--atlas", - "Schaefer2018", + "Schaefer20187Networks", # use Schaefer2018 when updating 0.7.0 "--denoise-strategy", "simple", "--reindex-bids", @@ -65,7 +65,7 @@ def test_smoke(tmp_path, capsys): ] ) captured = capsys.readouterr() - assert "has been deprecated" in captured.out + assert "has been deprecated" in captured.out.split()[0] output_folder = output_dir / "sub-1" / "ses-timepoint1" / "func" From 55363fed73bd726bb01420ee9475e481760de6a8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 28 May 2024 20:36:52 +0000 Subject: [PATCH 11/15] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- giga_connectome/atlas.py | 1 + 1 file changed, 1 insertion(+) diff --git a/giga_connectome/atlas.py b/giga_connectome/atlas.py index 7968c53..c745787 100644 --- a/giga_connectome/atlas.py +++ b/giga_connectome/atlas.py @@ -36,6 +36,7 @@ 
"Schaefer20187Networks": ("Schaefer2018", "0.7.0"), } + def load_atlas_setting( atlas: str | Path | dict[str, Any], ) -> ATLAS_SETTING_TYPE: From 63aa083716130d8d2ac134f971efa2b193bbb923 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Tue, 28 May 2024 16:40:09 -0400 Subject: [PATCH 12/15] [full_test] check type of potentially depricated atlas input --- giga_connectome/atlas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/giga_connectome/atlas.py b/giga_connectome/atlas.py index c745787..106f663 100644 --- a/giga_connectome/atlas.py +++ b/giga_connectome/atlas.py @@ -177,7 +177,7 @@ def _check_altas_config( KeyError atlas configuration not containing the correct keys. """ - if atlas in deprecations: + if isinstance(atlas, str) and atlas in deprecations: new_name, version = deprecations[atlas] gc_log.warning( f"{atlas} has been deprecated and will be removed in " From a6f218b3aa4a6b78fbd060f0b5da3b3f19db4abd Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Tue, 28 May 2024 17:17:20 -0400 Subject: [PATCH 13/15] [full_test] TEST capture log correctly --- giga_connectome/tests/test_atlas.py | 5 ++++- giga_connectome/tests/test_cli.py | 7 +++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/giga_connectome/tests/test_atlas.py b/giga_connectome/tests/test_atlas.py index d2ee8e3..be1b66b 100644 --- a/giga_connectome/tests/test_atlas.py +++ b/giga_connectome/tests/test_atlas.py @@ -1,7 +1,10 @@ from giga_connectome.atlas import load_atlas_setting -def test_load_atlas_setting(): +def test_load_atlas_setting(capsys): + # use Schaefer2018 when updating 0.7.0 + atlas_config = load_atlas_setting("Schaefer20187Networks") + assert atlas_config["name"] == "Schaefer2018" atlas_config = load_atlas_setting("Schaefer2018") assert atlas_config["name"] == "Schaefer2018" atlas_config = load_atlas_setting("HarvardOxfordCortical") diff --git a/giga_connectome/tests/test_cli.py b/giga_connectome/tests/test_cli.py index ffc5569..7956c72 100644 --- 
a/giga_connectome/tests/test_cli.py
+++ b/giga_connectome/tests/test_cli.py
@@ -33,7 +33,7 @@ def test_help(capsys):


 @pytest.mark.smoke
-def test_smoke(tmp_path, capsys):
+def test_smoke(tmp_path, caplog):
     bids_dir = resource_filename(
         "giga_connectome",
         "data/test_data/ds000017-fmriprep22.0.1-downsampled-nosurface",
@@ -54,7 +54,7 @@ def test_smoke(tmp_path, capsys):
             "-a",
             str(atlases_dir),
             "--atlas",
-            "Schaefer20187Networks",  # use Schaefer2018 when updating 0.7.0
+            "Schaefer2018",
             "--denoise-strategy",
             "simple",
             "--reindex-bids",
@@ -64,8 +64,7 @@ def test_smoke(tmp_path, capsys):
             "participant",
         ]
     )
-    captured = capsys.readouterr()
-    assert "has been deprecated" in captured.out.split()[0]
+    assert "has been deprecated" in caplog.text.splitlines()[0]

     output_folder = output_dir / "sub-1" / "ses-timepoint1" / "func"


From 3b388142fdfd40f9fd3f76c8b993434cc5021cd0 Mon Sep 17 00:00:00 2001
From: Hao-Ting Wang
Date: Tue, 28 May 2024 17:38:03 -0400
Subject: [PATCH 14/15] add deprecation related details to doc

---
 docs/source/changes.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/source/changes.md b/docs/source/changes.md
index f2a94eb..6fc2728 100644
--- a/docs/source/changes.md
+++ b/docs/source/changes.md
@@ -6,8 +6,8 @@

 ### New

-- [EHN] Default atlas `Schaefer20187Networks` is renamed to `Schaefer2018`. (@htwangtw)
-- [EHN] `--work-dir` is now renamed to `--atlases-dir`. (@htwangtw)
+- [EHN] Default atlas `Schaefer20187Networks` is renamed to `Schaefer2018`. `Schaefer20187Networks` will be deprecated in 0.7.0. (@htwangtw)
+- [EHN] `--work-dir` is now renamed to `--atlases-dir`. `--work-dir` will be deprecated in 0.7.0. (@htwangtw)
 - [EHN] Add details of denoising strategy to the meta data of the time series extraction. 
(@htwangtw) [#144](https://github.com/bids-apps/giga_connectome/issues/144) ### Fixes From 18c1384eefbdb9879d515e39a30d858991452c21 Mon Sep 17 00:00:00 2001 From: Hao-Ting Wang Date: Tue, 28 May 2024 17:47:43 -0400 Subject: [PATCH 15/15] TEST improve coverage --- giga_connectome/tests/test_atlas.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/giga_connectome/tests/test_atlas.py b/giga_connectome/tests/test_atlas.py index be1b66b..572d8cd 100644 --- a/giga_connectome/tests/test_atlas.py +++ b/giga_connectome/tests/test_atlas.py @@ -1,7 +1,9 @@ from giga_connectome.atlas import load_atlas_setting +import pytest +from pkg_resources import resource_filename -def test_load_atlas_setting(capsys): +def test_load_atlas_setting(): # use Schaefer2018 when updating 0.7.0 atlas_config = load_atlas_setting("Schaefer20187Networks") assert atlas_config["name"] == "Schaefer2018" @@ -9,3 +11,7 @@ def test_load_atlas_setting(capsys): assert atlas_config["name"] == "Schaefer2018" atlas_config = load_atlas_setting("HarvardOxfordCortical") assert atlas_config["name"] == "HarvardOxfordCortical" + pytest.raises(FileNotFoundError, load_atlas_setting, "blah") + json_path = resource_filename("giga_connectome", "data/atlas/DiFuMo.json") + atlas_config = load_atlas_setting(json_path) + assert atlas_config["name"] == "DiFuMo"