Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

EHN save denoising details into meta data #145

Merged
merged 6 commits into from
May 17, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions docs/source/changes.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,13 @@

### New

- [ENH] Add details of denoising strategy to the metadata of the time series extraction. (@htwangtw) [#144](https://github.com/bids-apps/giga_connectome/issues/144)

### Fixes

- [MAINT] Remove recursive import. (@htwangtw) [#135](https://github.com/bids-apps/giga_connectome/issues/135)
- [DOCS] Remove `meas` entity in timeseries outputs in the documentation. (@htwangtw) [#136](https://github.com/bids-apps/giga_connectome/issues/136)

### Enhancements

### Changes
Expand Down
2 changes: 1 addition & 1 deletion docs/source/outputs.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ and each atlas the following data files will be generated

- a `[matches]_atlas-{atlas}_meas-PearsonCorrelation_desc-{atlas_description}{denoise_strategy}_relmat.tsv`
file that contains the correlation matrix between all the regions of the atlas
- a `[matches]_atlas-{atlas}_meas-PearsonCorrelation_desc-{atlas description}{denoise_strategy}_timeseries.tsv`
- a `[matches]_atlas-{atlas}_desc-{atlas description}{denoise_strategy}_timeseries.tsv`
file that contains the extracted timeseries for each region of the atlas

- `{atlas}` refers to the name of the atlas used (for example, `Schaefer20187Networks`)
Expand Down
8 changes: 0 additions & 8 deletions giga_connectome/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,17 +6,9 @@
except ImportError:
pass

from .atlas import load_atlas_setting
from .denoise import get_denoise_strategy
from .mask import generate_gm_mask_atlas
from .postprocess import run_postprocessing_dataset

__all__ = [
"__copyright__",
"__packagename__",
"__version__",
"generate_gm_mask_atlas",
"load_atlas_setting",
"run_postprocessing_dataset",
"get_denoise_strategy",
]
67 changes: 65 additions & 2 deletions giga_connectome/denoise.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import pandas as pd
from nibabel import Nifti1Image
from nilearn.interfaces import fmriprep
from nilearn.interfaces.fmriprep import load_confounds_utils as lc_utils
from nilearn.maskers import NiftiMasker
from pkg_resources import resource_filename

Expand Down Expand Up @@ -42,6 +43,17 @@
},
)

class METADATA_TYPE(TypedDict):
    """Sidecar metadata describing one denoising run.

    Written next to the extracted time series as a BIDS-style JSON
    sidecar; key names therefore follow BIDS CamelCase convention.
    """

    ConfoundRegressors: List[str]
    NumberOfVolumesDiscardedByMotionScrubbing: int
    NumberOfVolumesDiscardedByNonsteadyStatesDetector: int
    MeanFramewiseDisplacement: float
    SamplingFrequency: float


def get_denoise_strategy(
strategy: str,
Expand Down Expand Up @@ -105,13 +117,65 @@ def is_ica_aroma(strategy: STRATEGY_TYPE) -> bool:
raise ValueError(f"Invalid input dictionary. {strategy['parameters']}")


def denoise_meta_data(strategy: STRATEGY_TYPE, img: str) -> METADATA_TYPE:
    """Get metadata of the denoising process.

    Including: column names of the confound regressors, number of
    volumes discarded by motion scrubbing, number of volumes discarded
    by the non-steady-states detector, mean framewise displacement, and
    a placeholder for sampling frequency (1/TR) to be filled by the
    caller.

    Parameters
    ----------
    strategy : dict
        Denoising strategy parameter to pass to load_confounds_strategy
        or load_confounds.
    img : str
        Path to the nifti image to denoise.

    Returns
    -------
    dict
        Metadata of the denoising process.
    """
    cf, sm = strategy["function"](img, **strategy["parameters"])
    # Mean FD is computed from the raw fMRIPrep confound file, not from
    # the subset of regressors selected by the strategy.
    cf_file = lc_utils.get_confounds_file(img, flag_full_aroma=False)
    framewise_displacement = pd.read_csv(cf_file, sep="\t")[
        "framewise_displacement"
    ]
    mean_fd = np.mean(framewise_displacement)
    # Reload with a minimal strategy so the sample mask reflects only the
    # volumes flagged by the non-steady-states detector.
    _, sample_mask_non_steady = fmriprep.load_confounds(
        img, strategy=["high_pass"]
    )
    n_non_steady = (
        cf.shape[0] - sample_mask_non_steady.shape[0]
        if sample_mask_non_steady is not None
        else 0
    )
    n_scrub = 0
    # Scrubbing is requested either via load_confounds_strategy
    # ("denoise_strategy" contains "scrubbing") or via load_confounds
    # ("strategy" list contains "scrub").
    # BUG FIX: the token was previously misspelled "srub", so scrubbed
    # volumes were never counted for load_confounds-based strategies.
    if "scrubbing" in strategy["parameters"].get(
        "denoise_strategy", ""
    ) or "scrub" in strategy["parameters"].get("strategy", []):
        # sm is None when no volume ends up being removed; keep 0 then.
        if sm is not None:
            n_scrub = cf.shape[0] - sm.shape[0] - n_non_steady

    meta_data: METADATA_TYPE = {
        "ConfoundRegressors": cf.columns.tolist(),
        "NumberOfVolumesDiscardedByMotionScrubbing": n_scrub,
        "NumberOfVolumesDiscardedByNonsteadyStatesDetector": n_non_steady,
        "MeanFramewiseDisplacement": mean_fd,
        "SamplingFrequency": np.nan,  # placeholder, filled by the caller
    }
    return meta_data


def denoise_nifti_voxel(
strategy: STRATEGY_TYPE,
group_mask: str | Path,
standardize: bool,
smoothing_fwhm: float,
img: str,
) -> Nifti1Image:
) -> Nifti1Image | None:
"""Denoise voxel level data per nifti image.

Parameters
Expand Down Expand Up @@ -146,7 +210,6 @@ def denoise_nifti_voxel(
standardize=standardize,
smoothing_fwhm=smoothing_fwhm,
)

time_series_voxel = group_masker.fit_transform(
img, confounds=cf, sample_mask=sm
)
Expand Down
18 changes: 12 additions & 6 deletions giga_connectome/postprocess.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,11 @@
from giga_connectome import utils
from giga_connectome.atlas import ATLAS_SETTING_TYPE
from giga_connectome.connectome import generate_timeseries_connectomes
from giga_connectome.denoise import STRATEGY_TYPE, denoise_nifti_voxel
from giga_connectome.denoise import (
STRATEGY_TYPE,
denoise_nifti_voxel,
denoise_meta_data,
)
from giga_connectome.logger import gc_logger
from giga_connectome.utils import progress_bar

Expand Down Expand Up @@ -122,6 +126,7 @@ def run_postprocessing_dataset(
denoised_img = denoise_nifti_voxel(
strategy, group_mask, standardize, smoothing_fwhm, img.path
)

# parse file name
subject, session, specifier = utils.parse_bids_name(img.path)

Expand All @@ -141,12 +146,13 @@ def run_postprocessing_dataset(
extension="json",
)
utils.check_path(json_filename)
with open(json_filename, "w") as f:
json.dump(
{"SamplingFrequency": 1 / img.entities["RepetitionTime"]},
f,
indent=4,
if denoised_img:
meta_data = denoise_meta_data(strategy, img.path)
meta_data["SamplingFrequency"] = (
1 / img.entities["RepetitionTime"]
)
with open(json_filename, "w") as f:
json.dump(meta_data, f, indent=4)

for desc, masker in atlas_maskers.items():

Expand Down
2 changes: 1 addition & 1 deletion giga_connectome/tests/test_atlas.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from giga_connectome import load_atlas_setting
from giga_connectome.atlas import load_atlas_setting


def test_load_atlas_setting():
Expand Down
36 changes: 36 additions & 0 deletions giga_connectome/tests/test_denoise.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
from giga_connectome.denoise import denoise_meta_data, get_denoise_strategy
from pkg_resources import resource_filename
from numpy import testing


def test_denoise_nifti_voxel():
    """Check denoise_meta_data reports the expected denoising details.

    NOTE(review): despite its name, this test exercises
    ``denoise_meta_data``; consider renaming it to
    ``test_denoise_meta_data``.
    """
    img_file = resource_filename(
        "giga_connectome",
        "data/test_data/ds000017-fmriprep22.0.1-downsampled-nosurface/sub-1/ses-timepoint1/func/sub-1_ses-timepoint1_task-probabilisticclassification_run-01_space-MNI152NLin2009cAsym_res-2_desc-preproc_bold.nii.gz",
    )
    # Scrubbing strategy: volumes are discarded both by motion scrubbing
    # and by the non-steady-states detector.
    strategy = get_denoise_strategy("scrubbing.2")
    meta_data = denoise_meta_data(
        strategy=strategy,
        img=img_file,
    )

    assert len(meta_data["ConfoundRegressors"]) == 36
    assert meta_data["NumberOfVolumesDiscardedByMotionScrubbing"] == 12
    assert meta_data["NumberOfVolumesDiscardedByNonsteadyStatesDetector"] == 2
    testing.assert_almost_equal(
        meta_data["MeanFramewiseDisplacement"], 0.107, decimal=3
    )

    # Simple strategy: no scrubbing, so only non-steady-state volumes
    # are discarded.
    strategy = get_denoise_strategy("simple")
    meta_data = denoise_meta_data(
        strategy=strategy,
        img=img_file,
    )

    assert len(meta_data["ConfoundRegressors"]) == 30
    assert meta_data["NumberOfVolumesDiscardedByMotionScrubbing"] == 0
    assert meta_data["NumberOfVolumesDiscardedByNonsteadyStatesDetector"] == 2
    testing.assert_almost_equal(
        meta_data["MeanFramewiseDisplacement"], 0.107, decimal=3
    )
14 changes: 6 additions & 8 deletions giga_connectome/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,12 @@

import argparse

from giga_connectome import (
generate_gm_mask_atlas,
get_denoise_strategy,
load_atlas_setting,
methods,
run_postprocessing_dataset,
utils,
)
from giga_connectome.mask import generate_gm_mask_atlas
from giga_connectome.atlas import load_atlas_setting
from giga_connectome.denoise import get_denoise_strategy
from giga_connectome import methods, utils
from giga_connectome.postprocess import run_postprocessing_dataset

from giga_connectome.denoise import is_ica_aroma
from giga_connectome.logger import gc_logger

Expand Down
Loading