[REF] Refactor and clean utils.input_files module #1311

Draft
wants to merge 13 commits into base: dev
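
This PR replaces the flat query constants of clinica.utils.input_files (T1_FS_DESTRIEUX, T1_FS_T_DESTRIEUX, T1W_NII, DWI_PREPROC_NII, ...) with a query_pattern_factory keyed by the QueryPatternName enum; parameterised queries are then specialised with a second enum such as Parcellation or DWIFileType. A minimal before/after sketch of the calling convention, using only names that appear in the diff below:

    from clinica.utils.input_files import (
        Parcellation,
        QueryPatternName,
        query_pattern_factory,
    )

    # Before: a module-level constant was passed straight to the file readers.
    # from clinica.utils.input_files import T1_FS_DESTRIEUX

    # After: the factory returns a builder for the requested query name,
    # which is then specialised (or called empty for parameter-free queries).
    segmentation_pattern = query_pattern_factory(
        QueryPatternName.T1_FREESURFER_SEGMENTATION
    )(Parcellation.DESTRIEUX)
    t1w_pattern = query_pattern_factory(QueryPatternName.T1W)()
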
@@ -59,7 +59,11 @@ def _build_input_node(self):
save_part_sess_long_ids_to_tsv,
)
from clinica.utils.exceptions import ClinicaException
from clinica.utils.input_files import T1_FS_DESTRIEUX, T1_FS_T_DESTRIEUX
from clinica.utils.input_files import (
Parcellation,
QueryPatternName,
query_pattern_factory,
)
from clinica.utils.inputs import (
clinica_file_reader,
format_clinica_file_reader_errors,
@@ -119,19 +123,26 @@ def _build_input_node(self):
) = extract_subject_session_longitudinal_ids_from_filename(
to_process_ids
)

pattern_segmentation = query_pattern_factory(
QueryPatternName.T1_FREESURFER_SEGMENTATION
)(Parcellation.DESTRIEUX)
_, errors_destrieux = clinica_file_reader(
self.subjects, self.sessions, self.caps_directory, T1_FS_DESTRIEUX
self.subjects, self.sessions, self.caps_directory, pattern_segmentation
)
pattern_template = query_pattern_factory(
QueryPatternName.T1_FREESURFER_TEMPLATE
)(Parcellation.DESTRIEUX)
_, errors_t_destrieux = clinica_file_reader(
self.subjects, list_long_id, self.caps_directory, T1_FS_T_DESTRIEUX
self.subjects, list_long_id, self.caps_directory, pattern_template
)
all_errors = [errors_destrieux, errors_t_destrieux]

if any(all_errors):
message = "Clinica faced errors while trying to read files in your CAPS directory.\n"
for error, info in zip(all_errors, [T1_FS_DESTRIEUX, T1_FS_T_DESTRIEUX]):
message += format_clinica_file_reader_errors(error, info)
for error, pattern in zip(
all_errors, [pattern_segmentation, pattern_template]
):
message += format_clinica_file_reader_errors(error, pattern)
raise ClinicaException(message)

save_part_sess_long_ids_to_tsv(
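
For readers unfamiliar with the new API: query_pattern_factory presumably maps each QueryPatternName member to a small builder returning a QueryPattern object (the class imported in clinica/pipelines/anatomical/freesurfer/t1/pipeline.py further down). A self-contained toy of that factory-of-builders idea; the enum values, pattern strings and QueryPattern fields below are made up and are not the real clinica implementation:

    from dataclasses import dataclass
    from enum import Enum
    from typing import Callable


    class Parcellation(str, Enum):  # toy stand-in for the clinica enum
        DESIKAN = "desikan"
        DESTRIEUX = "destrieux"


    @dataclass
    class QueryPattern:  # toy stand-in; the real fields live in clinica.utils.input_files
        pattern: str
        description: str


    def query_pattern_factory(name: str) -> Callable[..., QueryPattern]:
        """Return a builder; parameterised queries take an enum, the others take nothing."""
        builders = {
            "t1w": lambda: QueryPattern("sub-*_ses-*_t1w.nii*", "T1w MRI"),
            "t1_freesurfer_segmentation": lambda parcellation: QueryPattern(
                f"*aparc*{parcellation.value}*", f"{parcellation.value} segmentation"
            ),
        }
        return builders[name]


    print(query_pattern_factory("t1_freesurfer_segmentation")(Parcellation.DESTRIEUX))
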
@@ -2,6 +2,11 @@
from typing import List

from clinica.pipelines.engine import Pipeline
from clinica.utils.input_files import (
Parcellation,
QueryPatternName,
query_pattern_factory,
)


class T1FreeSurferTemplate(Pipeline):
@@ -17,7 +22,6 @@ def get_processed_images(
) -> List[str]:
import re

from clinica.utils.input_files import T1_FS_T_DESTRIEUX
from clinica.utils.inputs import clinica_file_reader
from clinica.utils.longitudinal import get_long_id
from clinica.utils.participant import get_unique_subjects
@@ -28,11 +32,13 @@ def get_processed_images(
list_long_id = [
get_long_id(list_session_ids) for list_session_ids in list_list_session_ids
]

image_ids: List[str] = []
if caps_directory.is_dir():
pattern = query_pattern_factory(QueryPatternName.T1_FREESURFER_TEMPLATE)(
Parcellation.DESTRIEUX
)
t1_freesurfer_files, _ = clinica_file_reader(
list_participant_id, list_long_id, caps_directory, T1_FS_T_DESTRIEUX
list_participant_id, list_long_id, caps_directory, pattern
)
image_ids = [
re.search(r"(sub-[a-zA-Z0-9]+)_(long-[a-zA-Z0-9]+)", file).group()
@@ -88,9 +94,7 @@ def _build_input_node(self):
from clinica.pipelines.anatomical.freesurfer.longitudinal.utils import (
save_part_sess_long_ids_to_tsv,
)
from clinica.utils.exceptions import ClinicaCAPSError, ClinicaException
from clinica.utils.filemanip import extract_subjects_sessions_from_filename
from clinica.utils.input_files import T1_FS_DESTRIEUX
from clinica.utils.inputs import clinica_file_filter
from clinica.utils.longitudinal import (
get_long_id,
@@ -149,11 +153,12 @@ def _build_input_node(self):
self.subjects, self.sessions = extract_subjects_sessions_from_filename(
to_process_ids
)

pattern = query_pattern_factory(QueryPatternName.T1_FREESURFER_SEGMENTATION)(
Parcellation.DESTRIEUX
)
_, self.subjects, self.sessions = clinica_file_filter(
self.subjects, self.sessions, self.caps_directory, T1_FS_DESTRIEUX
self.subjects, self.sessions, self.caps_directory, pattern
)

long_ids = get_participants_long_id(self.subjects, self.sessions)
save_part_sess_long_ids_to_tsv(
self.subjects, self.sessions, long_ids, self.base_dir / self.name
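
get_processed_images above detects already-computed longitudinal templates by pulling the participant and longitudinal IDs out of each FreeSurfer template filename. A standalone illustration of that regular expression (the filename is made up):

    import re

    filename = "sub-CLNC01_long-M000M018_T1w.nii.gz"  # made-up example
    match = re.search(r"(sub-[a-zA-Z0-9]+)_(long-[a-zA-Z0-9]+)", filename)
    print(match.group())   # sub-CLNC01_long-M000M018
    print(match.groups())  # ('sub-CLNC01', 'long-M000M018')
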
17 changes: 13 additions & 4 deletions clinica/pipelines/anatomical/freesurfer/t1/pipeline.py
@@ -4,6 +4,7 @@
from nipype import config

from clinica.pipelines.engine import Pipeline
from clinica.utils.input_files import QueryPattern

cfg = dict(execution={"parameterize_dirs": False})
config.update_config(cfg)
@@ -21,13 +22,20 @@ def get_processed_images(
caps_directory: Path, subjects: List[str], sessions: List[str]
) -> List[str]:
from clinica.utils.filemanip import extract_image_ids
from clinica.utils.input_files import T1_FS_DESTRIEUX
from clinica.utils.input_files import (
Parcellation,
QueryPatternName,
query_pattern_factory,
)
from clinica.utils.inputs import clinica_file_reader

image_ids: List[str] = []
if caps_directory.is_dir():
pattern = query_pattern_factory(
QueryPatternName.T1_FREESURFER_SEGMENTATION
)(Parcellation.DESTRIEUX)
t1_freesurfer_files, _ = clinica_file_reader(
subjects, sessions, caps_directory, T1_FS_DESTRIEUX
subjects, sessions, caps_directory, pattern
)
image_ids = extract_image_ids(t1_freesurfer_files)
return image_ids
@@ -96,7 +104,7 @@ def _build_input_node(self):
extract_subjects_sessions_from_filename,
save_participants_sessions,
)
from clinica.utils.input_files import T1W_NII
from clinica.utils.input_files import QueryPatternName, query_pattern_factory
from clinica.utils.inputs import clinica_file_filter
from clinica.utils.stream import cprint
from clinica.utils.ux import print_images_to_process
@@ -130,8 +138,9 @@ def _build_input_node(self):
to_process_ids
)

pattern = query_pattern_factory(QueryPatternName.T1W)()
t1w_files, self.subjects, self.sessions = clinica_file_filter(
self.subjects, self.sessions, self.bids_directory, T1W_NII
self.subjects, self.sessions, self.bids_directory, pattern
)

if not t1w_files:
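
Both input helpers touched by this PR now receive a factory-built pattern as their last argument. Their return shapes, as inferred from the call sites in this diff (a sketch, not the documented API):

    from pathlib import Path

    from clinica.utils.input_files import (
        Parcellation,
        QueryPatternName,
        query_pattern_factory,
    )
    from clinica.utils.inputs import clinica_file_filter, clinica_file_reader

    subjects, sessions = ["sub-01"], ["ses-M000"]  # toy values

    # clinica_file_reader -> (matched files, per-query errors); the errors can be
    # formatted with format_clinica_file_reader_errors and raised as a ClinicaException.
    seg_pattern = query_pattern_factory(QueryPatternName.T1_FREESURFER_SEGMENTATION)(
        Parcellation.DESTRIEUX
    )
    files, errors = clinica_file_reader(subjects, sessions, Path("/path/to/caps"), seg_pattern)

    # clinica_file_filter -> (matched files, filtered subjects, filtered sessions);
    # participants without a match are dropped before the pipeline is built.
    t1w_pattern = query_pattern_factory(QueryPatternName.T1W)()
    t1w_files, subjects, sessions = clinica_file_filter(
        subjects, sessions, Path("/path/to/bids"), t1w_pattern
    )
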
44 changes: 27 additions & 17 deletions clinica/pipelines/dwi/connectome/pipeline.py
@@ -61,31 +61,41 @@ def _build_input_node(self):
import nipype.interfaces.utility as nutil
import nipype.pipeline.engine as npe

import clinica.utils.input_files as input_files
from clinica.utils.exceptions import ClinicaCAPSError
from clinica.utils.filemanip import save_participants_sessions
from clinica.utils.input_files import (
DWIFileType,
Parcellation,
QueryPatternName,
query_pattern_factory,
)
from clinica.utils.inputs import clinica_list_of_files_reader
from clinica.utils.stream import cprint
from clinica.utils.ux import print_images_to_process

# Read CAPS files
patterns = [
query_pattern_factory(QueryPatternName.T1_FREESURFER_WHITE_MATTER)()
]
patterns.extend(
[
query_pattern_factory(QueryPatternName.T1_FREESURFER_SEGMENTATION)(p)
for p in (Parcellation.DESIKAN, Parcellation.DESTRIEUX)
]
)
patterns.append(query_pattern_factory(QueryPatternName.T1_FREESURFER_BRAIN)())
patterns.extend(
[
query_pattern_factory(QueryPatternName.DWI_PREPROC)(file_type)
for file_type in (DWIFileType.NII, DWIFileType.BVEC, DWIFileType.BVAL)
]
)
patterns.append(query_pattern_factory(QueryPatternName.DWI_PREPROC_BRAINMASK)())
list_caps_files = clinica_list_of_files_reader(
self.subjects,
self.sessions,
self.caps_directory,
[
# Inputs from t1-freesurfer pipeline
input_files.T1_FS_WM, # list_caps_files[0]
input_files.T1_FS_DESIKAN, # list_caps_files[1]
input_files.T1_FS_DESTRIEUX, # list_caps_files[2]
input_files.T1_FS_BRAIN, # list_caps_files[3]
# Inputs from dwi-preprocessing pipeline
input_files.DWI_PREPROC_NII, # list_caps_files[4]
input_files.DWI_PREPROC_BRAINMASK, # list_caps_files[5]
input_files.DWI_PREPROC_BVEC, # list_caps_files[6]
input_files.DWI_PREPROC_BVAL, # list_caps_files[7]
],
raise_exception=True,
patterns,
)

# Check space of DWI dataset
@@ -110,7 +120,7 @@
]

list_grad_fsl = [
(bvec, bval) for bvec, bval in zip(list_caps_files[6], list_caps_files[7])
(bvec, bval) for bvec, bval in zip(list_caps_files[5], list_caps_files[6])
]

# Save subjects to process in <WD>/<Pipeline.name>/participants.tsv
@@ -133,7 +143,7 @@ def _build_input_node(self):
("wm_mask_file", list_caps_files[0]),
("t1_brain_file", list_caps_files[3]),
("dwi_file", list_caps_files[4]),
("dwi_brainmask_file", list_caps_files[5]),
("dwi_brainmask_file", list_caps_files[7]),
("grad_fsl", list_grad_fsl),
("atlas_files", list_atlas_files),
],
@@ -161,7 +171,7 @@ def _build_input_node(self):
iterables=[
("wm_mask_file", list_caps_files[0]),
("dwi_file", list_caps_files[4]),
("dwi_brainmask_file", list_caps_files[5]),
("dwi_brainmask_file", list_caps_files[7]),
("grad_fsl", list_grad_fsl),
("atlas_files", list_atlas_files),
],
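
The CAPS queries of the connectome pipeline are now built as an ordered patterns list, so the positions inside list_caps_files shift; that is why the brain-mask, b-vector and b-value indices change above. The ordering implied by the new patterns list, for reference:

    # list_caps_files[0] -> T1 FreeSurfer white-matter mask
    # list_caps_files[1] -> T1 FreeSurfer segmentation (Desikan)
    # list_caps_files[2] -> T1 FreeSurfer segmentation (Destrieux)
    # list_caps_files[3] -> T1 FreeSurfer brain
    # list_caps_files[4] -> preprocessed DWI (NIfTI)
    # list_caps_files[5] -> preprocessed DWI b-vectors
    # list_caps_files[6] -> preprocessed DWI b-values
    # list_caps_files[7] -> preprocessed DWI brain mask
    list_grad_fsl = [
        (bvec, bval) for bvec, bval in zip(list_caps_files[5], list_caps_files[6])
    ]
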
18 changes: 11 additions & 7 deletions clinica/pipelines/dwi/dti/pipeline.py
@@ -67,22 +67,26 @@ def _build_input_node(self):
import nipype.interfaces.utility as nutil
import nipype.pipeline.engine as npe

import clinica.utils.input_files as input_files
from clinica.utils.filemanip import save_participants_sessions
from clinica.utils.input_files import (
DWIFileType,
QueryPatternName,
query_pattern_factory,
)
from clinica.utils.inputs import clinica_list_of_files_reader
from clinica.utils.stream import cprint
from clinica.utils.ux import print_images_to_process

patterns = [
query_pattern_factory(QueryPatternName.DWI_PREPROC)(file_type)
for file_type in (DWIFileType.NII, DWIFileType.BVEC, DWIFileType.BVAL)
]
patterns.append(query_pattern_factory(QueryPatternName.DWI_PREPROC_BRAINMASK)())
list_caps_files = clinica_list_of_files_reader(
self.subjects,
self.sessions,
self.caps_directory,
[
input_files.DWI_PREPROC_NII,
input_files.DWI_PREPROC_BVEC,
input_files.DWI_PREPROC_BVAL,
input_files.DWI_PREPROC_BRAINMASK,
],
patterns,
raise_exception=True,
)

13 changes: 2 additions & 11 deletions clinica/pipelines/dwi/dti/utils.py
@@ -1,11 +1,11 @@
"""This module contains utilities used by the DWIDTI pipeline."""

from enum import Enum
from pathlib import Path
from typing import Dict, List, Tuple

from clinica.utils.dwi import DTIBasedMeasure

__all__ = [
"DTIBasedMeasure",
"compute_statistics_on_atlases",
"get_caps_filenames",
"rename_into_caps",
@@ -15,15 +15,6 @@
]


class DTIBasedMeasure(str, Enum):
"""Possible DTI measures."""

FRACTIONAL_ANISOTROPY = "FA"
MEAN_DIFFUSIVITY = "MD"
AXIAL_DIFFUSIVITY = "AD"
RADIAL_DIFFUSIVITY = "RD"


def compute_statistics_on_atlases(
registered_map: Path, name_map: str, dwi_preprocessed_file: Path
) -> List[Path]:
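
The DTIBasedMeasure enum is deleted from this module and re-imported from clinica.utils.dwi (see the new import at the top of the file), while __all__ keeps exposing it, so downstream imports keep working. A small usage check, assuming the definition moved to clinica.utils.dwi unchanged:

    from clinica.utils.dwi import DTIBasedMeasure

    # Same four members as the definition removed above: FA, MD, AD, RD.
    print([measure.value for measure in DTIBasedMeasure])
    print(DTIBasedMeasure("FA") is DTIBasedMeasure.FRACTIONAL_ANISOTROPY)  # True
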
52 changes: 29 additions & 23 deletions clinica/pipelines/dwi/preprocessing/fmap/pipeline.py
@@ -4,6 +4,11 @@
from nipype import config

from clinica.pipelines.dwi.preprocessing.engine import DWIPreprocessingPipeline
from clinica.utils.input_files import (
DWIFileType,
QueryPatternName,
query_pattern_factory,
)

# Use hash instead of parameters for iterables folder names
# Otherwise path will be too long and generate OSError
@@ -30,13 +35,15 @@ def get_processed_images(
caps_directory: Path, subjects: List[str], sessions: List[str]
) -> List[str]:
from clinica.utils.filemanip import extract_image_ids
from clinica.utils.input_files import DWI_PREPROC_NII
from clinica.utils.inputs import clinica_file_reader

image_ids: List[str] = []
if caps_directory.is_dir():
pattern = query_pattern_factory(QueryPatternName.DWI_PREPROC)(
DWIFileType.NII
)
preproc_files, _ = clinica_file_reader(
subjects, sessions, caps_directory, DWI_PREPROC_NII
subjects, sessions, caps_directory, pattern
)
image_ids = extract_image_ids(preproc_files)
return image_ids
@@ -97,32 +104,33 @@ def _build_input_node(self):
import nipype.pipeline.engine as npe

from clinica.utils.filemanip import save_participants_sessions
from clinica.utils.input_files import (
DWI_BVAL,
DWI_BVEC,
DWI_JSON,
DWI_NII,
FMAP_MAGNITUDE1_NII,
FMAP_PHASEDIFF_JSON,
FMAP_PHASEDIFF_NII,
)
from clinica.utils.inputs import clinica_list_of_files_reader
from clinica.utils.stream import cprint
from clinica.utils.ux import print_images_to_process

patterns = [
query_pattern_factory(QueryPatternName.DWI)(file_type)
for file_type in (
DWIFileType.NII,
DWIFileType.BVEC,
DWIFileType.BVAL,
DWIFileType.JSON,
)
]
patterns.append(
query_pattern_factory(QueryPatternName.DWI_FMAP_MAGNITUDE1)(DWIFileType.NII)
)
patterns.extend(
[
query_pattern_factory(QueryPatternName.DWI_FMAP_PHASEDIFF)(file_type)
for file_type in (DWIFileType.NII, DWIFileType.JSON)
]
)
list_bids_files = clinica_list_of_files_reader(
self.subjects,
self.sessions,
self.bids_directory,
[
DWI_NII,
DWI_BVEC,
DWI_BVAL,
DWI_JSON,
FMAP_MAGNITUDE1_NII,
FMAP_PHASEDIFF_NII,
FMAP_PHASEDIFF_JSON,
],
patterns,
raise_exception=True,
)
save_participants_sessions(
Expand All @@ -131,9 +139,7 @@ def _build_input_node(self):
if len(self.subjects):
print_images_to_process(self.subjects, self.sessions)
cprint(
f"List available in {self.base_dir / self.name / 'participants.tsv'}"
)
cprint(
f"List available in {self.base_dir / self.name / 'participants.tsv'}\n"
"Computational time will depend of the number of volumes in your DWI dataset and the use of CUDA."
)

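
Several pipelines in this PR (connectome, DTI, and this fieldmap-based preprocessing) now hand an ordered list of factory-built patterns to clinica_list_of_files_reader. A hedged sketch of what that helper presumably does with the list; the real implementation lives in clinica.utils.inputs and may differ:

    from clinica.utils.exceptions import ClinicaException
    from clinica.utils.inputs import clinica_file_reader, format_clinica_file_reader_errors


    def clinica_list_of_files_reader_sketch(
        subjects, sessions, directory, patterns, raise_exception=True
    ):
        """Sketch only: read one pattern at a time and aggregate the errors."""
        all_files, all_errors = [], []
        for pattern in patterns:
            files, errors = clinica_file_reader(subjects, sessions, directory, pattern)
            all_files.append(files)
            all_errors.append(errors)
        if raise_exception and any(all_errors):
            message = "Clinica faced errors while trying to read files in your directories.\n"
            for error, pattern in zip(all_errors, patterns):
                message += format_clinica_file_reader_errors(error, pattern)
            raise ClinicaException(message)
        return all_files  # one list of matched files per pattern, in order
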