diff --git a/docs/api.rst b/docs/api.rst
index a840791ec..a089bef57 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -157,6 +157,11 @@ For more information about functional characterization analysis, see :ref:`Meta-
 .. autosummary::
    :toctree: generated/
+   :template: class.rst
+
+   transforms.ImageTransformer
+   transforms.ImagesToCoordinates
+
    :template: function.rst

    transforms.transform_images
@@ -173,10 +178,6 @@ For more information about functional characterization analysis, see :ref:`Meta-
    transforms.z_to_t
    transforms.z_to_p

-   :template: class.rst
-
-   transforms.ImagesToCoordinates
-
 .. _api_extract_ref:
diff --git a/examples/01_datasets/plot_dataset_io.py b/examples/01_datasets/plot_dataset_io.py
index 1e18c32a0..11f872e0d 100644
--- a/examples/01_datasets/plot_dataset_io.py
+++ b/examples/01_datasets/plot_dataset_io.py
@@ -121,10 +121,9 @@
 # beta images, you can also calculate varcope (variance) images.
 #
 # We use :mod:`nimare.transforms` to perform these transformations
-# (especially :func:`nimare.transforms.transform_images`)
-dset.images = nimare.transforms.transform_images(
-    dset.images, "varcope", dset.masker, dset.metadata, out_dir=None
-)
+# (especially :class:`nimare.transforms.ImageTransformer`)
+varcope_transformer = nimare.transforms.ImageTransformer(target="varcope")
+dset = varcope_transformer.transform(dset)
 dset.images[["id", "varcope"]].head()

 ###############################################################################
@@ -147,9 +146,8 @@
 ###############################################################################
 # Let's try to fill in missing z images
 # `````````````````````````````````````````````````````````````````````````````
-dset.images = nimare.transforms.transform_images(
-    dset.images, "z", dset.masker, dset.metadata, out_dir=None
-)
+z_transformer = nimare.transforms.ImageTransformer(target="z")
+dset = z_transformer.transform(dset)
 z_images = dset.get_images(imtype="z")
 z_images = [str(z) for z in z_images]
 print("\n".join(z_images))
diff --git a/examples/01_datasets/plot_neurovault_io.py b/examples/01_datasets/plot_neurovault_io.py
index 2c0d1fbc6..0921b0dac 100644
--- a/examples/01_datasets/plot_neurovault_io.py
+++ b/examples/01_datasets/plot_neurovault_io.py
@@ -62,18 +62,17 @@
 # ------------------------------
 # Some of the statistical maps are T statistics and others are Z statistics.
 # To perform a Fisher's meta analysis, we need all Z maps.
-# Thoughtfully, NiMARE has a function named ``transform_images`` that will
+# Thoughtfully, NiMARE has a class named ``ImageTransformer`` that will
 # help us.
-from nimare.transforms import transform_images
+from nimare.transforms import ImageTransformer

 # Not all studies have Z maps!
 print(dset.images["z"])

-dset.images = transform_images(
-    dset.images, target="z", masker=dset.masker, metadata_df=dset.metadata
-)
+z_transformer = ImageTransformer(target="z")
+dset = z_transformer.transform(dset)

-# All studies have Z maps!
+# All studies now have Z maps!
 print(dset.images["z"])
diff --git a/examples/01_datasets/transform_images_to_coordinates.py b/examples/01_datasets/transform_images_to_coordinates.py
index 7b533e7b3..1ed59b3c2 100644
--- a/examples/01_datasets/transform_images_to_coordinates.py
+++ b/examples/01_datasets/transform_images_to_coordinates.py
@@ -22,7 +22,7 @@
 import matplotlib.pyplot as plt

 import nimare
-from nimare.transforms import ImagesToCoordinates, transform_images
+from nimare.transforms import ImagesToCoordinates, ImageTransformer
 from nimare.meta.cbma import ALE
 from nimare.tests.utils import get_test_data_path
@@ -39,9 +39,8 @@
 dset.update_path(dset_dir)

 # ImagesToCoordinates uses z or p statistical maps
-dset.images = transform_images(
-    dset.images, target="z", masker=dset.masker, metadata_df=dset.metadata
-)
+z_transformer = ImageTransformer(target="z")
+dset = z_transformer.transform(dset)

 study_no_images = "pain_02.nidm-1"
 # delete images for study
@@ -129,7 +128,8 @@
 # while studies with only coordinates (no images) are in 'replace',
 # they are removed from 'demolish'.
 print(
-    f"studies in 'replace', but not 'demolish': {set(dset_replace.coordinates['id']) - set(dset_demolish.coordinates['id'])}"
+    "studies in 'replace', but not 'demolish': "
+    f"{set(dset_replace.coordinates['id']) - set(dset_demolish.coordinates['id'])}"
 )

 ###############################################################################
diff --git a/examples/02_meta-analyses/plot_compare_ibma_and_cbma.py b/examples/02_meta-analyses/plot_compare_ibma_and_cbma.py
index d07d564cd..347bc9e68 100644
--- a/examples/02_meta-analyses/plot_compare_ibma_and_cbma.py
+++ b/examples/02_meta-analyses/plot_compare_ibma_and_cbma.py
@@ -18,7 +18,7 @@
 from nilearn.plotting import plot_stat_map

 import nimare
-from nimare.transforms import ImagesToCoordinates, transform_images
+from nimare.transforms import ImagesToCoordinates, ImageTransformer
 from nimare.meta.ibma import DerSimonianLaird
 from nimare.meta.cbma import ALE
 from nimare.tests.utils import get_test_data_path
@@ -36,12 +36,11 @@
 dset.update_path(dset_dir)

 # Calculate missing statistical images from the available stats.
-dset.images = transform_images(
-    dset.images, target="z", masker=dset.masker, metadata_df=dset.metadata
-)
-dset.images = nimare.transforms.transform_images(
-    dset.images, target="varcope", masker=dset.masker, metadata_df=dset.metadata
-)
+z_transformer = ImageTransformer(target="z")
+dset = z_transformer.transform(dset)
+
+varcope_transformer = ImageTransformer(target="varcope")
+dset = varcope_transformer.transform(dset)

 # create coordinates from statistical maps
 coord_gen = ImagesToCoordinates(merge_strategy="replace")
diff --git a/examples/02_meta-analyses/plot_ibma.py b/examples/02_meta-analyses/plot_ibma.py
index d385e00c1..05197ba9f 100644
--- a/examples/02_meta-analyses/plot_ibma.py
+++ b/examples/02_meta-analyses/plot_ibma.py
@@ -37,12 +37,10 @@
 dset = nimare.dataset.Dataset(dset_file)
 dset.update_path(dset_dir)
 # Calculate missing images
-dset.images = nimare.transforms.transform_images(
-    dset.images, target="z", masker=dset.masker, metadata_df=dset.metadata
-)
-dset.images = nimare.transforms.transform_images(
-    dset.images, target="varcope", masker=dset.masker, metadata_df=dset.metadata
-)
+z_transformer = nimare.transforms.ImageTransformer(target="z")
+varcope_transformer = nimare.transforms.ImageTransformer(target="varcope")
+dset = z_transformer.transform(dset)
+dset = varcope_transformer.transform(dset)

 ###############################################################################
 # Stouffer's
diff --git a/nimare/base.py b/nimare/base.py
index d9f61de3e..54d4ae6e5 100644
--- a/nimare/base.py
+++ b/nimare/base.py
@@ -350,6 +350,7 @@ def __init__(self):
     @abstractmethod
     def transform(self, dataset):
         """Add stuff to transformer."""
+        # Using attribute check instead of type check to allow fake Datasets for testing.
         if not hasattr(dataset, "slice"):
             raise ValueError(
                 'Argument "dataset" must be a valid Dataset '
diff --git a/nimare/generate.py b/nimare/generate.py
index df14f844e..dad3aa5cd 100644
--- a/nimare/generate.py
+++ b/nimare/generate.py
@@ -7,7 +7,7 @@
 from .dataset import Dataset
 from .io import convert_neurovault_to_dataset
 from .meta.utils import compute_ale_ma, get_ale_kernel
-from .transforms import transform_images
+from .transforms import ImageTransformer
 from .utils import mm2vox, vox2mm

 # defaults for creating a neurovault dataset
@@ -162,9 +162,8 @@ def create_neurovault_dataset(
     dataset = convert_neurovault_to_dataset(
         collection_ids, contrasts, img_dir, map_type_conversion, **dset_kwargs
     )
-    dataset.images = transform_images(
-        dataset.images, target="z", masker=dataset.masker, metadata_df=dataset.metadata
-    )
+    transformer = ImageTransformer(target="z")
+    dataset = transformer.transform(dataset)

     return dataset
diff --git a/nimare/tests/test_transforms.py b/nimare/tests/test_transforms.py
index 5f6d051fa..1cfe921ad 100644
--- a/nimare/tests/test_transforms.py
+++ b/nimare/tests/test_transforms.py
@@ -9,6 +9,26 @@
 from nimare import transforms


+def test_ImageTransformer(testdata_ibma):
+    """Smoke test on transforms.ImageTransformer."""
+    dset = testdata_ibma
+    z_files = dset.images["z"].tolist()
+    z_transformer = transforms.ImageTransformer(target="z")
+    new_dset = z_transformer.transform(dset)
+    new_z_files = new_dset.images["z"].tolist()
+    assert z_files[:-1] == new_z_files[:-1]
+    # new z statistic map should have 3 dimensions
+    assert len(nib.load(new_z_files[-1]).shape) == 3
+    assert all([nzf is not None for nzf in new_z_files])
+
+    varcope_files = dset.images["varcope"].tolist()
+    varcope_transformer = transforms.ImageTransformer(target="varcope")
+    new_dset = varcope_transformer.transform(dset)
+    new_varcope_files = new_dset.images["varcope"].tolist()
+    assert not all([isinstance(vf, str) for vf in varcope_files])
+    assert all([isinstance(vf, str) for vf in new_varcope_files])
+
+
 def test_transform_images(testdata_ibma):
     """Smoke test on transforms.transform_images."""
     dset = testdata_ibma
diff --git a/nimare/transforms.py b/nimare/transforms.py
index e85d26750..e5e8dc581 100644
--- a/nimare/transforms.py
+++ b/nimare/transforms.py
@@ -18,6 +18,58 @@

 LGR = logging.getLogger(__name__)

+class ImageTransformer(Transformer):
+    """A class to create new images from existing ones within a Dataset.
+
+    This class is a light wrapper around :func:`nimare.transforms.transform_images`.
+
+    Parameters
+    ----------
+    target : {'z', 'p', 'beta', 'varcope'}
+        Target image type.
+    overwrite : :obj:`bool`, optional
+        Whether to overwrite existing files or not. Default is False.
+
+    See Also
+    --------
+    nimare.transforms.transform_images : The function called by this class.
+    """
+
+    def __init__(self, target, overwrite=False):
+        self.target = target
+        self.overwrite = overwrite
+
+    def transform(self, dataset):
+        """Generate images of the target type from other image types in a Dataset.
+
+        Parameters
+        ----------
+        dataset : :obj:`nimare.dataset.Dataset`
+            A Dataset containing images and relevant metadata.
+
+        Returns
+        -------
+        new_dataset : :obj:`nimare.dataset.Dataset`
+            A copy of the input Dataset, with new images added to its images attribute.
+        """
+        # Using attribute check instead of type check to allow fake Datasets for testing.
+        if not hasattr(dataset, "slice"):
+            raise ValueError(
+                f"Argument 'dataset' must be a valid Dataset object, not a {type(dataset)}."
+            )
+
+        new_dataset = dataset.copy()
+        new_dataset.images = transform_images(
+            dataset.images,
+            target=self.target,
+            masker=dataset.masker,
+            metadata_df=dataset.metadata,
+            out_dir=dataset.basepath,
+            overwrite=self.overwrite,
+        )
+        return new_dataset
+
+
 def transform_images(images_df, target, masker, metadata_df=None, out_dir=None, overwrite=False):
     """Generate images of a given type from other image types and write out to files.
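
For reference (not part of the patch), a minimal usage sketch of the new ``ImageTransformer`` API, mirroring the example scripts updated above. The dataset JSON path is a placeholder; any Dataset with t/beta/varcope images and valid file paths would work.

# Sketch only: the path below is hypothetical, not a file shipped with NiMARE.
import os

from nimare.dataset import Dataset
from nimare.transforms import ImageTransformer

# Load a Dataset the same way the example scripts in this diff do.
dset = Dataset(os.path.join("/path/to", "my_dataset.json"))

# One transformer per target image type; transform() returns a copy of the
# Dataset with the newly generated images added to its images attribute.
z_transformer = ImageTransformer(target="z")
dset = z_transformer.transform(dset)

varcope_transformer = ImageTransformer(target="varcope")
dset = varcope_transformer.transform(dset)

print(dset.images[["id", "z", "varcope"]].head())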