[ENH] Add ImageTransformer class (#513)
* Add ImageTransformer.

* Replace transform_images with ImageTransformer in examples.

* Fix transformer calls.
tsalo authored Jun 2, 2021
1 parent bffd5e9 commit a83811f
Showing 10 changed files with 106 additions and 39 deletions.
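The substitution is the same throughout the examples and in ``generate.py``: a call to ``transform_images`` on ``dset.images`` becomes an ``ImageTransformer`` applied to the whole Dataset, which returns a new Dataset. A minimal before/after sketch of the pattern (``dset`` stands in for any loaded Dataset):

import nimare

# Before: transform the images DataFrame directly
dset.images = nimare.transforms.transform_images(
    dset.images, target="z", masker=dset.masker, metadata_df=dset.metadata
)

# After: transform the Dataset itself and get a copy back
z_transformer = nimare.transforms.ImageTransformer(target="z")
dset = z_transformer.transform(dset)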
9 changes: 5 additions & 4 deletions docs/api.rst
@@ -157,6 +157,11 @@ For more information about functional characterization analysis, see :ref:`Meta-

.. autosummary::
:toctree: generated/
:template: class.rst

transforms.ImageTransformer
transforms.ImagesToCoordinates

:template: function.rst

transforms.transform_images
@@ -173,10 +178,6 @@ For more information about functional characterization analysis, see :ref:`Meta-
transforms.z_to_t
transforms.z_to_p

:template: class.rst

transforms.ImagesToCoordinates


.. _api_extract_ref:

12 changes: 5 additions & 7 deletions examples/01_datasets/plot_dataset_io.py
@@ -121,10 +121,9 @@
# beta images, you can also calculate varcope (variance) images.
#
# We use :mod:`nimare.transforms` to perform these transformations
# (especially :func:`nimare.transforms.transform_images`)
dset.images = nimare.transforms.transform_images(
dset.images, "varcope", dset.masker, dset.metadata, out_dir=None
)
# (especially :class:`nimare.transforms.ImageTransformer`)
varcope_transformer = nimare.transforms.ImageTransformer(target="varcope")
dset = varcope_transformer.transform(dset)
dset.images[["id", "varcope"]].head()

###############################################################################
@@ -147,9 +146,8 @@
###############################################################################
# Let's try to fill in missing z images
# `````````````````````````````````````````````````````````````````````````````
dset.images = nimare.transforms.transform_images(
dset.images, "z", dset.masker, dset.metadata, out_dir=None
)
z_transformer = nimare.transforms.ImageTransformer(target="z")
dset = z_transformer.transform(dset)
z_images = dset.get_images(imtype="z")
z_images = [str(z) for z in z_images]
print("\n".join(z_images))
11 changes: 5 additions & 6 deletions examples/01_datasets/plot_neurovault_io.py
@@ -62,18 +62,17 @@
# ------------------------------
# Some of the statistical maps are T statistics and others are Z statistics.
# To perform Fisher's meta-analysis, we need all Z maps.
# Thoughtfully, NiMARE has a function named ``transform_images`` that will
# Thoughtfully, NiMARE has a class named ``ImageTransformer`` that will
# help us.
from nimare.transforms import transform_images
from nimare.transforms import ImageTransformer

# Not all studies have Z maps!
print(dset.images["z"])

dset.images = transform_images(
dset.images, target="z", masker=dset.masker, metadata_df=dset.metadata
)
z_transformer = ImageTransformer(target="z")
dset = z_transformer.transform(dset)

# All studies have Z maps!
# All studies now have Z maps!
print(dset.images["z"])


10 changes: 5 additions & 5 deletions examples/01_datasets/transform_images_to_coordinates.py
@@ -22,7 +22,7 @@
import matplotlib.pyplot as plt

import nimare
from nimare.transforms import ImagesToCoordinates, transform_images
from nimare.transforms import ImagesToCoordinates, ImageTransformer
from nimare.meta.cbma import ALE
from nimare.tests.utils import get_test_data_path

@@ -39,9 +39,8 @@
dset.update_path(dset_dir)

# ImagesToCoordinates uses z or p statistical maps
dset.images = transform_images(
dset.images, target="z", masker=dset.masker, metadata_df=dset.metadata
)
z_transformer = ImageTransformer(target="z")
dset = z_transformer.transform(dset)

study_no_images = "pain_02.nidm-1"
# delete images for study
@@ -129,7 +128,8 @@
# while studies with only coordinates (no images) are in 'replace',
# they are removed from 'demolish'.
print(
f"studies in 'replace', but not 'demolish': {set(dset_replace.coordinates['id']) - set(dset_demolish.coordinates['id'])}"
"studies in 'replace', but not 'demolish': "
f"{set(dset_replace.coordinates['id']) - set(dset_demolish.coordinates['id'])}"
)

###############################################################################
13 changes: 6 additions & 7 deletions examples/02_meta-analyses/plot_compare_ibma_and_cbma.py
@@ -18,7 +18,7 @@
from nilearn.plotting import plot_stat_map

import nimare
from nimare.transforms import ImagesToCoordinates, transform_images
from nimare.transforms import ImagesToCoordinates, ImageTransformer
from nimare.meta.ibma import DerSimonianLaird
from nimare.meta.cbma import ALE
from nimare.tests.utils import get_test_data_path
@@ -36,12 +36,11 @@
dset.update_path(dset_dir)

# Calculate missing statistical images from the available stats.
dset.images = transform_images(
dset.images, target="z", masker=dset.masker, metadata_df=dset.metadata
)
dset.images = nimare.transforms.transform_images(
dset.images, target="varcope", masker=dset.masker, metadata_df=dset.metadata
)
z_transformer = ImageTransformer(target="z")
dset = z_transformer.transform(dset)

varcope_transformer = ImageTransformer(target="varcope")
dset = varcope_transformer.transform(dset)

# create coordinates from statistical maps
coord_gen = ImagesToCoordinates(merge_strategy="replace")
10 changes: 4 additions & 6 deletions examples/02_meta-analyses/plot_ibma.py
@@ -37,12 +37,10 @@
dset = nimare.dataset.Dataset(dset_file)
dset.update_path(dset_dir)
# Calculate missing images
dset.images = nimare.transforms.transform_images(
dset.images, target="z", masker=dset.masker, metadata_df=dset.metadata
)
dset.images = nimare.transforms.transform_images(
dset.images, target="varcope", masker=dset.masker, metadata_df=dset.metadata
)
z_transformer = nimare.transforms.ImageTransformer(target="z")
varcope_transformer = nimare.transforms.ImageTransformer(target="varcope")
dset = z_transformer.transform(dset)
dset = varcope_transformer.transform(dset)

###############################################################################
# Stouffer's
1 change: 1 addition & 0 deletions nimare/base.py
@@ -350,6 +350,7 @@ def __init__(self):
@abstractmethod
def transform(self, dataset):
"""Add stuff to transformer."""
# Using attribute check instead of type check to allow fake Datasets for testing.
if not hasattr(dataset, "slice"):
raise ValueError(
'Argument "dataset" must be a valid Dataset '
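The new comment documents a deliberate design choice: the guard duck-types on a ``slice`` attribute rather than using ``isinstance(dataset, Dataset)``, so unit tests can pass a lightweight stand-in. A hypothetical stub that would satisfy the check:

class FakeDataset:
    """Hypothetical test double; only needs a ``slice`` attribute to pass the guard."""

    def slice(self, ids=None):
        return self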
7 changes: 3 additions & 4 deletions nimare/generate.py
@@ -7,7 +7,7 @@
from .dataset import Dataset
from .io import convert_neurovault_to_dataset
from .meta.utils import compute_ale_ma, get_ale_kernel
from .transforms import transform_images
from .transforms import ImageTransformer
from .utils import mm2vox, vox2mm

# defaults for creating a neurovault dataset
@@ -162,9 +162,8 @@ def create_neurovault_dataset(
dataset = convert_neurovault_to_dataset(
collection_ids, contrasts, img_dir, map_type_conversion, **dset_kwargs
)
dataset.images = transform_images(
dataset.images, target="z", masker=dataset.masker, metadata_df=dataset.metadata
)
transformer = ImageTransformer(target="z")
dataset = transformer.transform(dataset)

return dataset

20 changes: 20 additions & 0 deletions nimare/tests/test_transforms.py
@@ -9,6 +9,26 @@
from nimare import transforms


def test_ImageTransformer(testdata_ibma):
"""Smoke test on transforms.ImageTransformer."""
dset = testdata_ibma
z_files = dset.images["z"].tolist()
z_transformer = transforms.ImageTransformer(target="z")
new_dset = z_transformer.transform(dset)
new_z_files = new_dset.images["z"].tolist()
assert z_files[:-1] == new_z_files[:-1]
# new z statistic map should have 3 dimensions
assert len(nib.load(new_z_files[-1]).shape) == 3
assert all([nzf is not None for nzf in new_z_files])

varcope_files = dset.images["varcope"].tolist()
varcope_transformer = transforms.ImageTransformer(target="varcope")
new_dset = varcope_transformer.transform(dset)
new_varcope_files = new_dset.images["varcope"].tolist()
assert not all([isinstance(vf, str) for vf in varcope_files])
assert all([isinstance(vf, str) for vf in new_varcope_files])


def test_transform_images(testdata_ibma):
"""Smoke test on transforms.transform_images."""
dset = testdata_ibma
52 changes: 52 additions & 0 deletions nimare/transforms.py
@@ -18,6 +18,58 @@
LGR = logging.getLogger(__name__)


class ImageTransformer(Transformer):
"""A class to create new images from existing ones within a Dataset.
This class is a light wrapper around :func:`nimare.transforms.transform_images`.
Parameters
----------
target : {'z', 'p', 'beta', 'varcope'}
Target image type.
overwrite : :obj:`bool`, optional
Whether to overwrite existing files or not. Default is False.
See Also
--------
nimare.transforms.transform_images : The function called by this class.
"""

def __init__(self, target, overwrite=False):
self.target = target
self.overwrite = overwrite

def transform(self, dataset):
"""Generate images of the target type from other image types in a Dataset.
Parameters
----------
dataset : :obj:`nimare.dataset.Dataset`
A Dataset containing images and relevant metadata.
Returns
-------
new_dataset : :obj:`nimare.dataset.Dataset`
A copy of the input Dataset, with new images added to its images attribute.
"""
# Using attribute check instead of type check to allow fake Datasets for testing.
if not hasattr(dataset, "slice"):
raise ValueError(
f"Argument 'dataset' must be a valid Dataset object, not a {type(dataset)}."
)

new_dataset = dataset.copy()
new_dataset.images = transform_images(
dataset.images,
target=self.target,
masker=dataset.masker,
metadata_df=dataset.metadata,
out_dir=dataset.basepath,
overwrite=self.overwrite,
)
return new_dataset


def transform_images(images_df, target, masker, metadata_df=None, out_dir=None, overwrite=False):
"""Generate images of a given type from other image types and write out to files.
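For reference, a short usage sketch of the new class defined above; the dataset file name is hypothetical, and ``overwrite`` is shown with its default value:

from nimare.dataset import Dataset
from nimare.transforms import ImageTransformer

dset = Dataset("my_dataset.json")  # hypothetical dataset file
# Derive varcope images from the maps already in the Dataset; existing files
# are left in place because overwrite defaults to False.
varcope_transformer = ImageTransformer(target="varcope", overwrite=False)
dset = varcope_transformer.transform(dset)  # copy of dset with new images added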
