From a33c140cace3cdd297c6356776ad1f84525e5f13 Mon Sep 17 00:00:00 2001
From: Taylor Salo
Date: Thu, 17 Dec 2020 13:50:16 -0500
Subject: [PATCH] [DOC] Warn users about Peaks2MapsKernel and KDA Estimator (#420)

* Add warning to Peaks2MapsKernel.

* Add warning to KDA estimator.

* Update nimare/meta/cbma/mkda.py

* Fix KDA warning.
---
 nimare/meta/cbma/mkda.py | 12 ++++++++++++
 nimare/meta/kernel.py    |  9 +++++++++
 2 files changed, 21 insertions(+)

diff --git a/nimare/meta/cbma/mkda.py b/nimare/meta/cbma/mkda.py
index 629417354..0dc215cf9 100644
--- a/nimare/meta/cbma/mkda.py
+++ b/nimare/meta/cbma/mkda.py
@@ -462,6 +462,12 @@ class KDA(CBMAEstimator):
 
     Available correction methods: :func:`KDA.correct_fwe_montecarlo`
 
+    Warning
+    -------
+    The KDA algorithm has been replaced in the literature with the MKDA algorithm.
+    As such, this estimator should almost never be used, outside of systematic
+    comparisons between algorithms.
+
     References
     ----------
     .. [1] Wager, Tor D., et al. "Valence, gender, and lateralization of
@@ -476,6 +482,12 @@ class KDA(CBMAEstimator):
     def __init__(
         self, kernel_transformer=KDAKernel, null_method="empirical", n_iters=10000, **kwargs
     ):
+        LGR.warning(
+            "The KDA algorithm has been replaced in the literature with the MKDA algorithm. "
+            "As such, this estimator should almost never be used, outside of systematic "
+            "comparisons between algorithms."
+        )
+
         if not (isinstance(kernel_transformer, KDAKernel) or kernel_transformer == KDAKernel):
             LGR.warning(
                 f"The KernelTransformer being used ({kernel_transformer}) is not optimized "
diff --git a/nimare/meta/kernel.py b/nimare/meta/kernel.py
index 07d36c5d6..52bac9dba 100644
--- a/nimare/meta/kernel.py
+++ b/nimare/meta/kernel.py
@@ -319,12 +319,21 @@ class Peaks2MapsKernel(KernelTransformer):
     resample_to_mask : :obj:`bool`, optional
         If True, will resample the MA maps to the mask's header.
         Default is True.
+
+    Warning
+    -------
+    Peaks2MapsKernel is not intended for serious research.
+    We strongly recommend against using it for any meaningful analyses.
     """
 
     def __init__(self, model_dir="auto"):
         # Use private attribute to hide value from get_params.
         # get_params will find model_dir=None, which is *very important* when a path is provided.
         self._model_dir = model_dir
+        LGR.warning(
+            "The Peaks2Maps kernel transformer is not intended for serious research. "
+            "We strongly recommend against using it for any meaningful analyses."
+        )
 
     def _transform(self, mask, coordinates):
         transformed = []
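
For reference (not part of the patch): a minimal sketch of how the new warnings surface at construction time, assuming a NiMARE installation that includes these changes and a default logging configuration.

    # Minimal sketch (assumes a NiMARE install that includes this patch).
    # Both constructors now emit a warning through NiMARE's module-level logger.
    import logging

    from nimare.meta.cbma.mkda import KDA
    from nimare.meta.kernel import Peaks2MapsKernel

    # Make logger warnings visible on the console.
    logging.basicConfig(level=logging.WARNING)

    kda = KDA()               # warns that KDA has been replaced in the literature by MKDA
    p2m = Peaks2MapsKernel()  # warns that Peaks2Maps is not intended for serious research

The warnings are emitted at instantiation rather than at fit/transform time, so users see them before committing to a potentially long-running analysis.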