Skip to content

Commit

Permalink
DRY
Browse files Browse the repository at this point in the history
Signed-off-by: zethson <[email protected]>
  • Loading branch information
Zethson committed Jan 6, 2025
1 parent adc4919 commit 6324250
Show file tree
Hide file tree
Showing 4 changed files with 15 additions and 15 deletions.
9 changes: 7 additions & 2 deletions ehrapy/_compat.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
# Since we might check whether an object is an instance of dask.array.Array
# without requiring dask installed in the environment.
# This would become obsolete should dask become a requirement for ehrapy

from collections.abc import Callable
from typing import NoReturn

try:
import dask.array as da
Expand All @@ -11,6 +10,12 @@
DASK_AVAILABLE = False


def _raise_array_type_not_implemented(func: Callable, type_: type) -> NotImplementedError:
return NotImplementedError(
f"{func.__name__} does not support array type {type_}. Must be of type {func.registry.keys()}." # type: ignore
)


def is_dask_array(array):
if DASK_AVAILABLE:
return isinstance(array, da.Array)
Expand Down
14 changes: 7 additions & 7 deletions ehrapy/preprocessing/_normalization.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import numpy as np
import sklearn.preprocessing as sklearn_pp

from ehrapy._compat import is_dask_array
from ehrapy._compat import _raise_array_type_not_implemented

try:
import dask.array as da
Expand Down Expand Up @@ -77,7 +77,7 @@ def _scale_func_group(

@singledispatch
def _scale_norm_function(arr):
    """Dispatch fallback for scale normalization: reject unsupported array types."""
    # `raise` is required: the helper's return value would otherwise be
    # discarded and this fallback would silently return None.
    raise _raise_array_type_not_implemented(_scale_norm_function, type(arr))


@_scale_norm_function.register
Expand Down Expand Up @@ -135,7 +135,7 @@ def scale_norm(

@singledispatch
def _minmax_norm_function(arr):
    """Dispatch fallback for min-max normalization: reject unsupported array types."""
    # `raise` is required: the helper's return value would otherwise be
    # discarded and this fallback would silently return None.
    raise _raise_array_type_not_implemented(_minmax_norm_function, type(arr))


@_minmax_norm_function.register
Expand Down Expand Up @@ -194,7 +194,7 @@ def minmax_norm(

@singledispatch
def _maxabs_norm_function(arr):
    """Dispatch fallback for max-abs normalization: reject unsupported array types."""
    # Bug fix: the helper was passed `_scale_norm_function`, so the error
    # message named the wrong function and listed the wrong registry.
    # `raise` is required: the helper's return value would otherwise be
    # discarded and this fallback would silently return None.
    raise _raise_array_type_not_implemented(_maxabs_norm_function, type(arr))


@_maxabs_norm_function.register
Expand Down Expand Up @@ -243,7 +243,7 @@ def maxabs_norm(

@singledispatch
def _robust_scale_norm_function(arr, **kwargs):
    """Dispatch fallback for robust scale normalization: reject unsupported array types."""
    # `raise` is required: the helper's return value would otherwise be
    # discarded and this fallback would silently return None.
    raise _raise_array_type_not_implemented(_robust_scale_norm_function, type(arr))


@_robust_scale_norm_function.register
Expand Down Expand Up @@ -303,7 +303,7 @@ def robust_scale_norm(

@singledispatch
def _quantile_norm_function(arr):
    """Dispatch fallback for quantile normalization: reject unsupported array types."""
    # `raise` is required: the helper's return value would otherwise be
    # discarded and this fallback would silently return None.
    raise _raise_array_type_not_implemented(_quantile_norm_function, type(arr))


@_quantile_norm_function.register
Expand Down Expand Up @@ -362,7 +362,7 @@ def quantile_norm(

@singledispatch
def _power_norm_function(arr, **kwargs):
    """Dispatch fallback for power-transform normalization: reject unsupported array types."""
    # `raise` is required: the helper's return value would otherwise be
    # discarded and this fallback would silently return None.
    raise _raise_array_type_not_implemented(_power_norm_function, type(arr))


@_power_norm_function.register
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ medcat = [
"medcat",
]
dask = [
"dask",
"anndata[dask]",
"dask-ml",
]
dev = [
Expand Down
5 changes: 0 additions & 5 deletions tests/preprocessing/test_normalization.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,11 +89,6 @@ def test_vars_checks(adata_to_norm):
ep.pp.scale_norm(adata_to_norm, vars=["String1"])


# TODO: list the supported array types centrally?
norm_scale_supported_types = [np.asarray, da.asarray]
norm_scale_unsupported_types = [sparse.csc_matrix]


# TODO: check this for each function, with just default settings?
@pytest.mark.parametrize(
"array_type,expected_error",
Expand Down

0 comments on commit 6324250

Please sign in to comment.