Merge pull request #655 from bioimage-io/add_tests
add more tests
FynnBe authored Nov 13, 2024
2 parents 5f44013 + 1224a9a commit 33c3673
Showing 9 changed files with 90 additions and 28 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build.yml
@@ -44,7 +44,7 @@ jobs:
- name: install additional deps for a full pyright check
run: pip install json_schema_for_humans
if: matrix.is-dev-version
- run: ruff check **/*.py # ignore notebooks for now
- run: ruff check
if: matrix.is-dev-version
- run: pyright --version
if: matrix.is-dev-version
31 changes: 27 additions & 4 deletions bioimageio/spec/model/v0_4.py
@@ -520,12 +520,14 @@ class ProcessingDescrBase(NodeWithExplicitlySetFields):


class BinarizeKwargs(ProcessingKwargs):
"""key word arguments for `BinarizeDescr`"""

threshold: float
"""The fixed threshold"""


class BinarizeDescr(ProcessingDescrBase):
"""BinarizeDescr the tensor with a fixed threshold.
"""BinarizeDescr the tensor with a fixed `BinarizeKwargs.threshold`.
Values above the threshold will be set to one, values below the threshold to zero.
"""

@@ -534,21 +536,29 @@ class BinarizeDescr(ProcessingDescrBase):


class ClipKwargs(ProcessingKwargs):
"""key word arguments for `ClipDescr`"""

min: float
"""minimum value for clipping"""
max: float
"""maximum value for clipping"""


class ClipDescr(ProcessingDescrBase):
"""Set tensor values below min to min and above max to max."""
"""Clip tensor values to a range.
Set tensor values below `ClipKwargs.min` to `ClipKwargs.min`
and above `ClipKwargs.max` to `ClipKwargs.max`.
"""

name: Literal["clip"] = "clip"

kwargs: ClipKwargs


class ScaleLinearKwargs(ProcessingKwargs):
"""key word arguments for `ScaleLinearDescr`"""

axes: Annotated[Optional[AxesInCZYX], Field(examples=["xy"])] = None
"""The subset of axes to scale jointly.
For example xy to scale the two image axes for 2d data jointly."""
@@ -597,6 +607,8 @@ def kwargs(self) -> ProcessingKwargs:


class ZeroMeanUnitVarianceKwargs(ProcessingKwargs):
"""key word arguments for `ZeroMeanUnitVarianceDescr`"""

mode: Literal["fixed", "per_dataset", "per_sample"] = "fixed"
"""Mode for computing mean and variance.
| mode | description |
@@ -642,6 +654,15 @@ class ZeroMeanUnitVarianceDescr(ProcessingDescrBase):


class ScaleRangeKwargs(ProcessingKwargs):
"""key word arguments for `ScaleRangeDescr`
For `min_percentile`=0.0 (the default) and `max_percentile`=100 (the default)
this processing step normalizes data to the [0, 1] intervall.
For other percentiles the normalized values will partially be outside the [0, 1]
intervall. Use `ScaleRange` followed by `ClipDescr` if you want to limit the
normalized values to a range.
"""

mode: Literal["per_dataset", "per_sample"]
"""Mode for computing percentiles.
| mode | description |
@@ -654,10 +675,10 @@ class ScaleRangeKwargs(ProcessingKwargs):
For example xy to normalize the two image axes for 2d data jointly."""

min_percentile: Annotated[Union[int, float], Interval(ge=0, lt=100)] = 0.0
"""The lower percentile used for normalization."""
"""The lower percentile used to determine the value to align with zero."""

max_percentile: Annotated[Union[int, float], Interval(gt=1, le=100)] = 100.0
"""The upper percentile used for normalization
"""The upper percentile used to determine the value to align with one.
Has to be bigger than `min_percentile`.
The range is 1 to 100 instead of 0 to 100 to avoid mistakenly
accepting percentiles specified in the range 0.0 to 1.0."""
@@ -691,6 +712,8 @@ class ScaleRangeDescr(ProcessingDescrBase):


class ScaleMeanVarianceKwargs(ProcessingKwargs):
"""key word arguments for `ScaleMeanVarianceDescr`"""

mode: Literal["per_dataset", "per_sample"]
"""Mode for computing mean and variance.
| mode | description |
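The reworded `min_percentile`/`max_percentile` docstrings above describe what the scale_range step does. As a minimal, illustrative NumPy sketch (not part of this diff; it assumes an `eps` term for numeric stability analogous to the spec's other scaling steps and ignores the optional `axes` subset):

import numpy as np

def scale_range(
    tensor: np.ndarray,
    min_percentile: float = 0.0,
    max_percentile: float = 100.0,
    eps: float = 1e-6,
) -> np.ndarray:
    # the value aligned with zero and the value aligned with one
    v_lower = np.percentile(tensor, min_percentile)
    v_upper = np.percentile(tensor, max_percentile)
    return (tensor - v_lower) / (v_upper - v_lower + eps)

With the default percentiles (0.0 and 100) this maps the data to the [0, 1] interval; other percentiles push some values outside it, which is why the docstring suggests a subsequent `ClipDescr`.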
52 changes: 41 additions & 11 deletions bioimageio/spec/model/v0_5.py
@@ -789,11 +789,15 @@ class ProcessingDescrBase(NodeWithExplicitlySetFields, ABC):


class BinarizeKwargs(ProcessingKwargs):
"""key word arguments for `BinarizeDescr`"""

threshold: float
"""The fixed threshold"""


class BinarizeAlongAxisKwargs(ProcessingKwargs):
"""key word arguments for `BinarizeDescr`"""

threshold: NotEmpty[List[float]]
"""The fixed threshold values along `axis`"""

@@ -803,7 +807,9 @@ class BinarizeAlongAxisKwargs(ProcessingKwargs):

class BinarizeDescr(ProcessingDescrBase):
"""Binarize the tensor with a fixed threshold.
Values above the threshold will be set to one, values below the threshold to zero.
Values above `BinarizeKwargs.threshold`/`BinarizeAlongAxisKwargs.threshold`
will be set to one, values below the threshold to zero.
"""

id: Literal["binarize"] = "binarize"
@@ -818,6 +824,8 @@ class ClipDescr(ProcessingDescrBase):


class EnsureDtypeKwargs(ProcessingKwargs):
"""key word arguments for `EnsureDtypeDescr`"""

dtype: Literal[
"float32",
"float64",
@@ -834,11 +842,15 @@ class EnsureDtypeKwargs(ProcessingKwargs):


class EnsureDtypeDescr(ProcessingDescrBase):
"""cast the tensor data type to `EnsureDtypeKwargs.dtype` (if not matching)"""

id: Literal["ensure_dtype"] = "ensure_dtype"
kwargs: EnsureDtypeKwargs


class ScaleLinearKwargs(ProcessingKwargs):
"""key word arguments for `ScaleLinearDescr`"""

gain: float = 1.0
"""multiplicative factor"""

@@ -857,6 +869,8 @@ def _validate(self) -> Self:


class ScaleLinearAlongAxisKwargs(ProcessingKwargs):
"""key word arguments for `ScaleLinearDescr`"""

axis: Annotated[NonBatchAxisId, Field(examples=["channel"])]
"""The axis of of gains/offsets values."""

@@ -912,8 +926,7 @@ def kwargs(self) -> ProcessingKwargs:


class FixedZeroMeanUnitVarianceKwargs(ProcessingKwargs):
"""Normalize with fixed, precomputed values for mean and variance.
See `zero_mean_unit_variance` for data dependent normalization."""
"""key word arguments for `FixedZeroMeanUnitVarianceDescr`"""

mean: float
"""The mean value to normalize with."""
Expand All @@ -923,8 +936,7 @@ class FixedZeroMeanUnitVarianceKwargs(ProcessingKwargs):


class FixedZeroMeanUnitVarianceAlongAxisKwargs(ProcessingKwargs):
"""Normalize with fixed, precomputed values for mean and variance.
See `zero_mean_unit_variance` for data dependent normalization."""
"""key word arguments for `FixedZeroMeanUnitVarianceDescr`"""

mean: NotEmpty[List[float]]
"""The mean value(s) to normalize with."""
@@ -949,7 +961,13 @@ def _mean_and_std_match(self) -> Self:


class FixedZeroMeanUnitVarianceDescr(ProcessingDescrBase):
"""Subtract a given mean and divide by a given variance."""
"""Subtract a given mean and divide by the standard deviation.
Normalize with fixed, precomputed values for
`FixedZeroMeanUnitVarianceKwargs.mean` and `FixedZeroMeanUnitVarianceKwargs.std`.
Use `FixedZeroMeanUnitVarianceAlongAxisKwargs` for independent scaling along given
axes.
"""

id: Literal["fixed_zero_mean_unit_variance"] = "fixed_zero_mean_unit_variance"
kwargs: Union[
@@ -958,6 +976,8 @@ class FixedZeroMeanUnitVarianceDescr(ProcessingDescrBase):


class ZeroMeanUnitVarianceKwargs(ProcessingKwargs):
"""key word arguments for `ZeroMeanUnitVarianceDescr`"""

axes: Annotated[
Optional[Sequence[AxisId]], Field(examples=[("batch", "x", "y")])
] = None
@@ -979,6 +999,15 @@ class ZeroMeanUnitVarianceDescr(ProcessingDescrBase):


class ScaleRangeKwargs(ProcessingKwargs):
"""key word arguments for `ScaleRangeDescr`
For `min_percentile`=0.0 (the default) and `max_percentile`=100 (the default)
this processing step normalizes data to the [0, 1] intervall.
For other percentiles the normalized values will partially be outside the [0, 1]
intervall. Use `ScaleRange` followed by `ClipDescr` if you want to limit the
normalized values to a range.
"""

axes: Annotated[
Optional[Sequence[AxisId]], Field(examples=[("batch", "x", "y")])
] = None
@@ -989,10 +1018,10 @@ class ScaleRangeKwargs(ProcessingKwargs):
Default: Scale all axes jointly."""

min_percentile: Annotated[float, Interval(ge=0, lt=100)] = 0.0
"""The lower percentile used for normalization."""
"""The lower percentile used to determine the value to align with zero."""

max_percentile: Annotated[float, Interval(gt=1, le=100)] = 100.0
"""The upper percentile used for normalization
"""The upper percentile used to determine the value to align with one.
Has to be bigger than `min_percentile`.
The range is 1 to 100 instead of 0 to 100 to avoid mistakenly
accepting percentiles specified in the range 0.0 to 1.0."""
@@ -1023,8 +1052,7 @@ class ScaleRangeDescr(ProcessingDescrBase):


class ScaleMeanVarianceKwargs(ProcessingKwargs):
"""Scale a tensor's data distribution to match another tensor's mean/std.
`out = (tensor - mean) / (std + eps) * (ref_std + eps) + ref_mean.`"""
"""key word arguments for `ScaleMeanVarianceKwargs`"""

reference_tensor: TensorId
"""Name of tensor to match."""
@@ -1044,7 +1072,9 @@ class ScaleMeanVarianceKwargs(ProcessingKwargs):


class ScaleMeanVarianceDescr(ProcessingDescrBase):
"""Scale the tensor s.t. its mean and variance match a reference tensor."""
"""Scale a tensor's data distribution to match another tensor's mean/std.
`out = (tensor - mean) / (std + eps) * (ref_std + eps) + ref_mean.`
"""

id: Literal["scale_mean_variance"] = "scale_mean_variance"
kwargs: ScaleMeanVarianceKwargs
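The formula now quoted in the `ScaleMeanVarianceDescr` docstring, `out = (tensor - mean) / (std + eps) * (ref_std + eps) + ref_mean`, translates directly into code. A minimal sketch (illustrative only; it computes mean/std over the whole tensors rather than per the optional `axes`):

import numpy as np

def scale_mean_variance(
    tensor: np.ndarray, reference_tensor: np.ndarray, eps: float = 1e-6
) -> np.ndarray:
    mean, std = tensor.mean(), tensor.std()
    ref_mean, ref_std = reference_tensor.mean(), reference_tensor.std()
    # shift/scale so the output matches the reference tensor's mean and std
    return (tensor - mean) / (std + eps) * (ref_std + eps) + ref_mean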
7 changes: 5 additions & 2 deletions pyproject.toml
@@ -49,8 +49,11 @@ testpaths = ["bioimageio/spec", "tests", "scripts"]
[tool.ruff]
line-length = 88
target-version = "py312"
include = ["*.py", "*.pyi", "**/pyproject.toml", "*.ipynb"]
exclude = ["scripts/pdoc/original.py", "scripts/pdoc/patched.py"]
exclude = [
"bioimageio_cache",
"scripts/pdoc/original.py",
"scripts/pdoc/patched.py",
]

[tool.coverage.report]
exclude_also = ["if TYPE_CHECKING:", "assert_never\\("]
2 changes: 1 addition & 1 deletion scripts/generate_version_submodule_imports.py
@@ -164,7 +164,7 @@ def process(info: Info, check: bool):
)
)
else:
with package_init.open("w", newline="\n") as f:
with package_init.open("w", newline="\n", encoding="utf-8") as f:
_ = f.write(updated)


6 changes: 3 additions & 3 deletions scripts/interactive_docs/__main__.py
@@ -1,13 +1,13 @@
from typing_extensions import assert_never
import sys
from pathlib import Path

from typing_extensions import assert_never

from bioimageio.spec import SpecificResourceDescr
from scripts.generate_json_schemas import MAJOR_MINOR_VERSION

from . import generate_docs


html_result: "str | Exception" = generate_docs(
raw_type=SpecificResourceDescr, root_path=["Delivery"]
)
@@ -21,7 +21,7 @@
)
docs_output_path.parent.mkdir(parents=True, exist_ok=True)
print(f"[INFO] Writing interactive docs to {docs_output_path}")
with open(docs_output_path, "w") as f:
with open(docs_output_path, "w", encoding="utf-8") as f:
_ = f.write(html_result)
print(f"Wrote {_} bytes to {docs_output_path}")
else:
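Both `encoding="utf-8"` additions in this commit address the same issue: `open()` without an explicit encoding falls back to the locale's preferred encoding (e.g. cp1252 on many Windows setups), which can raise UnicodeEncodeError for non-ASCII output. A minimal illustration (hypothetical file name and content):

# without the explicit encoding argument this write may fail on a cp1252 locale
with open("docs.html", "w", encoding="utf-8") as f:
    _ = f.write("pixel size: 0.5 µm")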
16 changes: 10 additions & 6 deletions scripts/interactive_docs/hint.py
@@ -716,7 +716,11 @@ def do_parse(
return Unrecognized(raw_hint)
type_args: Tuple[Type[Any], Type[Any]] = getattr(raw_hint, "__args__")
key_type = type_args[0]
if key_type != str and key_type != int and key_type != typing.Union[int, str]:
if (
key_type is not str
and key_type is not int
and key_type != typing.Union[int, str]
):
return ParsingError(
f"Mappings with keys that are not ints or strings is not supported yet: {raw_hint}"
)
@@ -992,15 +996,15 @@ def to_type_widget(

def get_example(self) -> Example:
hint_type = self.hint_type
if hint_type == int:
if hint_type is int:
return Example(123456)
if hint_type == float:
if hint_type is float:
return Example(3.14)
if hint_type == bool:
if hint_type is bool:
return Example(True)
if hint_type == str:
if hint_type is str:
return Example("some free-format string")
if hint_type == type(None):
if hint_type is type(None):
return Example(None)
return Example("--- NO EXAMPLES PROVIDED ---")

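The hint.py hunks swap `==` for `is` when comparing types, presumably to satisfy ruff's E721 type-comparison rule now that `ruff check` runs over the whole repository; identity checks are also immune to objects whose `__eq__` claims equality with anything. An illustrative (made-up) example:

class AlwaysEqual:
    def __eq__(self, other: object) -> bool:
        return True  # claims equality with everything

candidate = AlwaysEqual()
print(candidate == str)  # True, misleading
print(candidate is str)  # False, the intended answer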
1 change: 1 addition & 0 deletions setup.py
@@ -52,6 +52,7 @@
test_extras := [
"deepdiff",
"filelock", # for session fixtures due to pytest-xdist
"h5py",
"lxml",
"psutil", # parallel pytest with '-n auto'
"pytest-cov",
1 change: 1 addition & 0 deletions tests/test_bioimageio_collection.py
@@ -87,6 +87,7 @@ def test_rdf(
"10.5281/zenodo.5764892/1.1", # affable-shark/1.1
"ambitious-sloth/1.2",
"breezy-handbag/1",
"faithful-chicken/1",
"ilastik/ilastik/1",
"uplifting-ice-cream/1",
],
