Merge pull request #1 from joshmoore/writer
Looks good, let me merge this and add tests and such
glyg authored Jan 12, 2021
2 parents 7b9599a + d55edae commit 7c86ddf
Showing 2 changed files with 21 additions and 40 deletions.
12 changes: 1 addition & 11 deletions ome_zarr/data.py
@@ -12,6 +12,7 @@
 from skimage.segmentation import clear_border
 
 from .scale import Scaler
+from .writer import write_multiscale
 
 CHANNEL_DIMENSION = 1
 
@@ -92,17 +93,6 @@ def rgb_to_5d(pixels: np.ndarray) -> List:
     return video
 
 
-def write_multiscale(pyramid: List, group: zarr.Group) -> None:
-    """Write a pyramid with multiscale metadata to disk."""
-    paths = []
-    for path, dataset in enumerate(pyramid):
-        group.create_dataset(str(path), data=pyramid[path])
-        paths.append({"path": str(path)})
-
-    multiscales = [{"version": "0.1", "datasets": paths}]
-    group.attrs["multiscales"] = multiscales
-
-
 def create_zarr(
     zarr_directory: str,
     method: Callable[..., Tuple[List, List]] = coins,
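
With write_multiscale removed from data.py, callers now import it from ome_zarr.writer (see the writer.py diff below). A minimal usage sketch of the relocated helper, assuming a local directory store and toy array shapes that are not taken from this diff:

import numpy as np
import zarr

from ome_zarr.writer import write_multiscale

# Toy two-level pyramid: a 5D (t, c, z, y, x) base plus a 2x downsample in y and x.
base = np.random.randint(0, 255, size=(1, 1, 1, 256, 256), dtype=np.uint8)
pyramid = [base, base[..., ::2, ::2]]

# The caller owns the store; write_multiscale fills datasets "0", "1", ...
group = zarr.open_group("example.zarr", mode="w")
write_multiscale(pyramid, group)
print(group.attrs["multiscales"])  # [{"version": "0.1", "datasets": [{"path": "0"}, {"path": "1"}]}]
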
49 changes: 20 additions & 29 deletions ome_zarr/writer.py
@@ -1,67 +1,61 @@
 """Image writer utility
 """
-import json
 import logging
-from pathlib import Path
 from typing import Any, List, Tuple, Union
 
 import dask.array as da
 import numpy as np
 import zarr
 
-from .io import parse_url
-from .reader import Node
 from .types import JSONDict
 
 LOGGER = logging.getLogger("ome_zarr.writer")
 
 
+def write_multiscale(
+    pyramid: List, group: zarr.Group, chunks: Union[Tuple[int], int] = None,
+) -> None:
+    """Write a pyramid with multiscale metadata to disk."""
+    paths = []
+    for path, dataset in enumerate(pyramid):
+        # TODO: chunks here could be different per layer
+        group.create_dataset(str(path), data=pyramid[path], chunks=chunks)
+        paths.append({"path": str(path)})
+
+    multiscales = [{"version": "0.1", "datasets": paths}]
+    group.attrs["multiscales"] = multiscales
+
+
 def write_image(
-    path: str,
     image: np.ndarray,
-    name: str = "0",
-    group: str = None,
+    group: zarr.Group,
     chunks: Union[Tuple[int], int] = None,
     byte_order: Union[str, List[str]] = "tczyx",
     **metadata: JSONDict,
-) -> zarr.hierarchy.Group:
+) -> None:
     """Writes an image to the zarr store according to ome-zarr specification
 
     Parameters
     ----------
-    path: str,
-        a path to the zarr store location
     image: np.ndarray
         the image to save
-    group: str, optional
+    group: zarr.Group
         the group within the zarr store to store the data in
     chunks: int or tuple of ints,
         size of the saved chunks to store the image
     byte_order: str or list of str, default "tczyx"
         combination of the letters defining the order
         in which the dimensions are saved
-
-    Return
-    ------
-    Zarr Group which contains the image.
     """

-    zarr_location = parse_url(path, "w")
-    if zarr_location is None:
-        raise ValueError
-
-    node = Node(zarr=zarr_location, root=[])
-
     if image.ndim > 5:
         raise ValueError("Only images of 5D or less are supported")
 
     shape_5d: Tuple[Any, ...] = (*(1,) * (5 - image.ndim), *image.shape)
     image = image.reshape(shape_5d)
 
-    if chunks is None:
-        image = da.from_array(image)
-    else:
+    if chunks is not None:
         _chunks = _retuple(chunks, shape_5d)
         image = da.from_array(image, chunks=_chunks)
 
@@ -94,11 +88,8 @@ def write_image(
     omero["rdefs"] = {"model": "color"}
 
     metadata["omero"] = omero
-    da.to_zarr(arr=image, url=node.zarr.subpath(name))
-    with open(Path(node.zarr.subpath(name)) / ".zattrs", "w") as za:
-        json.dump(metadata, za)
-
-    return node
+    write_multiscale([image], group)  # TODO: downsample
+    group.attrs.update(metadata)
 
 
 def _retuple(chunks: Union[Tuple[int], int], shape: Tuple[Any, ...]) -> Tuple[int, ...]:
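
With the diff above, write_image no longer resolves a path via parse_url: the caller opens the store, passes a zarr.Group, and the pyramid is written through write_multiscale (a single level for now, per the TODO), with the return value dropped in favour of None. A minimal sketch of the new calling convention, using an illustrative store name and array shape not taken from this commit:

import numpy as np
import zarr

from ome_zarr.writer import write_image

# A 3D (z, y, x) stack; write_image pads it to 5D (t, c, z, y, x) internally.
image = np.random.randint(0, 255, size=(8, 256, 256), dtype=np.uint8)

# The caller now owns store and group creation instead of passing a path.
root = zarr.open_group("image.zarr", mode="w")
write_image(image, root)          # optionally pass chunks, e.g. (1, 1, 1, 128, 128)
print(root.attrs["multiscales"])  # set by write_multiscale; "omero" metadata is attached alongside it

Keeping store handling in the caller means the same open group can be inspected or extended after writing, rather than reparsing a URL and returning a reader Node.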
