Channel type #65

Open
wants to merge 4 commits into master
11 changes: 7 additions & 4 deletions batch/serializers.py
@@ -3,7 +3,7 @@
from rest_framework import serializers

from . import models
import cloud_ilastik.datasets.models as datasets_models
from cloud_ilastik import datasets
from cloud_ilastik.datasets import neuroglancer as ng


@@ -32,7 +32,7 @@ class Meta:

class ResultSerializer(serializers.ModelSerializer):
class Meta:
model = datasets_models.Dataset
model = datasets.models.Dataset
fields = ["neuroglancer_url"]


@@ -61,7 +61,7 @@ class Meta:
class BatchJob(serializers.Serializer):
project = serializers.PrimaryKeyRelatedField(queryset=models.Project.objects.all(), allow_null=False)
datasets = serializers.PrimaryKeyRelatedField(
many=True, queryset=datasets_models.Dataset.objects.all(), allow_null=False
many=True, queryset=datasets.models.Dataset.objects.all(), allow_null=False
)

class Meta:
@@ -72,7 +72,10 @@ class JobUpdate(serializers.Serializer):
status = serializers.ChoiceField(choices=[models.JobStatus.done.value, models.JobStatus.failed.value])
result_url = serializers.URLField()
name = serializers.CharField()
dtype = serializers.ChoiceField(choices=datasets_models.DType.values())
dtype = serializers.ChoiceField(choices=datasets.models.DType.values())
channel_type = serializers.ChoiceField(
choices=datasets.types.ChannelType.values(), default=datasets.types.ChannelType.Intensity.value, required=False
)
size_t = serializers.IntegerField(default=1)
size_z = serializers.IntegerField(default=1)
size_y = serializers.IntegerField()
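For context, a rough sketch of how the new channel_type field behaves on JobUpdate: it is optional and falls back to ChannelType.Intensity.value ("intensity") when the HPC callback omits it. The payload values below are illustrative only, and the snippet assumes the project's configured Django/DRF environment; the excerpt may not show every required size_* field.

# Illustrative only; field names come from the diff above, payload values are made up.
from batch import models, serializers

payload = {
    "status": models.JobStatus.done.value,
    "result_url": "https://example.org/results/out.n5",  # made-up URL
    "name": "out.n5",                                     # made-up name
    "dtype": "float32",
    "size_y": 512,
    "size_x": 512,
    # "channel_type" deliberately omitted: the ChoiceField default applies.
}
serializer = serializers.JobUpdate(data=payload)
if serializer.is_valid():
    # With "channel_type" absent, DRF fills in the declared default.
    assert serializer.validated_data["channel_type"] == "intensity"
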
3 changes: 2 additions & 1 deletion batch/views.py
@@ -57,7 +57,7 @@ def post(self, request, *args, **kwargs):
prefix = "dataset-"
# Is it an error if there are no datasets, or some datasets in the request are not in the compatible set?
datasets = self._compatible_datasets.filter(
id__in=(int(k[len(prefix):]) for k in self.request.POST if k.startswith(prefix))
id__in=(int(k[len(prefix) :]) for k in self.request.POST if k.startswith(prefix))
)
except (KeyError, ValueError):
return HttpResponse(status=400)
@@ -145,6 +145,7 @@ def update(self, request, external_id: str):
"name": serializer.data["name"],
"url": serializer.data["result_url"],
"dtype": serializer.data["dtype"],
"channel_type": serializer.data["channel_type"],
**{k: v for k, v in serializer.data.items() if k.startswith("size_")},
"job": job,
}
2 changes: 1 addition & 1 deletion cloud_ilastik/datasets/apps.py
@@ -2,4 +2,4 @@


class DatasetsConfig(AppConfig):
name = 'cloud_ilastik.datasets'
name = "cloud_ilastik.datasets"
20 changes: 20 additions & 0 deletions cloud_ilastik/datasets/migrations/0006_dataset_channel_type.py
@@ -0,0 +1,20 @@
# Generated by Django 2.2.9 on 2020-03-25 13:35

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("datasets", "0005_auto_20200117_1549"),
]

operations = [
migrations.AddField(
model_name="dataset",
name="channel_type",
field=models.CharField(
choices=[("Intensity", "intensity"), ("IndexedColor", "indexed")], default="intensity", max_length=15
),
),
]
34 changes: 18 additions & 16 deletions cloud_ilastik/datasets/models.py
@@ -7,25 +7,24 @@
import files.models as files_models

from . import neuroglancer as ng
from . import types

TAR_URL_RE = re.compile("/data$")


class DType(str, enum.Enum):
def _generate_next_value_(name, start, count, last_values):
return name

uint8 = enum.auto()
uint16 = enum.auto()
uint32 = enum.auto()
uint64 = enum.auto()
int8 = enum.auto()
int16 = enum.auto()
int32 = enum.auto()
int64 = enum.auto()
float16 = enum.auto()
float32 = enum.auto()
float64 = enum.auto()
@enum.unique
class DType(enum.Enum):
uint8 = "uint8"
uint16 = "uint16"
uint32 = "uint32"
uint64 = "uint64"
int8 = "int8"
int16 = "int16"
int32 = "int32"
int64 = "int64"
float16 = "float16"
float32 = "float32"
float64 = "float64"

@classmethod
def choices(cls):
@@ -40,6 +39,9 @@ class Dataset(models.Model):
name = models.CharField(max_length=255)
url = models.URLField()
dtype = models.CharField(max_length=15, choices=DType.choices())
channel_type = models.CharField(
max_length=15, choices=types.ChannelType.choices(), default=types.ChannelType.Intensity.value
)
size_t = models.PositiveIntegerField(default=1)
size_z = models.PositiveIntegerField(default=1)
size_y = models.PositiveIntegerField()
@@ -72,7 +74,7 @@ def as_viewer_layer(self):
mode = ng.ColorMode.RGB
else:
mode = ng.ColorMode.ILASTIK
return ng.Layer(self.url, self.size_c, color_mode=mode, role="data")
return ng.Layer(self.url, self.size_c, color_mode=mode, channel_type=self.channel_type, role="data")

@property
def neuroglancer_url(self):
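For orientation on the model change above: the new channel_type column is a plain CharField whose default is the enum's value, so a stored string maps back to the ChannelType enum by value. A minimal sketch, where the stored value is illustrative:

from cloud_ilastik.datasets import types

# Example of what Dataset.channel_type may hold after an object-classification job.
stored = "indexed"
assert types.ChannelType(stored) is types.ChannelType.IndexedColor
assert types.ChannelType(stored).name == "IndexedColor"
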
59 changes: 41 additions & 18 deletions cloud_ilastik/datasets/neuroglancer.py
@@ -6,36 +6,34 @@

from django.conf import settings

__all__ = ["viewer_url"]

from . import types

@enum.unique
class ColorMode(enum.Enum):
ILASTIK = "ilastik"
RGB = "rgb"
GRAYSCALE = "grayscale"
__all__ = ["viewer_url"]


class Layer:
url: str
num_channels: int
role: str
selected: bool
color_mode: ColorMode
color_table: types.ColorTable
channel_type: types.ChannelType

def __init__(
self,
url: str,
num_channels: int,
role: str = "data",
selected: bool = False,
color_mode: ColorMode = ColorMode.RGB,
color_table: types.ColorTable = types.ColorTable.RGB,
channel_type: types.ChannelType = types.ChannelType.Intensity,
):
self.url = url
self.num_channels = num_channels
self.role = role
self.selected = selected
self.color_mode = color_mode
self.color_table = color_table
self.channel_type = channel_type


class _Color:
@@ -71,24 +69,24 @@ def as_normalized_vec3(self) -> str:
return f"vec3({self.r}, {self.g}, {self.b})"

@classmethod
def get_colors(cls, num_colors: int, mode: ColorMode) -> List["Color"]:
def get_colors(cls, num_colors: int, table: types.ColorTable) -> List["Color"]:
color_table = {
ColorMode.GRAYSCALE: cls.COLORS_GRAYSCALE,
ColorMode.RGB: cls.COLORS_RGB,
ColorMode.ILASTIK: cls.COLORS_ILASTIK,
}[mode]
types.ColorTable.GRAYSCALE: cls.COLORS_GRAYSCALE,
types.ColorTable.RGB: cls.COLORS_RGB,
types.ColorTable.ILASTIK: cls.COLORS_ILASTIK,
}[table]

return [cls(*rgb) for rgb in color_table[:num_colors]]

def __repr__(self):
return f"<Color ({self.r},{self.g},{self.b})>"


def _create_fragment_shader(channel_colors: List[_Color]):
def _create_intensity_fragment_shader(colors):
color_lines: List[str] = []
colors_to_mix: List[str] = []

for idx, color in enumerate(channel_colors):
for idx, color in enumerate(colors):
color_line = f"vec3 color{idx} = ({color.as_normalized_vec3()} / 255.0) * toNormalized(getDataValue({idx}));"
color_lines.append(color_line)
colors_to_mix.append(f"color{idx}")
@@ -104,21 +102,46 @@ def _create_fragment_shader(channel_colors: List[_Color]):
return "\n".join(shader_lines)


def _create_indexed_color_fragment_shader(colors):
color_lines: List[str] = ["vec4(0.0, 0.0, 0.0, 0.0)"]

for color in colors:
color_lines.append(f"vec4({color.r / 255.0}, {color.g / 255.0}, {color.b / 255.0}, 1.0)")

return f"""vec4 COLOR_MASKS[{len(color_lines)}] = vec4[](
{",".join(color_lines)}
);
void main() {{
uint val = toRaw(getDataValue());
emitRGBA(COLOR_MASKS[val]);
}}"""


def _create_fragment_shader(colors: List[_Color], channel_type: types.ChannelType):
if channel_type == types.ChannelType.Intensity:
return _create_intensity_fragment_shader(colors)
elif channel_type == types.ChannelType.IndexedColor:
return _create_indexed_color_fragment_shader(colors)
else:
raise Exception(f"Unknown channel type {channel_type}")


def viewer_url(layers: List[Layer], show_control_panel=False) -> str:
ng_url = "https://web.ilastik.org/viewer/#!"
ng_layers = []
selected_layer = None

for layer in layers:
data_url = layer.url.replace(settings.SWIFT_PREFIX, "https://web.ilastik.org/data/")
colors = _Color.get_colors(layer.num_channels, layer.color_table)
ng_layers.append(
{
"type": "image",
"source": {"url": f"n5://{data_url}"},
"tab": "source",
"blend": "default",
"name": layer.role,
"shader": _create_fragment_shader(_Color.get_colors(layer.num_channels, layer.color_mode)),
"shader": _create_fragment_shader(colors, layer.channel_type),
}
)

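To make the new dispatch concrete, a rough sketch of generating an indexed-color shader directly from this module; _Color is a private helper used here purely for illustration, and the exact GLSL text depends on the color tables defined in the file.

from cloud_ilastik.datasets import neuroglancer as ng
from cloud_ilastik.datasets import types

# Take the first two ilastik colors and build the lookup-table shader for an
# indexed-color (e.g. object prediction) layer.
colors = ng._Color.get_colors(2, types.ColorTable.ILASTIK)
shader = ng._create_fragment_shader(colors, types.ChannelType.IndexedColor)
print(shader)  # GLSL with a COLOR_MASKS array of 3 entries: transparent background + 2 colors

# The intensity path is unchanged in spirit: one color per channel, mixed additively.
intensity_shader = ng._create_fragment_shader(colors, types.ChannelType.Intensity)
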
22 changes: 22 additions & 0 deletions cloud_ilastik/datasets/types.py
@@ -0,0 +1,22 @@
import enum


@enum.unique
class ChannelType(enum.Enum):
Intensity = "intensity"
IndexedColor = "indexed"

@classmethod
def choices(cls):
return tuple((item.name, item.value) for item in cls)

@classmethod
def values(cls):
return tuple(item.value for item in cls)


@enum.unique
class ColorTable(enum.Enum):
ILASTIK = "ilastik"
RGB = "rgb"
GRAYSCALE = "grayscale"
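
For reference, a small sketch of what the two helper classmethods return: choices() feeds the Django model field and choices() / values() feed the DRF ChoiceField shown earlier.

from cloud_ilastik.datasets.types import ChannelType

assert ChannelType.choices() == (("Intensity", "intensity"), ("IndexedColor", "indexed"))
assert ChannelType.values() == ("intensity", "indexed")
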
6 changes: 3 additions & 3 deletions cloud_ilastik/datasets/urls.py
@@ -2,8 +2,8 @@

from . import views

app_name = 'datasets'
app_name = "datasets"
urlpatterns = [
path('', views.ListView.as_view(), name='list'),
path('<int:pk>/', views.DetailView.as_view(), name='detail'),
path("", views.ListView.as_view(), name="list"),
path("<int:pk>/", views.DetailView.as_view(), name="detail"),
]
32 changes: 23 additions & 9 deletions hpc/job.py
@@ -12,6 +12,7 @@
from collections.abc import Mapping, Iterable

from hpc.openstack_environment import OpenstackEnvironment
from cloud_ilastik.datasets.types import ChannelType

def dict_to_json_data(dictionary, strip_nones=True):
out_dict = {}
@@ -159,6 +160,7 @@ def __init__(
ILASTIK_PROJECT_FILE: Path,
ILASTIK_JOB_RESULT_ENDPOINT: str,
ILASTIK_EXPORT_SOURCE: str,
ILASTIK_RESULT_CHANNEL_TYPE: ChannelType,
ILASTIK_BLOCK_SIZE: int = 1024,
):
self.hpc_environment = hpc_environment or HpcEnvironment()
@@ -171,6 +173,7 @@
"ILASTIK_PROJECT_FILE": ILASTIK_PROJECT_FILE.name,
"ILASTIK_JOB_RESULT_ENDPOINT": ILASTIK_JOB_RESULT_ENDPOINT,
"ILASTIK_EXPORT_SOURCE": ILASTIK_EXPORT_SOURCE,
"ILASTIK_RESULT_CHANNEL_TYPE": ILASTIK_RESULT_CHANNEL_TYPE.value,
"ILASTIK_BLOCK_SIZE": ILASTIK_BLOCK_SIZE,
"HPC_PATH_PREFIX": self.hpc_environment.HPC_PATH_PREFIX,
**to_json_data(openstack_environment)
@@ -188,32 +191,43 @@ def run(self):
site = self.hpc_environment._get_site()
return site.new_job(job_description=self.raw(), inputs=self.inputs)

class WorkflowOutput:
def __init__(self, export_source: str, channel_type: ChannelType):
self.export_source = export_source
self.channel_type = channel_type

class PixelClassificationJobSpec(IlastikJobSpec):
class ExportSource(enum.Enum):
PROBABILITIES = "Probabilities"
class OutputType(enum.Enum):
PROBABILITIES = WorkflowOutput(export_source="Probabilities", channel_type=ChannelType.Intensity)

def __init__(
self,
*,
ILASTIK_EXPORT_SOURCE: ExportSource = ExportSource.PROBABILITIES,
output_type: OutputType = OutputType.PROBABILITIES,
**job_spec_kwargs
):
super().__init__(ILASTIK_EXPORT_SOURCE=ILASTIK_EXPORT_SOURCE.value, **job_spec_kwargs)
super().__init__(
ILASTIK_EXPORT_SOURCE=output_type.value.export_source,
ILASTIK_RESULT_CHANNEL_TYPE=output_type.value.channel_type,
**job_spec_kwargs
)


class ObjectClassificationJobSpec(IlastikJobSpec):
_PREDICTION_MAPS_FILE_NAME = ""
class ExportSource(enum.Enum):
OBJECT_PREDICTIONS = "Object Predictions"
class OutputType(enum.Enum):
OBJECT_PREDICTIONS = WorkflowOutput(export_source="Object Predictions", channel_type=ChannelType.IndexedColor)

def __init__(
self,
*,
ILASTIK_PREDICTION_MAPS: str,
ILASTIK_EXPORT_SOURCE: ExportSource = ExportSource.OBJECT_PREDICTIONS,
output_type: OutputType = OutputType.OBJECT_PREDICTIONS,
**job_spec_kwargs
):
super().__init__(ILASTIK_EXPORT_SOURCE=ILASTIK_EXPORT_SOURCE.value, **job_spec_kwargs)
super().__init__(
ILASTIK_EXPORT_SOURCE=output_type.value.export_source,
ILASTIK_RESULT_CHANNEL_TYPE=output_type.value.channel_type,
**job_spec_kwargs)
self.Executable = "./run_obj_classification.sh"
self.Environment["ILASTIK_PREDICTION_MAPS"] = ILASTIK_PREDICTION_MAPS
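
The OutputType refactor bundles the export source name and the resulting channel type into one WorkflowOutput value, so callers pick a single option instead of passing both separately. A rough sketch of what each option carries (names as in the diff; no job is submitted, and the snippet assumes the project's Python environment):

from cloud_ilastik.datasets.types import ChannelType
from hpc.job import ObjectClassificationJobSpec, PixelClassificationJobSpec

pixel = PixelClassificationJobSpec.OutputType.PROBABILITIES.value
assert pixel.export_source == "Probabilities"
assert pixel.channel_type is ChannelType.Intensity  # exported to the job env as "intensity"

obj = ObjectClassificationJobSpec.OutputType.OBJECT_PREDICTIONS.value
assert obj.export_source == "Object Predictions"
assert obj.channel_type is ChannelType.IndexedColor  # exported to the job env as "indexed"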

3 changes: 2 additions & 1 deletion hpc/remote_scripts/run_ilastik.sh
@@ -10,6 +10,7 @@ ILASTIK_RAW_DATA="${ILASTIK_RAW_DATA}"
ILASTIK_PROJECT_FILE="${ILASTIK_PROJECT_FILE}"
ILASTIK_JOB_RESULT_ENDPOINT="${ILASTIK_JOB_RESULT_ENDPOINT}"
ILASTIK_EXPORT_SOURCE="${ILASTIK_EXPORT_SOURCE}"
ILASTIK_RESULT_CHANNEL_TYPE="${ILASTIK_RESULT_CHANNEL_TYPE}"
ILASTIK_BLOCK_SIZE="${ILASTIK_BLOCK_SIZE:-1024}"
ILASTIK_EXTRA_OPTIONS="${ILASTIK_EXTRA_OPTIONS:-}"

@@ -36,4 +37,4 @@ srun --ntasks 1 swift upload "$OUTPUT_BUCKET_NAME" "$OUT_FILE_NAME"

srun --ntasks 1 python -u "$SCRIPTS_DIR/update_status.py" \
"${ILASTIK_JOB_RESULT_ENDPOINT}" "${JOB_ID}" \
--output "${OUT_FILE_NAME}" --bucket "${OUTPUT_BUCKET_NAME}"
--output "${OUT_FILE_NAME}" --bucket "${OUTPUT_BUCKET_NAME}" --channel-type "${ILASTIK_RESULT_CHANNEL_TYPE}"