Neuroglancer Precomputed Mesh #8236

Draft · wants to merge 11 commits into master
7 changes: 7 additions & 0 deletions frontend/javascripts/admin/api/mesh.ts
@@ -44,6 +44,8 @@ export function getMeshfileChunksForSegment(
// editableMappingTracingId should be the tracing id, not the editable mapping id.
// If this is set, it is assumed that the request is about an editable mapping.
editableMappingTracingId: string | null | undefined,
meshFileType: string | null | undefined,
meshFilePath: string | null | undefined,
): Promise<SegmentInfo> {
return doWithToken((token) => {
const params = new URLSearchParams();
@@ -60,6 +62,8 @@ export function getMeshfileChunksForSegment(
data: {
meshFile,
segmentId,
meshFileType,
meshFilePath,
},
showErrorToast: false,
},
@@ -70,10 +74,13 @@
type MeshChunkDataRequest = {
byteOffset: number;
byteSize: number;
segmentId: number | null; // Only relevant for neuroglancer precomputed meshes
};

type MeshChunkDataRequestList = {
meshFile: string;
meshFileType: string | null | undefined;
meshFilePath: string | null | undefined;
requests: MeshChunkDataRequest[];
};
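As a reading aid for the two types above, here is a hedged sketch of a chunk-data request payload for a neuroglancer precomputed mesh file. All literal values, the mesh file name, and the path are invented for illustration; the real values come from the dataset's mesh info:

// Hypothetical payload; shapes follow MeshChunkDataRequest(List) above.
const exampleRequest: MeshChunkDataRequestList = {
  meshFile: "mesh", // illustrative name
  meshFileType: "neuroglancerPrecomputed",
  meshFilePath: "gs://bucket/dataset/mesh", // assumed remote location
  requests: [
    { byteOffset: 1024, byteSize: 4096, segmentId: 42 },
    // segmentId stays null for classic (non-neuroglancer) mesh files
    { byteOffset: 5120, byteSize: 2048, segmentId: null },
  ],
};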

16 changes: 13 additions & 3 deletions frontend/javascripts/oxalis/model/sagas/mesh_saga.ts
@@ -846,6 +846,7 @@ function* loadPrecomputedMeshForSegmentId(
scale,
additionalCoordinates,
mergeChunks,
id,
);

try {
@@ -882,7 +883,7 @@ function* _getChunkLoadingDescriptors(

const { segmentMeshController } = getSceneController();
const version = meshFile.formatVersion;
const { meshFileName } = meshFile;
const { meshFileName, meshFileType, meshFilePath } = meshFile;

const editableMapping = yield* select((state) =>
getEditableMappingForVolumeTracingId(state, segmentationLayer.tracingId),
@@ -916,6 +917,8 @@
// without a mapping.
meshFile.mappingName == null ? mappingName : null,
editableMapping != null && tracing ? tracing.tracingId : null,
meshFileType,
meshFilePath,
);
scale = [segmentInfo.transform[0][0], segmentInfo.transform[1][1], segmentInfo.transform[2][2]];
segmentInfo.chunks.lods.forEach((chunks, lodIndex) => {
@@ -951,9 +954,10 @@ function _getLoadChunksTasks(
scale: Vector3 | null,
additionalCoordinates: AdditionalCoordinate[] | null,
mergeChunks: boolean,
segmentId: number,
) {
const { segmentMeshController } = getSceneController();
const { meshFileName } = meshFile;
const { meshFileName, meshFileType, meshFilePath } = meshFile;
const loader = getDracoLoader();
return _.compact(
_.flatten(
@@ -981,8 +985,14 @@
getBaseSegmentationName(segmentationLayer),
{
meshFile: meshFileName,
meshFileType,
meshFilePath,
// Only extract the relevant properties
requests: chunks.map(({ byteOffset, byteSize }) => ({ byteOffset, byteSize })),
requests: chunks.map(({ byteOffset, byteSize }) => ({
byteOffset,
byteSize,
segmentId: segmentId,
})),
},
);

2 changes: 2 additions & 0 deletions frontend/javascripts/types/api_flow_types.ts
@@ -896,6 +896,8 @@ export type APIMeshFile = {
// 1-2 - the format should behave as v0 (refer to voxelytics for actual differences)
// 3 - the newer version with Draco encoding.
formatVersion: number;
meshFileType: string | null | undefined;
meshFilePath: string | null | undefined;
};
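To make the new fields concrete: a neuroglancer precomputed entry in the mesh file list might look as follows. This is a sketch; the values are invented, and the remaining APIMeshFile fields (e.g. mappingName) are omitted, hence the Partial:

// Hypothetical list entry for a neuroglancer precomputed mesh.
const exampleEntry: Partial<APIMeshFile> = {
  meshFileName: "mesh", // illustrative
  formatVersion: 3,
  meshFileType: "neuroglancerPrecomputed",
  meshFilePath: "s3://bucket/dataset/segmentation/mesh", // assumed
};

For classic WEBKNOSSOS mesh files, meshFileType and meshFilePath simply stay null/undefined.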
export type APIConnectomeFile = {
connectomeFileName: string;
19 changes: 19 additions & 0 deletions test/backend/MurmurHashTestSuite.scala
@@ -0,0 +1,19 @@
package backend

import com.scalableminds.webknossos.datastore.datareaders.precomputed.MurmurHash3
import org.scalatestplus.play.PlaySpec

class MurmurHashTestSuite extends PlaySpec {

"Murmur hash" should {
"return the correct hash" in {
val keyString = "Hello World!"
val keyBytes = keyString.getBytes
val seed = 0
val expectedHash = -1505357907696379773L
val actualHash = MurmurHash3.hash64(keyBytes, seed)

assert(actualHash == expectedHash)
}
}
}
@@ -33,7 +33,11 @@ class DSMeshController @Inject()(
urlOrHeaderToken(token, request)) {
for {
meshFiles <- meshFileService.exploreMeshFiles(organizationId, datasetDirectoryName, dataLayerName)
} yield Ok(Json.toJson(meshFiles))
neuroglancerMeshFiles <- meshFileService.exploreNeuroglancerPrecomputedMeshes(organizationId,
datasetDirectoryName,
dataLayerName)
allMeshFiles = meshFiles ++ neuroglancerMeshFiles
} yield Ok(Json.toJson(allMeshFiles))
}
}

@@ -70,11 +74,18 @@
omitMissing = false,
urlOrHeaderToken(token, request)
)
chunkInfos <- meshFileService.listMeshChunksForSegmentsMerged(organizationId,
datasetDirectoryName,
dataLayerName,
request.body.meshFile,
segmentIds)
chunkInfos <- request.body.meshFileType match {
case Some("neuroglancerPrecomputed") =>
meshFileService.listMeshChunksForNeuroglancerPrecomputedMesh(
request.body.meshFilePath,
request.body.segmentId) // TODO: Pass segmentIds here
case _ =>
meshFileService.listMeshChunksForSegmentsMerged(organizationId,
datasetDirectoryName,
dataLayerName,
request.body.meshFile,
segmentIds)
}
} yield Ok(Json.toJson(chunkInfos))
}
}
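To illustrate the branching above: the controller now distinguishes two request shapes by meshFileType. A hedged TypeScript sketch of the bodies the frontend might send (additional fields such as mappingName and editableMappingTracingId are omitted; all values are invented):

// Neuroglancer precomputed: resolved via the explicit meshFilePath.
const precomputedBody = {
  meshFile: "mesh",
  meshFileType: "neuroglancerPrecomputed",
  meshFilePath: "gs://bucket/dataset/mesh", // assumed
  segmentId: 42, // note the TODO above: segmentIds is not passed yet
};

// Classic mesh file: resolved via organization/dataset/layer on the server.
const classicBody = {
  meshFile: "meshfile_1-1-1", // illustrative
  meshFileType: null,
  meshFilePath: null,
  segmentIds: [42, 43],
};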
@@ -88,10 +99,12 @@
UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)),
urlOrHeaderToken(token, request)) {
for {
(data, encoding) <- meshFileService.readMeshChunk(organizationId,
datasetDirectoryName,
dataLayerName,
request.body) ?~> "mesh.file.loadChunk.failed"
(data, encoding) <- request.body.meshFileType match {
case Some("neuroglancerPrecomputed") =>
meshFileService.readMeshChunkForNeuroglancerPrecomputed(request.body.meshFilePath, request.body.requests)
case _ =>
meshFileService.readMeshChunk(organizationId, datasetDirectoryName, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed"
}
} yield {
if (encoding.contains("gzip")) {
Ok(data).withHeaders("Content-Encoding" -> "gzip")
@@ -0,0 +1,118 @@
package com.scalableminds.webknossos.datastore.datareaders.precomputed

object MurmurHash3 {

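  // Standard MurmurHash3 32-bit finalization mix ("fmix32"): xor-shifts and
  // odd-constant multiplications that avalanche every input bit.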
private def fmix(h: Int): Int = {
var hash = h
hash ^= (hash >>> 16)
hash = (hash * 0x85EBCA6B) & 0xFFFFFFFF
hash ^= (hash >>> 13)
hash = (hash * 0xC2B2AE35) & 0xFFFFFFFF
hash ^= (hash >>> 16)
hash
}

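  // x86_128 variant of MurmurHash3 (four 32-bit lanes); this matches the
  // "murmurhash3_x86_128" hash named by the neuroglancer precomputed sharding spec.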
private def hash128(key: Array[Byte], seed: Int): BigInt = {
val c1 = 0x239B961B
val c2 = 0xAB0E9789
val c3 = 0x38B34AE5
val c4 = 0xA1E38B93

val length = key.length
val nblocks = length / 16

var h1 = seed
var h2 = seed
var h3 = seed
var h4 = seed

// Process blocks
for (i <- 0 until nblocks) {
val block = key.slice(i * 16, i * 16 + 16)
val k1 = BigInt(block.slice(0, 4).reverse).toInt
val k2 = BigInt(block.slice(4, 8).reverse).toInt
val k3 = BigInt(block.slice(8, 12).reverse).toInt
val k4 = BigInt(block.slice(12, 16).reverse).toInt

h1 ^= Integer.rotateLeft((k1 * c1) & 0xFFFFFFFF, 15) * c2 & 0xFFFFFFFF
h1 = (Integer.rotateLeft(h1, 19) + h2) * 5 + 0x561CCD1B & 0xFFFFFFFF

h2 ^= Integer.rotateLeft((k2 * c2) & 0xFFFFFFFF, 16) * c3 & 0xFFFFFFFF
h2 = (Integer.rotateLeft(h2, 17) + h3) * 5 + 0x0BCAA747 & 0xFFFFFFFF

h3 ^= Integer.rotateLeft((k3 * c3) & 0xFFFFFFFF, 17) * c4 & 0xFFFFFFFF
h3 = (Integer.rotateLeft(h3, 15) + h4) * 5 + 0x96CD1C35 & 0xFFFFFFFF

h4 ^= Integer.rotateLeft((k4 * c4) & 0xFFFFFFFF, 18) * c1 & 0xFFFFFFFF
h4 = (Integer.rotateLeft(h4, 13) + h1) * 5 + 0x32AC3B17 & 0xFFFFFFFF
}

// Tail
val tail = key.slice(nblocks * 16, length)
var k1, k2, k3, k4 = 0

tail.zipWithIndex.foreach {
case (byte, i) =>
val shift = (i % 4) * 8
i / 4 match {
case 0 => k1 |= (byte & 0xFF) << shift
case 1 => k2 |= (byte & 0xFF) << shift
case 2 => k3 |= (byte & 0xFF) << shift
case 3 => k4 |= (byte & 0xFF) << shift
}
}

if (tail.length > 0) {
k1 = (k1 * c1) & 0xFFFFFFFF
k1 = Integer.rotateLeft(k1, 15) * c2 & 0xFFFFFFFF
h1 ^= k1
}

if (tail.length > 4) {
k2 = (k2 * c2) & 0xFFFFFFFF
k2 = Integer.rotateLeft(k2, 16) * c3 & 0xFFFFFFFF
h2 ^= k2
}

if (tail.length > 8) {
k3 = (k3 * c3) & 0xFFFFFFFF
k3 = Integer.rotateLeft(k3, 17) * c4 & 0xFFFFFFFF
h3 ^= k3
}

if (tail.length > 12) {
k4 = (k4 * c4) & 0xFFFFFFFF
k4 = Integer.rotateLeft(k4, 18) * c1 & 0xFFFFFFFF
h4 ^= k4
}

// Finalization
h1 ^= length
h2 ^= length
h3 ^= length
h4 ^= length

h1 = (h1 + h2 + h3 + h4) & 0xFFFFFFFF
h2 = (h1 + h2) & 0xFFFFFFFF
h3 = (h1 + h3) & 0xFFFFFFFF
h4 = (h1 + h4) & 0xFFFFFFFF

h1 = fmix(h1)
h2 = fmix(h2)
h3 = fmix(h3)
h4 = fmix(h4)

h1 = (h1 + h2 + h3 + h4) & 0xFFFFFFFF
h2 = (h1 + h2) & 0xFFFFFFFF
h3 = (h1 + h3) & 0xFFFFFFFF
h4 = (h1 + h4) & 0xFFFFFFFF

    // Mask each 32-bit lane to its unsigned value before combining: a negative
    // Int would otherwise sign-extend through BigInt and clobber the other lanes.
    (BigInt(h4.toLong & 0xFFFFFFFFL) << 96) |
      (BigInt(h3.toLong & 0xFFFFFFFFL) << 64) |
      (BigInt(h2.toLong & 0xFFFFFFFFL) << 32) |
      BigInt(h1.toLong & 0xFFFFFFFFL)
}

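  // The sharding spec consumes only the low 8 bytes of the 128-bit hash,
  // so expose them as a Long.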
def hash64(key: Array[Byte], seed: Int = 0): Long = {
val hash128 = MurmurHash3.hash128(key, seed)
val low = (hash128 & BigInt("FFFFFFFFFFFFFFFF", 16)).toLong
low
}
}
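For context on how hash64 is consumed: in the neuroglancer precomputed sharded format, the hashed chunk/segment id is split into a minishard number and a shard number using the minishard_bits and shard_bits from the sharding specification. A minimal TypeScript sketch of that split, assuming the hash is already available as a bigint (function and parameter names are ours, not from this PR):

// Split a hashed id into shard and minishard numbers per the sharding spec:
// minishard = hash & (2^minishardBits - 1)
// shard     = (hash >> minishardBits) & (2^shardBits - 1)
function shardLocation(hashedId: bigint, minishardBits: number, shardBits: number) {
  const minishardNumber = hashedId & ((1n << BigInt(minishardBits)) - 1n);
  const shardNumber = (hashedId >> BigInt(minishardBits)) & ((1n << BigInt(shardBits)) - 1n);
  return { shardNumber, minishardNumber };
}

The shard number then selects the shard file, and the minishard index within that file locates the actual mesh fragment bytes.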