diff --git a/package.json b/package.json index 9a9fe3a88..b150a5f91 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,7 @@ "url": "git+https://github.com/google/neuroglancer.git" }, "engines": { - "node": ">=20.10 <21 || >=21.2" + "node": ">=20.11 <21 || >=21.2" }, "browserslist": [ "last 2 Chrome versions", @@ -41,8 +41,10 @@ }, "devDependencies": { "@types/codemirror": "5.60.15", + "@types/dotenv": "^6.1.1", "@types/gl-matrix": "^2.4.5", "@types/lodash-es": "^4.17.12", + "@types/mathjs": "^9.4.1", "@types/node": "^20.14.12", "@types/pako": "^2.0.3", "@types/yargs": "^17.0.32", @@ -53,7 +55,6 @@ "css-loader": "^7.1.2", "esbuild": "^0.23.0", "esbuild-loader": "^4.2.2", - "eslint": "^8.56.0", "eslint-formatter-codeframe": "^7.32.1", "eslint-import-resolver-typescript": "^3.6.1", "eslint-interactive": "^10.8.0", @@ -77,11 +78,16 @@ "webpack-merge": "^6.0.1" }, "dependencies": { + "axios": "^1.7.7", + "browserify-fs": "^1.0.0", + "buffer": "^6.0.3", "codemirror": "^5.61.1", + "dotenv": "^16.4.5", "gl-matrix": "3.1.0", "glsl-editor": "^1.0.0", "ikonate": "github:mikolajdobrucki/ikonate#a86b4107c6ec717e7877f880a930d1ccf0b59d89", "lodash-es": "^4.17.21", + "mathjs": "^13.2.0", "nifti-reader-js": "^0.6.8", "numcodecs": "^0.3.2", "pako": "^2.1.0" @@ -96,8 +102,8 @@ "type": "module", "exports": { ".": "./src/main_module.ts", - "./unstable/*.js": "./src/*.ts", - "./unstable/*": "./src/*" + "./*.js": "./src/*.ts", + "./*": "./src/*" }, "imports": { "#src/third_party/jpgjs/jpg.js": "./src/third_party/jpgjs/jpg.js", @@ -335,6 +341,24 @@ "neuroglancer/datasource/render:disabled": "./src/datasource/render/register_default.ts", "default": "./src/datasource/render/register_default.ts" }, + "#datasource/trk/backend": { + "neuroglancer/datasource/trk:enabled": "./src/datasource/trk/backend.ts", + "neuroglancer/datasource:none_by_default": "./src/util/false.ts", + "neuroglancer/datasource/trk:disabled": "./src/datasource/trk/backend.ts", + "default": "./src/datasource/trk/backend.ts" + }, + "#datasource/trk/async_computation": { + "neuroglancer/datasource/trk:enabled": "./src/datasource/trk/async_computation.ts", + "neuroglancer/datasource:none_by_default": "./src/util/false.ts", + "neuroglancer/datasource/trk:disabled": "./src/datasource/trk/async_computation.ts", + "default": "./src/datasource/trk/async_computation.ts" + }, + "#datasource/trk/register_default": { + "neuroglancer/datasource/trk:enabled": "./src/datasource/trk/register_default.ts", + "neuroglancer/datasource:none_by_default": "./src/util/false.ts", + "neuroglancer/datasource/trk:disabled": "./src/datasource/trk/register_default.ts", + "default": "./src/datasource/trk/register_default.ts" + }, "#datasource/vtk/backend": { "neuroglancer/datasource/vtk:enabled": "./src/datasource/vtk/backend.ts", "neuroglancer/datasource:none_by_default": "./src/util/false.ts", diff --git a/src/datasource/enabled_async_computation_modules.ts b/src/datasource/enabled_async_computation_modules.ts index 94f0d6043..6b5b2e5a4 100644 --- a/src/datasource/enabled_async_computation_modules.ts +++ b/src/datasource/enabled_async_computation_modules.ts @@ -9,5 +9,6 @@ import "#datasource/nifti/async_computation"; import "#datasource/obj/async_computation"; import "#datasource/precomputed/async_computation"; import "#datasource/render/async_computation"; +import "#datasource/trk/async_computation"; import "#datasource/vtk/async_computation"; import "#datasource/zarr/async_computation"; diff --git a/src/datasource/enabled_backend_modules.ts 
b/src/datasource/enabled_backend_modules.ts index 926dea0f2..20b5a82f9 100644 --- a/src/datasource/enabled_backend_modules.ts +++ b/src/datasource/enabled_backend_modules.ts @@ -11,5 +11,6 @@ import "#datasource/obj/backend"; import "#datasource/precomputed/backend"; import "#datasource/python/backend"; import "#datasource/render/backend"; +import "#datasource/trk/backend"; import "#datasource/vtk/backend"; import "#datasource/zarr/backend"; diff --git a/src/datasource/enabled_frontend_modules.ts b/src/datasource/enabled_frontend_modules.ts index 96b934b7b..b7778d92c 100644 --- a/src/datasource/enabled_frontend_modules.ts +++ b/src/datasource/enabled_frontend_modules.ts @@ -15,5 +15,6 @@ import "#datasource/nifti/register_default"; import "#datasource/obj/register_default"; import "#datasource/precomputed/register_default"; import "#datasource/render/register_default"; +import "#datasource/trk/register_default"; import "#datasource/vtk/register_default"; import "#datasource/zarr/register_default"; diff --git a/src/datasource/trk/async_computation.ts b/src/datasource/trk/async_computation.ts new file mode 100644 index 000000000..e69de29bb diff --git a/src/datasource/trk/backend.ts b/src/datasource/trk/backend.ts new file mode 100644 index 000000000..5a1a587ca --- /dev/null +++ b/src/datasource/trk/backend.ts @@ -0,0 +1,48 @@ +/** + * @license + * Copyright 2016 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +import { WithParameters } from "#src/chunk_manager/backend.js"; +import { WithSharedCredentialsProviderCounterpart } from "#src/credentials_provider/shared_counterpart.js"; +import { + SkeletonSourceParameters, +} from "#src/datasource/trk/base.js"; +import type { SkeletonChunk } from "#src/skeleton/backend.js"; +import { SkeletonSource } from "#src/skeleton/backend.js"; +import { decodeSkeletonChunk } from "#src/skeleton/decode_precomputed_skeleton.js"; +import type { + SpecialProtocolCredentials, +} from "#src/util/special_protocol_request.js"; +import { registerSharedObject } from "#src/worker_rpc.js"; + + +console.log(import.meta.url); + +@registerSharedObject() +export class trkSkeletonSource extends WithParameters( + WithSharedCredentialsProviderCounterpart()( + SkeletonSource, + ), + SkeletonSourceParameters, +) { + async download(chunk: SkeletonChunk, + ) { + const { parameters } = this; + + decodeSkeletonChunk(chunk, parameters.skeletonBuffer, parameters.metadata.vertexAttributes); + } +} + diff --git a/src/datasource/trk/base.ts b/src/datasource/trk/base.ts new file mode 100644 index 000000000..5f7446e12 --- /dev/null +++ b/src/datasource/trk/base.ts @@ -0,0 +1,41 @@ +/** + * @license + * Copyright 2016 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import type { VertexAttributeInfo } from "#src/skeleton/base.js"; +import type { mat4 } from "#src/util/geom.js"; + + +export enum DataEncoding { + RAW = 0, + GZIP = 1, +} + +export enum ShardingHashFunction { + IDENTITY = 0, + MURMURHASH3_X86_128 = 1, +} + +export interface SkeletonMetadata { + transform: mat4; + vertexAttributes: Map; +} + +export class SkeletonSourceParameters { + url: string; + metadata: SkeletonMetadata; + skeletonBuffer: ArrayBuffer; + static RPC_ID = "trk/SkeletonSource"; +} \ No newline at end of file diff --git a/src/datasource/trk/frontend.ts b/src/datasource/trk/frontend.ts new file mode 100644 index 000000000..b4d4fe9f3 --- /dev/null +++ b/src/datasource/trk/frontend.ts @@ -0,0 +1,570 @@ +/** + * @license + * Copyright 2016 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +import type { ChunkManager } from "#src/chunk_manager/frontend.js"; +import { WithParameters } from "#src/chunk_manager/frontend.js"; +import { + emptyValidCoordinateSpace, + makeCoordinateSpace, + makeIdentityTransform, +} from "#src/coordinate_transform.js"; +import { WithCredentialsProvider } from "#src/credentials_provider/chunk_source_frontend.js"; +import type { + CompleteUrlOptions, + DataSource, + DataSubsourceEntry, + GetDataSourceOptions, +} from "#src/datasource/index.js"; +import { DataSourceProvider, RedirectError } from "#src/datasource/index.js"; +import type { + SkeletonMetadata, +} from "#src/datasource/trk/base.js"; +import { + SkeletonSourceParameters, +} from "#src/datasource/trk/base.js"; +import { TrackProcessor } from "#src/datasource/trk/reader/trackProcessor.js"; +import type { + InlineSegmentProperty, + InlineSegmentPropertyMap, +} from "#src/segmentation_display_state/property_map.js"; +import { + normalizeInlineSegmentPropertyMap, + SegmentPropertyMap, +} from "#src/segmentation_display_state/property_map.js"; +import type { VertexAttributeInfo } from "#src/skeleton/base.js"; +import { SkeletonSource } from "#src/skeleton/frontend.js"; + +import { DATA_TYPE_ARRAY_CONSTRUCTOR, DataType } from "#src/util/data_type.js"; +import type { Borrowed } from "#src/util/disposable.js"; +import { mat4 } from "#src/util/geom.js"; +import { completeHttpPath } from "#src/util/http_path_completion.js"; +import { + parseArray, + parseFixedLengthArray, + parseQueryStringParameters, + unparseQueryStringParameters, + verifyEnumString, + verifyFiniteFloat, + verifyObject, + verifyObjectProperty, + verifyOptionalObjectProperty, + verifyOptionalString, + verifyPositiveInt, + verifyString, + verifyStringArray, +} from "#src/util/json.js"; 
+import type { + SpecialProtocolCredentials, + SpecialProtocolCredentialsProvider, +} from "#src/util/special_protocol_request.js"; +import { + parseSpecialUrl, +} from "#src/util/special_protocol_request.js"; +import { Uint64 } from "#src/util/uint64.js"; + + +class trkSkeletonSource extends WithParameters( + WithCredentialsProvider()(SkeletonSource), + SkeletonSourceParameters, +) { + get skeletonVertexCoordinatesInVoxels() { + return false; + } + get vertexAttributes() { + return this.parameters.metadata.vertexAttributes; + } +} + +function parseTransform(data: any): mat4 { + return verifyObjectProperty(data, "transform", (value) => { + const transform = mat4.create(); + if (value !== undefined) { + parseFixedLengthArray( + transform.subarray(0, 12), + value, + verifyFiniteFloat, + ); + } + mat4.transpose(transform, transform); + return transform; + }); +} + +interface ParsedSkeletonMetadata { + metadata: SkeletonMetadata; + segmentPropertyMap: string | undefined; +} + +function parseSkeletonMetadata(data: any): ParsedSkeletonMetadata { + verifyObject(data); + const t = verifyObjectProperty(data, "@type", verifyString); + if (t !== "neuroglancer_skeletons") { + throw new Error(`Unsupported skeleton type: ${JSON.stringify(t)}`); + } + const transform = parseTransform(data); + const vertexAttributes = new Map(); + verifyObjectProperty(data, "vertex_attributes", (attributes) => { + if (attributes === undefined) return; + parseArray(attributes, (attributeData) => { + verifyObject(attributeData); + const id = verifyObjectProperty(attributeData, "id", verifyString); + if (id === "") throw new Error("vertex attribute id must not be empty"); + if (vertexAttributes.has(id)) { + throw new Error(`duplicate vertex attribute id ${JSON.stringify(id)}`); + } + const dataType = verifyObjectProperty(attributeData, "data_type", (y) => + verifyEnumString(y, DataType), + ); + const numComponents = verifyObjectProperty( + attributeData, + "num_components", + verifyPositiveInt, + ); + vertexAttributes.set(id, { dataType, numComponents }); + }); + }); + const segmentPropertyMap = verifyObjectProperty( + data, + "segment_properties", + verifyOptionalString, + ); + return { + metadata: { + transform, vertexAttributes, + } as SkeletonMetadata, + segmentPropertyMap, + }; +} + +async function getSkeletonMetadata(): Promise { + const metadata = await getMetadata(); + console.log(metadata) + return parseSkeletonMetadata(metadata); +} + +function getDefaultCoordinateSpace() { + return makeCoordinateSpace({ + names: ["x", "y", "z"], + units: ["m", "m", "m"], + scales: Float64Array.of(1e-9, 1e-9, 1e-9), + }); +} + +async function getSkeletonSource( + chunkManager: ChunkManager, + credentialsProvider: SpecialProtocolCredentialsProvider, + url: string, +) { + + const skeletonBuffer = await getSkeletonBuffer(url); + const { metadata, segmentPropertyMap } = await getSkeletonMetadata(); + + return { + source: chunkManager.getChunkSource(trkSkeletonSource, { + credentialsProvider, + parameters: { + url, + metadata, + skeletonBuffer + }, + }), + transform: metadata.transform, + segmentPropertyMap, + }; +} + +let globalHeader: any = null; + +function getMetadata() { + // Start with the default vertex attributes + const vertexAttributes = [ + { + "id": "orientation", + "data_type": "float32", + "num_components": 3 + } + ]; + + // Check if globalHeader, globalHeader_n_scalar, and scalar_name are present + if (globalHeader && globalHeader.n_scalars && globalHeader.scalar_name) { + for (let i = 0; i < globalHeader.n_scalars; i++) 
{ + const scalarName = globalHeader.scalar_name[i]; + if (scalarName && scalarName !== '') { // Ensure scalarName is valid and not empty + vertexAttributes.push({ + "id": scalarName, // Use the scalar name as the ID + "data_type": "float32", // Assuming the scalar data type is float32 + "num_components": 1 // Each scalar is a single component + }); + } + } + } + + return { + "@type": "neuroglancer_skeletons", + "vertex_attributes": vertexAttributes, + "segment_properties": "prop" + }; +} + + +function getPropMetadata() { + return { + "@type": "neuroglancer_segment_properties", + "inline": { + "ids": [ + "1" + ], + "properties": [ + { + "id": "label", + "type": "label", + "values": [ + "1" + ] + } + ] + } + }; +} + +const n_tracks = 1000; +async function getSkeletonBuffer(url: string): Promise { + const trackProcessor = new TrackProcessor(); + await trackProcessor.streamAndProcessHeader(url, 0, 999); + + if (!trackProcessor.globalHeader) { + console.error('Error: Failed to fetch or process the TRK header.'); + return new ArrayBuffer(0); + } + + // Set globalHeader and process tracks + globalHeader = trackProcessor.globalHeader; + console.log(globalHeader); + + const totalTracks = globalHeader?.n_count; + if (totalTracks !== undefined) { + const randomTrackNumbers = trackProcessor.getRandomTrackIndices(totalTracks, n_tracks); + + // Process track data and get the skeleton data in arrayBuffer format + const skeleton = await trackProcessor.processTrackData(randomTrackNumbers, 1, url); + return skeleton.arrayBuffer || new ArrayBuffer(0); // Resolves only after processing all tracks + } else { + console.error("totalTracks is undefined. Cannot proceed."); + return new ArrayBuffer(0); + } +} + + +async function getSkeletonsDataSource( + options: GetDataSourceOptions, + credentialsProvider: SpecialProtocolCredentialsProvider, + url: string, +): Promise { + const { + source: skeletons, + transform, + segmentPropertyMap, + } = await getSkeletonSource(options.chunkManager, credentialsProvider, url); + const subsources: DataSubsourceEntry[] = [ + { + id: "default", + default: true, + subsource: { mesh: skeletons }, + subsourceToModelSubspaceTransform: transform, + }, + ]; + if (segmentPropertyMap !== undefined) { + const metadata = await getPropMetadata(); + const segmentPropertyMapData = getSegmentPropertyMap( + options.chunkManager, + credentialsProvider, + metadata, + ); + subsources.push({ + id: "properties", + default: true, + subsource: { segmentPropertyMap: segmentPropertyMapData }, + }); + } + return { + modelTransform: makeIdentityTransform(getDefaultCoordinateSpace()), + subsources, + }; +} + +function parseInlinePropertyMap(data: unknown): InlineSegmentPropertyMap { + verifyObject(data); + const tempUint64 = new Uint64(); + const ids = verifyObjectProperty(data, "ids", (idsObj) => { + idsObj = verifyStringArray(idsObj); + const numIds = idsObj.length; + const ids = new Uint32Array(numIds * 2); + for (let i = 0; i < numIds; ++i) { + if (!tempUint64.tryParseString(idsObj[i])) { + throw new Error(`Invalid uint64 id: ${JSON.stringify(idsObj[i])}`); + } + ids[2 * i] = tempUint64.low; + ids[2 * i + 1] = tempUint64.high; + } + return ids; + }); + const numIds = ids.length / 2; + const properties = verifyObjectProperty(data, "properties", (propertiesObj) => + parseArray(propertiesObj, (propertyObj): InlineSegmentProperty => { + verifyObject(propertyObj); + const id = verifyObjectProperty(propertyObj, "id", verifyString); + const description = verifyOptionalObjectProperty( + propertyObj, + 
"description", + verifyString, + ); + const type = verifyObjectProperty(propertyObj, "type", (type) => { + if ( + type !== "label" && + type !== "description" && + type !== "string" && + type !== "tags" && + type !== "number" + ) { + throw new Error(`Invalid property type: ${JSON.stringify(type)}`); + } + return type; + }); + if (type === "tags") { + const tags = verifyObjectProperty( + propertyObj, + "tags", + verifyStringArray, + ); + let tagDescriptions = verifyOptionalObjectProperty( + propertyObj, + "tag_descriptions", + verifyStringArray, + ); + if (tagDescriptions === undefined) { + tagDescriptions = new Array(tags.length); + tagDescriptions.fill(""); + } else { + if (tagDescriptions.length !== tags.length) { + throw new Error( + `Expected tag_descriptions to have length: ${tags.length}`, + ); + } + } + const values = verifyObjectProperty( + propertyObj, + "values", + (valuesObj) => { + if (!Array.isArray(valuesObj) || valuesObj.length !== numIds) { + throw new Error( + `Expected ${numIds} values, but received: ${valuesObj.length}`, + ); + } + return valuesObj.map((tagIndices) => { + return String.fromCharCode(...tagIndices); + }); + }, + ); + return { id, description, type, tags, tagDescriptions, values }; + } + if (type === "number") { + const dataType = verifyObjectProperty(propertyObj, "data_type", (x) => + verifyEnumString(x, DataType), + ); + if (dataType === DataType.UINT64) { + throw new Error("uint64 properties not supported"); + } + const values = verifyObjectProperty( + propertyObj, + "values", + (valuesObj) => { + if (!Array.isArray(valuesObj) || valuesObj.length !== numIds) { + throw new Error( + `Expected ${numIds} values, but received: ${valuesObj.length}`, + ); + } + return DATA_TYPE_ARRAY_CONSTRUCTOR[dataType].from(valuesObj); + }, + ); + let min = Infinity; + let max = -Infinity; + for (let i = values.length - 1; i >= 0; --i) { + const v = values[i]; + if (v < min) min = v; + if (v > max) max = v; + } + return { id, description, type, dataType, values, bounds: [min, max] }; + } + const values = verifyObjectProperty( + propertyObj, + "values", + (valuesObj) => { + verifyStringArray(valuesObj); + if (valuesObj.length !== numIds) { + throw new Error( + `Expected ${numIds} values, but received: ${valuesObj.length}`, + ); + } + return valuesObj; + }, + ); + return { id, description, type, values }; + }), + ); + return normalizeInlineSegmentPropertyMap({ ids, properties }); +} + +export function getSegmentPropertyMap( + chunkManager: Borrowed, + credentialsProvider: SpecialProtocolCredentialsProvider, + data: unknown, +): SegmentPropertyMap { + chunkManager; + credentialsProvider; + try { + const t = verifyObjectProperty(data, "@type", verifyString); + if (t !== "neuroglancer_segment_properties") { + throw new Error( + `Unsupported segment property map type: ${JSON.stringify(t)}`, + ); + } + const inlineProperties = verifyOptionalObjectProperty( + data, + "inline", + parseInlinePropertyMap, + ); + return new SegmentPropertyMap({ inlineProperties }); + } catch (e) { + throw new Error(`Error parsing segment property map: ${e.message}`); + } +} + +async function getSegmentPropertyMapDataSource( + options: GetDataSourceOptions, + credentialsProvider: SpecialProtocolCredentialsProvider, + metadata: unknown, +): Promise { + options; + return { + modelTransform: makeIdentityTransform(emptyValidCoordinateSpace), + subsources: [ + { + id: "default", + default: true, + subsource: { + segmentPropertyMap: getSegmentPropertyMap( + options.chunkManager, + credentialsProvider, + 
metadata, + ), + }, + }, + ], + }; +} + +const urlPattern = /^([^#]*)(?:#(.*))?$/; + +export function parseProviderUrl(providerUrl: string) { + let [, url, fragment] = providerUrl.match(urlPattern)!; + if (url.endsWith("/")) { + url = url.substring(0, url.length - 1); + } + const parameters = parseQueryStringParameters(fragment || ""); + return { url, parameters }; +} + +export function unparseProviderUrl(url: string, parameters: any) { + const fragment = unparseQueryStringParameters(parameters); + if (fragment) { + url += `#${fragment}`; + } + return url; +} + +export class TrkDataSource extends DataSourceProvider { + get description() { + return "Single trk file"; + } + + get(options: GetDataSourceOptions): Promise { + const { url: providerUrl, parameters } = parseProviderUrl( + options.providerUrl, + ); + return options.chunkManager.memoize.getUncounted( + { type: "trk:get", providerUrl, parameters }, + async (): Promise => { + const { url, credentialsProvider } = parseSpecialUrl( + providerUrl, + options.credentialsManager, + ); + + console.log(url) + + let metadata: any; + try { + metadata = await getMetadata(); + console.log(metadata) + } catch (e) { + throw new Error(`Failed to get metadata for ${url}: ${e}`); + } + + verifyObject(metadata); + + const redirect = verifyOptionalObjectProperty( + metadata, + "redirect", + verifyString, + ); + + if (redirect !== undefined) { + throw new RedirectError(redirect); + } + const t = verifyOptionalObjectProperty(metadata, "@type", verifyString); + console.log(t) + console.log(options) + console.log(credentialsProvider) + console.log(url) + switch (t) { + case "neuroglancer_skeletons": + return await getSkeletonsDataSource( + options, + credentialsProvider, + url, + ); + + case "neuroglancer_segment_properties": + return await getSegmentPropertyMapDataSource( + options, + credentialsProvider, + metadata, + ); + + default: + throw new Error(`Invalid type: ${JSON.stringify(t)}`); + } + }, + ); + } + completeUrl(options: CompleteUrlOptions) { + return completeHttpPath( + options.credentialsManager, + options.providerUrl, + options.cancellationToken, + ); + } +} diff --git a/src/datasource/trk/reader/color.txt b/src/datasource/trk/reader/color.txt new file mode 100644 index 000000000..6b51aba29 --- /dev/null +++ b/src/datasource/trk/reader/color.txt @@ -0,0 +1,39 @@ +vec3 colormapOrient(vec3 orient) { + vec3 result; + result.r = abs(orient[0]); + result.g = abs(orient[1]); + result.b = abs(orient[2]); + return clamp(result, 0.0, 1.0); +} + +vec3 colormapHeat(float scalar, float min, float max) { + float value = (clamp(scalar, min, max) - min) / (max - min + 1e-5); + //float value = clamp(scalar, 0.0, 1.22) / 1.22; // Ensure scalar is between 0 and 1 + vec3 color; + + if (value < 0.33) { + color = mix(vec3(1.0, 0.0, 0.0), vec3(1.0, 0.5, 0.0), value / 0.33); // Red to Orange + } else if (value < 0.66) { + color = mix(vec3(1.0, 0.5, 0.0), vec3(1.0, 1.0, 0.0), (value - 0.33) / 0.33); // Orange to Yellow + } else { + color = vec3(1.0, 1.0, 0.0); // Solid Yellow + } + + return color; +} + +#uicontrol bool orient_color checkbox(default=false) +#uicontrol bool heatmap checkbox(default=true) +#uicontrol float vmin slider(min=0.0, max=1.22, default=0.0) +#uicontrol float vmax slider(min=0.0, max=1.22, default=1.22) + +void main() { + if (orient_color) { + emitRGB(colormapOrient(orientation)); + } else if (heatmap) { + emitRGB(colormapHeat(FA, vmin, vmax)); + } else { + emitDefault(); // Default color if neither heatmap nor orientation color is enabled + } +} 
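The attribute names referenced above must match the vertex attribute ids declared by the datasource: getMetadata() in frontend.ts always emits a 3-component float32 "orientation" attribute plus one single-component float32 attribute per non-empty scalar_name in the TRK header. A minimal sketch of the resulting metadata, assuming the loaded file's only scalar is named FA (which is what the heatmap branch above expects):

// Sketch: vertex_attributes produced by getMetadata() for a header with
// n_scalars = 1 and scalar_name[0] = "FA" (an assumption about the input file).
const exampleVertexAttributes = [
  { id: "orientation", data_type: "float32", num_components: 3 },
  { id: "FA", data_type: "float32", num_components: 1 },
];

Any further non-empty scalar names would appear as additional single-component float32 attributes, addressable from the shader by the same name.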
+ diff --git a/src/datasource/trk/reader/main.ts b/src/datasource/trk/reader/main.ts new file mode 100644 index 000000000..01f396ab8 --- /dev/null +++ b/src/datasource/trk/reader/main.ts @@ -0,0 +1,43 @@ + +// import dotenv from 'dotenv'; +import { TrackProcessor } from '#src/datasource/trk/reader/trackProcessor.js'; +// dotenv.config(); + +/** + * This function serves as the entry point for the application. It handles the sequence of operations involving track processing and skeleton precomputed format file creation. + * The function first sets up the track processor, fetches and processes only the header of the track file, and checks if the header was successfully processed. + * If the header is present, it then processes a specified number of track data and uploads the results to an S3 bucket. + * + * @async + * @returns {Promise} A promise that resolves when all operations are successfully completed or throws an error if any step fails. + */ +async function main() { + + // Create a global instance + const trackProcessor = new TrackProcessor(); + // Upload data from cloud + const trkFileUrl = 'https://dandiarchive.s3.amazonaws.com/blobs/d4a/c43/d4ac43bd-6896-4adf-a911-82edbea21f67'; + // Upload data from local machine + // const trkFilePath = '/Users/shrutiv/MyDocuments/GitHub/d4ac43bd-6896-4adf-a911-82edbea21f67.trk'; + + /* Process the header informtion from first 1000 bytes (0-999). */ + await trackProcessor.streamAndProcessHeader(trkFileUrl, 0, 999); + if (!trackProcessor.globalHeader) { + console.error('Error: Failed to fetch or process the TRK header.'); + return; + } + + /* Process all the tracks from starting from 1 and generate precomputed file for all + the tracks present in the randomTrackNumbers array. */ + const totalTracks = trackProcessor.globalHeader.n_count; + const randomTrackNumbers = trackProcessor.getRandomTrackIndices(totalTracks, 1000); + // const randomTrackNumbers = [1]; // process only single track + + await trackProcessor.processTrackData(randomTrackNumbers, 1, trkFileUrl); + // await trackProcessor.processTrackData(randomTrackNumbers, 1, trkFilePath); + + + +} + +main().catch(console.error); diff --git a/src/datasource/trk/reader/skeletonWriter.ts b/src/datasource/trk/reader/skeletonWriter.ts new file mode 100644 index 000000000..f0981e4dd --- /dev/null +++ b/src/datasource/trk/reader/skeletonWriter.ts @@ -0,0 +1,88 @@ +// import axios from "axios"; + +/** + * Represents a 3D vertex with coordinates. + * @interface + */ +export interface Vertex { + x: number; + y: number; + z: number; +} + +/** + * Represents an edge connecting two vertices by their indices. + * @interface + */ +export interface Edge { + vertex1: number; + vertex2: number; +} + +/** + * Provides utilities for creating skeleton data, storing it in an ArrayBuffer, + * and sending it to a backend service. + */ +export class SkeletonWriter { + + /** + * Creates an ArrayBuffer with skeleton data, including vertices, edges, and orientations. + * @static + * @param {Vertex[]} vertices - The list of vertices to store. + * @param {Edge[]} edges - The list of edges connecting the vertices. + * @param {number[][]} orientations - The orientations of each vertex. + * @returns {ArrayBuffer} - The created ArrayBuffer containing the skeleton data. 
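+ * @param {{ [key: string]: number }[]} scalarsArray - Per-vertex scalar values keyed by attribute name.
+ * Buffer layout (all little-endian): uint32 vertex count, uint32 edge count, vertexCount x 3 float32 positions (pre-scaled by 1e6), edgeCount x 2 uint32 vertex indices, vertexCount x 3 float32 orientations, then the per-vertex float32 scalars in key order.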
+ */ + static createArrayBuffer(vertices: Vertex[], edges: Edge[], orientations: number[][], scalarsArray: { [key: string]: number }[]): ArrayBuffer { + const vertexCount = vertices.length; + const edgeCount = edges.length; + + const vertexSize = 12; // 3 floats (x, y, z), each 4 bytes + const edgeSize = 8; // 2 uint32s (source and target), each 4 bytes + const orientationSize = 12; // 3 floats (x, y, z) for orientations + const scalarSize = scalarsArray.length > 0 ? 4 * Object.keys(scalarsArray[0]).length : 0; + + const bufferSize = 4 + 4 + (vertexSize * vertexCount) + (edgeSize * edgeCount) + (orientationSize * vertexCount) + scalarSize * vertexCount; + + const buffer = new ArrayBuffer(bufferSize); + const dataView = new DataView(buffer); + let offset = 0; + + dataView.setUint32(offset, vertexCount, true); + offset += 4; + dataView.setUint32(offset, edgeCount, true); + offset += 4; + + for (let i = 0; i < vertexCount; i++) { + dataView.setFloat32(offset, vertices[i].x * 1E6, true); + dataView.setFloat32(offset + 4, vertices[i].y * 1E6, true); + dataView.setFloat32(offset + 8, vertices[i].z * 1E6, true); + offset += 12; + } + + for (let i = 0; i < edgeCount; i++) { + dataView.setUint32(offset, edges[i].vertex1, true); + dataView.setUint32(offset + 4, edges[i].vertex2, true); + offset += 8; + } + + for (let i = 0; i < vertexCount; i++) { + dataView.setFloat32(offset, orientations[i][0], true); + dataView.setFloat32(offset + 4, orientations[i][1], true); + dataView.setFloat32(offset + 8, orientations[i][2], true); + offset += 12; + } + + for (let i = 0; i < vertexCount; i++) { + const scalarValues = scalarsArray[i]; + Object.values(scalarValues).forEach(scalar => { + dataView.setFloat32(offset, scalar, true); + offset += 4; + }); + } + + return buffer; + } + + +} diff --git a/src/datasource/trk/reader/trackProcessor.ts b/src/datasource/trk/reader/trackProcessor.ts new file mode 100644 index 000000000..35424144f --- /dev/null +++ b/src/datasource/trk/reader/trackProcessor.ts @@ -0,0 +1,259 @@ + +import { Buffer } from 'buffer'; +import axios from 'axios'; +import { type Vertex, type Edge, SkeletonWriter } from '#src/datasource/trk/reader/skeletonWriter.js'; +import type { TrkHeader } from '#src/datasource/trk/reader/trkHeader.js'; +import { TrkHeaderProcessor } from '#src/datasource/trk/reader/trkHeader.js'; +import { VoxelToRASConverter } from '#src/datasource/trk/reader/voxelToRASConverter.js'; + + +/** + * Represents the processing state of track data, indicating progress in bytes and tracks. + * @interface + */ +export interface ProcessState { + byteOffset: number; + trackNumber: number; + offset: number; +} + +/** + * Represents a 3D orientation vector. + * @typedef Orientation + */ +type Orientation = [number, number, number]; + +/** + * Manages the processing of track data from TRK files, including streaming, and processing track data. + */ +export class TrackProcessor { + globalHeader: TrkHeader | null; + + /** + * Initializes a new instance of the TrackProcessor class with an optional global header. + * @param {TrkHeader | null} globalHeader - The global header of the TRK file. + */ + constructor(globalHeader: TrkHeader | null = null) { + this.globalHeader = globalHeader; + } + + /** + * Streams the TRK file header from a URL and processes it to set the global header. + * @async + * @param {string} url - The URL of the TRK file. + * @param {number} start - The start byte position for the range request. + * @param {number} end - The end byte position for the range request. 
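+ * On success the parsed header is stored on `this.globalHeader`; callers request bytes 0-999, i.e. the fixed 1000-byte TRK header.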
+ */ + async streamAndProcessHeader(url: string, start: number, end: number) { + + try { + const response = await axios.get(url, { + responseType: 'arraybuffer', + headers: { + 'Range': `bytes=${start}-${end}`, + }, + }); + const buffer = Buffer.from(response.data); + this.globalHeader = TrkHeaderProcessor.readTrkHeader(buffer); + // TrkHeaderProcessor.printTrkHeader(this.globalHeader); + } catch (error) { + console.error('Error streaming or processing the TRK file header:', error); + } + } + + /** + * Computes the 3D orientation vectors for track points, normalizing them to unit vectors. + * @static + * @param {number[][]} points - The array of 3D points for which to compute orientations. + * @returns {number[][]} An array of normalized 3D orientation vectors. + */ + static computeOrientation(points: number[][]): number[][] { + // Step 1: Compute directed orientation of each edge + let orient: number[][] = points.slice(1).map((point, i) => { + return [ + point[0] - points[i][0], + point[1] - points[i][1], + point[2] - points[i][2] + ]; + }); + + // Step 2: Compute orientation for each vertex + const originalOrient = [...orient]; + orient = [ + ...originalOrient.slice(0, 1), // First vertex (only one edge) + ...originalOrient.slice(0, -1).map((o, i) => { + return [ + o[0] + orient[i + 1][0], // x + o[1] + orient[i + 1][1], // y + o[2] + orient[i + 1][2] // z + ]; + }), + ...originalOrient.slice(-1) // Last vertex (only one edge) + ]; + + // Step 3: Normalize orientation vectors to unit length + orient = orient.map((o: number[]) => { + const length = Math.sqrt(o[0] * o[0] + o[1] * o[1] + o[2] * o[2]); + const normalizedLength = Math.max(length, 1e-12); // Avoid division by 0 + return [o[0] / normalizedLength, o[1] / normalizedLength, o[2] / normalizedLength] as Orientation; + }); + + + return orient; + } + + /** + * Processes the track data for selected track numbers and writes the result to disk. + * @async + * @param {number[]} randomTrackNumbers - The array of track numbers to be processed. + * @param {number} trackNumber - The current track number being processed. + * @param {string} filePath - The file path of the TRK file. + * @returns {Promise<{processState: ProcessState; timestamp: string}>} A promise that resolves to the processing state and a timestamp. 
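+ * Once every requested track has been processed, the returned object also carries the encoded skeleton as `arrayBuffer`.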
+ */ + async processTrackData(randomTrackNumbers: number[], trackNumber: number, filePath: string): Promise<{ processState: ProcessState; timestamp: string, arrayBuffer?: ArrayBuffer }> { + const now = new Date(); + const timestamp = now.toISOString().replace(/[-:]/g, '').replace('T', '_').slice(0, 15); + + if (!this.globalHeader) { + console.error('Error: Global header is not initialized.'); + return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; + } + + const maxTracksToProcess = randomTrackNumbers.length; + const vertices: Vertex[] = []; + const edges: Edge[] = []; + const orientations: number[][] = []; + const scalarsArray: { [key: string]: number }[] = []; + let trackProcessedCount = 0; + let vertexIndex = 0; + + try { + const { dataView, buffer } = await this.loadFileBuffer(filePath); + let offset = 1000; + + const numScalarsPerPoint = this.globalHeader.n_scalars || 0; + const scalarNames = this.globalHeader.scalar_name || []; + let minScalar = Infinity; + let maxScalar = -Infinity; + + while (trackProcessedCount < maxTracksToProcess && offset < buffer.byteLength) { + const n_points = dataView.getInt32(offset, true); + offset += 4; + + if (randomTrackNumbers.includes(trackNumber)) { + const points: number[][] = []; + for (let i = 0; i < n_points; i++) { + const x = dataView.getFloat32(offset, true); + const y = dataView.getFloat32(offset + 4, true); + const z = dataView.getFloat32(offset + 8, true); + offset += 12; + points.push([x, y, z]); + + const voxelPoint: [number, number, number] = [x, y, z]; + const affine = VoxelToRASConverter.getAffineToRasmm(this.globalHeader); + const rasPoint = VoxelToRASConverter.applyAffineMatrix(voxelPoint, affine); + + vertices.push({ x: rasPoint[0], y: rasPoint[1], z: rasPoint[2] }); + + if (i > 0) { + edges.push({ vertex1: vertexIndex - 1, vertex2: vertexIndex }); + } + vertexIndex++; + + const scalars: number[] = []; + const normalizedScalars: number[] = []; + + if (numScalarsPerPoint > 0) { + for (let s = 0; s < numScalarsPerPoint; s++) { + const scalarValue = dataView.getFloat32(offset, true); + scalars.push(scalarValue); + offset += 4; + + // Update the min and max scalar values + if (scalarValue < minScalar) minScalar = scalarValue; + if (scalarValue > maxScalar) maxScalar = scalarValue; + } + + // Normalize scalars after finding min and max + for (const scalar of scalars) { + const normalizedScalar = (scalar - minScalar) / (maxScalar - minScalar); + normalizedScalars.push(normalizedScalar); + } + + scalarsArray.push( + normalizedScalars.reduce((acc, scalar, idx) => { + acc[scalarNames[idx] || `scalar${idx + 1}`] = scalar; + return acc; + }, {} as { [key: string]: number }) + ); + } + + + } + + const orient = TrackProcessor.computeOrientation(points); + orientations.push(...orient); + + trackProcessedCount++; + + if (trackProcessedCount >= maxTracksToProcess) { + // After processing, log the min and max values + console.log(`Scalar range: min = ${minScalar}, max = ${maxScalar}`); + const arrayBuffer = SkeletonWriter.createArrayBuffer(vertices, edges, orientations, scalarsArray); + return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp, arrayBuffer }; + } + } else { + offset += n_points * (12 + numScalarsPerPoint * 4); + } + + trackNumber++; + } + + return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; + + } catch (error) { + console.error('Error fetching or processing track data:', error); + return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; + } 
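+ // Track records walked above: [int32 n_points][n_points x (3 float32 coordinates + n_scalars float32 values)], starting at byte offset 1000. Worked example with n_scalars = 1: a 2-point track spans 4 + 2 * (12 + 4) = 36 bytes. (The TRK format may also append n_properties float32 values per track; the offsets here assume n_properties = 0.)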
+ } + + + /** + * Shuffles and selects a random number of track indices from a total number of tracks. + * @param {number} totalTracks - The total number of tracks available. + * @param {number} numTracks - The number of tracks to select. + * @returns {number[]} An array of randomly selected track indices. + */ + getRandomTrackIndices(totalTracks: number, numTracks: number): number[] { + const trackIndices = Array.from({ length: totalTracks }, (_, i) => i + 1); // Create an array of track numbers + for (let i = trackIndices.length - 1; i > 0; i--) { + const j = Math.floor(Math.random() * (i + 1)); + [trackIndices[i], trackIndices[j]] = [trackIndices[j], trackIndices[i]]; // Shuffle array + } + return trackIndices.slice(0, numTracks); // Return the first `numTracks` tracks + } + + /** + * Loads the binary data of a file from a URL or local path into a buffer and creates a DataView for processing. + * @param {string} filePath - The URL or local path of the file to load. + * @returns {Promise<{dataView: DataView; buffer: Buffer}>} A promise that resolves to the DataView and buffer of the file. + */ + loadFileBuffer(filePath: string) { + + return axios.get(filePath, { responseType: 'arraybuffer' }) + .then(response => { + const buffer = Buffer.from(response.data); + const dataView = new DataView(buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength)); + console.log('Data loaded from URL successfully.'); + return { + dataView, + buffer + }; + }) + .catch(error => { + console.error('Failed to load file from URL:', error); + throw error; + }); + } + +} diff --git a/src/datasource/trk/reader/trkHeader.ts b/src/datasource/trk/reader/trkHeader.ts new file mode 100644 index 000000000..ba07d0ee5 --- /dev/null +++ b/src/datasource/trk/reader/trkHeader.ts @@ -0,0 +1,182 @@ +/** + * Represents the header of a Track (TRK) file, describing how track data is structured. + * @interface + */ +export interface TrkHeader { + id_string: string; + dim: [number, number, number]; + voxel_size: [number, number, number]; + origin: [number, number, number]; + n_scalars: number; + scalar_name: string[]; + n_properties: number; + property_name: string[]; + vox_to_ras: number[][]; + voxel_order: string; + image_orientation_patient: [number, number, number, number, number, number]; + invert_x: boolean; + invert_y: boolean; + invert_z: boolean; + swap_xy: boolean; + swap_yz: boolean; + swap_zx: boolean; + n_count: number; + version: number; + hdr_size: number; +} + +/** + * Handles reading and processing the header of TRK files. + */ +export class TrkHeaderProcessor { + + /** + * Reads the header from a buffer and returns a structured object. + * @static + * @param {Buffer} buffer The buffer containing binary data of a TRK file header. + * @returns {TrkHeader} The parsed header as a structured object. 
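+ * The reads below cover exactly the fixed 1000-byte TRK header: 6 + 6 + 12 + 12 + 2 + 200 + 2 + 200 + 64 + 444 + 4 + 4 + 24 + 2 + 6 + 4 + 4 + 4 = 1000 bytes.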
+ */ + static readTrkHeader(buffer: Buffer): TrkHeader { + + let offset = 0; + + const readChars = (length: number) => { + const value = buffer.toString('ascii', offset, offset + length).replace(/\0/g, ''); + offset += length; + return value; + }; + + const readShorts = (length: number): number[] => { + const values: number[] = []; + for (let i = 0; i < length; i++) { + values.push(buffer.readInt16LE(offset)); + offset += 2; + } + return values; + }; + + const readFloats = (length: number): number[] => { + const values: number[] = []; + for (let i = 0; i < length; i++) { + values.push(buffer.readFloatLE(offset)); + offset += 4; + } + return values; + }; + + const readMatrix = (rows: number, cols: number): number[][] => { + const matrix: number[][] = []; + for (let i = 0; i < rows; i++) { + const row: number[] = []; + for (let j = 0; j < cols; j++) { + row.push(buffer.readFloatLE(offset)); + offset += 4; + } + matrix.push(row); + } + return matrix; + }; + + const readUChar = (): boolean => { + const value = buffer.readUInt8(offset); + offset += 1; + return value !== 0; + }; + + const header: TrkHeader = { + id_string: readChars(6), + dim: readShorts(3) as [number, number, number], + voxel_size: readFloats(3) as [number, number, number], + origin: readFloats(3) as [number, number, number], + n_scalars: buffer.readInt16LE(offset), + scalar_name: [], + n_properties: 0, + property_name: [], + vox_to_ras: [], + voxel_order: '', + image_orientation_patient: [0, 0, 0, 0, 0, 0], + invert_x: false, + invert_y: false, + invert_z: false, + swap_xy: false, + swap_yz: false, + swap_zx: false, + n_count: 0, + version: 0, + hdr_size: 0, + }; + offset += 2; + + // Scalar names (10 names x 20 chars each = 200 bytes) + for (let i = 0; i < 10; i++) { + header.scalar_name.push(readChars(20)); + } + + header.n_properties = buffer.readInt16LE(offset); + offset += 2; + + // Property names (10 names x 20 chars each = 200 bytes) + for (let i = 0; i < 10; i++) { + header.property_name.push(readChars(20)); + } + + header.vox_to_ras = readMatrix(4, 4); + + offset += 444; // Skipped: Reserved space for future version. + + header.voxel_order = readChars(4); + offset += 4; // Skipped: paddings + + header.image_orientation_patient = readFloats(6) as [number, number, number, number, number, number]; + + offset += 2; // Skipped: paddings + + header.invert_x = readUChar(); + header.invert_y = readUChar(); + header.invert_z = readUChar(); + header.swap_xy = readUChar(); + header.swap_yz = readUChar(); + header.swap_zx = readUChar(); + + header.n_count = buffer.readInt32LE(offset); + offset += 4; + + header.version = buffer.readInt32LE(offset); + offset += 4; + + header.hdr_size = buffer.readInt32LE(offset); + offset += 4; + + return header; + } + + /** + * Prints detailed information about the TRK file header to the console. + * @static + * @param {TrkHeader} header The TRK header to be printed. 
+ */ + static printTrkHeader(header: TrkHeader): void { + console.log('--- TRK File Metadata ---'); + console.log(`ID String: ${header.id_string}`); + console.log(`Dimensions: ${header.dim.join(' x ')}`); + console.log(`Voxel Size: ${header.voxel_size.join(', ')}`); + console.log(`Origin: ${header.origin.join(', ')}`); + console.log(`Number of Scalars per Point: ${header.n_scalars}`); + console.log(`Scalar Names: ${header.scalar_name.filter(name => name).join(', ')}`); + console.log(`Number of Properties per Track: ${header.n_properties}`); + console.log(`Property Names: ${header.property_name.filter(name => name).join(', ')}`); + console.log('Voxel to RAS Matrix:'); + header.vox_to_ras.forEach(row => console.log(` [${row.join(', ')}]`)); + console.log(`Voxel Order: ${header.voxel_order}`); + console.log(`Image Orientation (Patient): ${header.image_orientation_patient.join(', ')}`); + console.log(`Invert X: ${header.invert_x}`); + console.log(`Invert Y: ${header.invert_y}`); + console.log(`Invert Z: ${header.invert_z}`); + console.log(`Swap XY: ${header.swap_xy}`); + console.log(`Swap YZ: ${header.swap_yz}`); + console.log(`Swap ZX: ${header.swap_zx}`); + console.log(`Number of Tracks: ${header.n_count}`); + console.log(`Version: ${header.version}`); + console.log(`Header Size: ${header.hdr_size}`); + } +} diff --git a/src/datasource/trk/reader/voxelToRASConverter.ts b/src/datasource/trk/reader/voxelToRASConverter.ts new file mode 100644 index 000000000..71765feeb --- /dev/null +++ b/src/datasource/trk/reader/voxelToRASConverter.ts @@ -0,0 +1,271 @@ +import * as math from 'mathjs'; +import { multiply } from 'mathjs'; +import type { TrkHeader } from '#src/datasource/trk/reader/trkHeader.js'; + +/** + * Provides methods for converting voxel coordinates to RAS coordinates using affine transformations. + */ +export class VoxelToRASConverter { + + /** + * Applies an affine transformation to a 3D point to convert voxel coordinates to RAS coordinates. + * @param {number[]} point - The voxel coordinates to transform. + * @param {number[][]} aff - The 4x4 affine transformation matrix. + * @returns {number[]} The RAS coordinates resulting from the transformation. + */ + static applyAffineMatrix(point: number[], aff: number[][]): number[] { + const [x, y, z] = point; + const transformed = [ + aff[0][0] * x + aff[0][1] * y + aff[0][2] * z + aff[0][3], + aff[1][0] * x + aff[1][1] * y + aff[1][2] * z + aff[1][3], + aff[2][0] * x + aff[2][1] * y + aff[2][2] * z + aff[2][3] + ]; + return transformed; + } + + /** + * Applies an affine transformation to a 3D point to convert voxel coordinates to RAS coordinates. + * + * This function is derived from the `getAffineToRasmm()` function in the nibabel library. + * See the original implementation here: [Nibabel Repository](https://github.com/nipy/nibabel/blob/83eaf0b55be9e9079bf9ad64975b71c22523f5f0/nibabel/streamlines/trk.py#L60C5-L60C33) + * + * @param {number[]} point - The voxel coordinates to transform. + * @param {number[][]} aff - The 4x4 affine transformation matrix. + * @returns {number[]} The RAS coordinates resulting from the transformation. 
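+ * (Note: unlike applyAffineMatrix above, this method takes the parsed TRK header and returns the 4x4 affine mapping TrackVis voxel-mm coordinates to RAS mm.)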
+ */ + static getAffineToRasmm(header: TrkHeader): number[][] { + + // Create an identity matrix for the affine transformation + let affine = math.identity(4) as math.Matrix; + + // Apply scale: adjust voxel space based on voxel size + const scale = math.identity(4) as math.Matrix; + for (let i = 0; i < 3; i++) { + scale.set([i, i], 1 / header.voxel_size[i]); // Scale by voxel size + } + affine = math.multiply(scale, affine) as math.Matrix; + + // Apply offset: Shift by half a voxel to account for corner/center discrepancy + const offset = math.identity(4) as math.Matrix; + for (let i = 0; i < 3; i++) { + offset.set([i, 3], -0.5); + } + affine = math.multiply(offset, affine) as math.Matrix; + + // Apply Orientation: If the voxel order implied by the affine does not match the voxel order in the TRK header, change the orientation. + const vox_order = header.voxel_order; + const affine_ornt = VoxelToRASConverter.aff2axcodes(header.vox_to_ras); + // Convert voxel order to orientation array + const header_ornt = VoxelToRASConverter.axcodes2orntTrans(vox_order.split('')); + // Convert affine orientation string to orientation array + const affine_ornt_array = VoxelToRASConverter.axcodes2orntTrans(affine_ornt); + // Create a transformation between the header and affine orientations + const ornt = VoxelToRASConverter.orntTransform(header_ornt, affine_ornt_array) + // Compute the affine transformation matrix M + const M = VoxelToRASConverter.invOrntAff(ornt, header.dim); + // Update the affine matrix by applying M to the existing affine matrix + + const affine_transformed = multiply(math.matrix(M), math.matrix(affine)).toArray(); + const voxelToRASMatrix = math.matrix(header.vox_to_ras); + const affine_voxmm_to_rasmm = math.multiply(voxelToRASMatrix, affine_transformed) as math.Matrix; + + // Convert the final affine matrix back to a 2D array (number[][]) and return + return affine_voxmm_to_rasmm.toArray() as number[][]; + + } + + /** + * Converts an affine matrix to axis direction codes. + * @param {number[][]} aff - The affine transformation matrix. + * @param {string[][]} [labels=[['L', 'R'], ['P', 'A'], ['I', 'S']]] - Optional labels representing the axis directions. + * @returns {string[]} An array of strings representing the axis directions. + */ + static aff2axcodes(aff: number[][], labels: [string, string][] = [['L', 'R'], ['P', 'A'], ['I', 'S']]): string[] { + const ornt = VoxelToRASConverter.io_orientation(aff); + return VoxelToRASConverter.orntInfo2axcodes(ornt, labels); + } + + /** + * Computes the orientation of the axes from an affine matrix using Singular Value Decomposition. + * @param {number[][]} aff - The affine transformation matrix. + * @returns {number[][]} An array representing the orientation of each axis. 
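+ * For example, the 4x4 identity affine yields [[0, 1], [1, 1], [2, 1]], which aff2axcodes maps to ['R', 'A', 'S'] under the default labels.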
+ */ + static io_orientation(aff: number[][]): number[][] { + const n = aff.length - 1; + const m = aff[0].length - 1; + + if (n !== m) { + throw new Error('Affine matrix must be square'); + } + + // Extract rotation part of the affine matrix (ignoring translation) + const rotation = aff.slice(0, n).map(row => row.slice(0, m)); + + // Singular Value Decomposition (SVD) to get the axis orientation + const invRotation = math.inv(rotation); + const invTrans = math.transpose(invRotation); + + // Calculate the orientation using absolute values + const orientation = math.zeros([n, 2]) as number[][]; + for (let i = 0; i < n; i++) { + let maxVal = 0; + let maxIndex = 0; + for (let j = 0; j < m; j++) { + const val = math.abs(invTrans[i][j]); + if (val > maxVal) { + maxVal = val; + maxIndex = j; + } + } + const direction = invTrans[i][maxIndex] > 0 ? 1 : -1; + orientation[i] = [maxIndex, direction]; + } + + return orientation; + } + + /** + * Converts orientation information into axis direction labels. + * @param {number[][]} ornt - The orientation information. + * @param {string[][]} [labels=[['L', 'R'], ['P', 'A'], ['I', 'S']]] - Optional labels representing the axis directions. + * @returns {string[]} An array of strings representing the axis directions. + */ + static orntInfo2axcodes(ornt: number[][], labels: [string, string][] = [['L', 'R'], ['P', 'A'], ['I', 'S']]): string[] { + return ornt.map(([axis, direction]) => { + if (isNaN(axis)) { + return ''; + } + const axisInt = Math.round(axis); + if (direction === 1) { + return labels[axisInt][1]; // Positive direction + } else if (direction === -1) { + return labels[axisInt][0]; // Negative direction + } else { + throw new Error('Direction should be -1 or 1'); + } + }); + } + + /** + * Converts axis codes to an orientation array. + * @param {string[]} axcodes - The axis codes. + * @param {string[][]} [labels=[['L', 'R'], ['P', 'A'], ['I', 'S']]] - Optional labels representing the axis directions. + * @returns {number[][]} An array representing the orientation of each axis. + */ + static axcodes2orntTrans(axcodes: string[], labels?: [string, string][]): number[][] { + // Default labels corresponding to RAS coordinate system + labels = labels || [['L', 'R'], ['P', 'A'], ['I', 'S']]; + + // Flatten labels for validation + const allowedLabels: Set = new Set(labels.flat()); + + // Validate input axcodes + if (!axcodes.every(axcode => allowedLabels.has(axcode))) { + throw new Error(`Not all axis codes [${axcodes}] are in label set [${Array.from(allowedLabels)}]`); + } + + // Create orientation array + const nAxes: number = axcodes.length; + const ornt: number[][] = Array.from({ length: nAxes }, () => [NaN, NaN]); + + // Fill orientation array + axcodes.forEach((code, codeIdx) => { + labels.forEach((codes, labelIdx) => { + if (code === codes[0]) { + ornt[codeIdx] = [labelIdx, -1]; // Negative direction + } else if (code === codes[1]) { + ornt[codeIdx] = [labelIdx, 1]; // Positive direction + } + }); + }); + + return ornt; + } + + /** + * Computes the transformation required to match a starting orientation to an ending orientation. + * @param {number[][]} startOrnt - The starting orientation. + * @param {number[][]} endOrnt - The desired ending orientation. + * @returns {number[][]} An array representing the transformation matrix to adjust the orientation. 
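+ * For example, identical start and end orientations yield the identity mapping [[0, 1], [1, 1], [2, 1]].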
+ */ + static orntTransform(startOrnt: number[][], endOrnt: number[][]): number[][] { + if (startOrnt.length !== endOrnt.length || startOrnt[0].length !== 2 || endOrnt[0].length !== 2) { + throw new Error('The orientations must have the same shape and each should be an (n,2) array'); + } + + const result: number[][] = new Array(startOrnt.length).fill(null).map(() => [0, 0]); + + endOrnt.forEach((end, endInIdx) => { + const endOutIdx = end[0]; + const endFlip = end[1]; + let found = false; + + startOrnt.forEach((start, startInIdx) => { + const startOutIdx = start[0]; + const startFlip = start[1]; + + if (endOutIdx === startOutIdx) { + if (startFlip === endFlip) { + result[startInIdx] = [endInIdx, 1]; + } else { + result[startInIdx] = [endInIdx, -1]; + } + found = true; + } + }); + + if (!found) { + throw new Error(`Unable to find out axis ${endOutIdx} in startOrnt`); + } + }); + + return result; + } + + /** + * Computes the inverse of the orientation transform for an affine matrix. + * @param {number[][]} orntInput - The orientation information. + * @param {number[]} shapeInput - The shape of the data corresponding to the orientation. + * @returns {number[][]} An array representing the inverse transformation matrix. + */ + static invOrntAff(orntInput: number[][], shapeInput: number[]) { + const ornt = math.matrix(orntInput); + const p = ornt.size()[0]; + const shape = shapeInput.slice(0, p); + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const axisTranspose = ornt.toArray().map((row: any) => row[0]); + const identityMatrix = math.identity(p + 1) as math.Matrix; + + let undoReorder = math.matrix(math.zeros(p + 1, p + 1)); // Create a zero matrix + axisTranspose.push(p); + axisTranspose.forEach((newIndex: number, i: number) => { + const row = identityMatrix.subset(math.index(i, math.range(0, p + 1))) as math.Matrix; + undoReorder = math.subset(undoReorder, math.index(newIndex, math.range(0, p + 1)), row); + }); + + // Create undo_flip as a diagonal matrix + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const flips = ornt.toArray().map((row: any) => row[1]); + let undoFlip = math.diag([...flips, 1.0]) as math.Matrix; + + // Calculate center transformation corrections for flip + const centerTrans = math.multiply(math.subtract(shape, 1), -0.5); + const correction = math.multiply(flips, centerTrans); + const updatedCenterTrans = math.subtract(correction, centerTrans); + + // Manually set the translations for flip corrections + flips.forEach((flip: number, index: number) => { + if (flip !== 1) { // Only adjust if there is a flip + const value = updatedCenterTrans.get([index]); + undoFlip = math.subset(undoFlip, math.index(index, p), value); + } + }); + + // Compose the transformations to get the final affine transformation matrix + const transformAffine = math.multiply(undoFlip, undoReorder); + return transformAffine; + } +} + + diff --git a/src/datasource/trk/register_default.ts b/src/datasource/trk/register_default.ts new file mode 100644 index 000000000..44fd84f90 --- /dev/null +++ b/src/datasource/trk/register_default.ts @@ -0,0 +1,20 @@ +/** + * @license + * Copyright 2017 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { registerProvider } from "#src/datasource/default_provider.js"; +import { TrkDataSource } from "#src/datasource/trk/frontend.js"; + +registerProvider("trk", () => new TrkDataSource()); diff --git a/src/kvstore/special/index.ts b/src/kvstore/special/index.ts index 22423b00e..ac9ee232a 100644 --- a/src/kvstore/special/index.ts +++ b/src/kvstore/special/index.ts @@ -23,7 +23,7 @@ import type { } from "#src/kvstore/index.js"; import { composeByteRangeRequest } from "#src/kvstore/index.js"; import { uncancelableToken } from "#src/util/cancellation.js"; -import { HttpError, isNotFoundError } from "#src/util/http_request.js"; +import { isNotFoundError } from "#src/util/http_request.js"; import type { SpecialProtocolCredentialsProvider } from "#src/util/special_protocol_request.js"; import { cancellableFetchSpecialOk } from "#src/util/special_protocol_request.js"; @@ -85,97 +85,95 @@ class SpecialProtocolKvStore implements ReadableKvStore { const { cancellationToken = uncancelableToken } = options; let { byteRange: byteRangeRequest } = options; const url = this.baseUrl + key; - for (let attempt = 0; ; ++attempt) { - try { - const requestInit: RequestInit = {}; - const rangeHeader = getRangeHeader(byteRangeRequest); - if (rangeHeader !== undefined) { - requestInit.headers = { range: rangeHeader }; - requestInit.cache = byteRangeCacheMode; + + try { + // The HTTP spec supports suffixLength requests directly via "Range: + // bytes=-N" requests, which avoids the need for a separate HEAD request. + // However, per + // https://fetch.spec.whatwg.org/#cors-safelisted-request-header a suffix + // length byte range request header will always trigger an OPTIONS preflight + // request, which would otherwise be avoided. This negates the benefit of + // using a suffixLength request directly. Additionally, some servers such as + // the npm http-server package and https://uk1s3.embassy.ebi.ac.uk/ do not + // correctly handle suffixLength requests or do not correctly handle CORS + // preflight requests. To avoid those issues, always just issue a separate + // HEAD request to determine the length. + let totalSize: number | undefined; + if ( + byteRangeRequest !== undefined && + "suffixLength" in byteRangeRequest + ) { + const totalSize = await this.getObjectLength(url, options); + byteRangeRequest = composeByteRangeRequest( + { offset: 0, length: totalSize }, + byteRangeRequest, + ).outer; + } + const requestInit: RequestInit = {}; + const rangeHeader = getRangeHeader(byteRangeRequest); + if (rangeHeader !== undefined) { + requestInit.headers = { range: rangeHeader }; + requestInit.cache = byteRangeCacheMode; + } + const { response, data } = await cancellableFetchSpecialOk( + this.credentialsProvider, + url, + requestInit, + async (response) => ({ + response, + data: await response.arrayBuffer(), + }), + cancellationToken, + ); + let byteRange: ByteRange | undefined; + if (response.status === 206) { + const contentRange = response.headers.get("content-range"); + if (contentRange === null) { + // Content-range should always be sent, but some buggy servers don't + // send it. 
+ if (byteRangeRequest !== undefined) { + byteRange = { + offset: byteRangeRequest.offset, + length: data.byteLength, + }; + } else { + throw new Error( + "Unexpected HTTP 206 response when no byte range specified.", + ); + } } - const { response, data } = await cancellableFetchSpecialOk( - this.credentialsProvider, - url, - requestInit, - async (response) => ({ - response, - data: await response.arrayBuffer(), - }), - cancellationToken, - ); - let byteRange: ByteRange | undefined; - let totalSize: number | undefined; - if (response.status === 206) { - const contentRange = response.headers.get("content-range"); - if (contentRange === null) { - if (byteRangeRequest !== undefined) { - if ("suffixLength" in byteRangeRequest) { - const objectSize = await this.getObjectLength(url, options); - byteRange = { - offset: objectSize - byteRangeRequest.suffixLength, - length: Number(response.headers.get("content-length")), - }; - } else { - byteRange = { - offset: byteRangeRequest.offset, - length: data.byteLength, - }; - } - } else { - throw new Error( - "Unexpected HTTP 206 response when no byte range specified.", - ); - } + if (contentRange !== null) { + const m = contentRange.match(/bytes ([0-9]+)-([0-9]+)\/([0-9]+|\*)/); + if (m === null) { + throw new Error( + `Invalid content-range header: ${JSON.stringify(contentRange)}`, + ); } - if (contentRange !== null) { - const m = contentRange.match( - /bytes ([0-9]+)-([0-9]+)\/([0-9]+|\*)/, + const beginPos = parseInt(m[1], 10); + const endPos = parseInt(m[2], 10); + if (endPos !== beginPos + data.byteLength - 1) { + throw new Error( + `Length in content-range header ${JSON.stringify( + contentRange, + )} does not match content length ${data.byteLength}`, ); - if (m === null) { - throw new Error( - `Invalid content-range header: ${JSON.stringify(contentRange)}`, - ); - } - const beginPos = parseInt(m[1], 10); - const endPos = parseInt(m[2], 10); - if (endPos !== beginPos + data.byteLength - 1) { - throw new Error( - `Length in content-range header ${JSON.stringify( - contentRange, - )} does not match content length ${data.byteLength}`, - ); - } - totalSize = m[3] === "*" ? undefined : parseInt(m[3], 10); - byteRange = { offset: beginPos, length: data.byteLength }; } + if (m[3] !== "*") { + totalSize = parseInt(m[3], 10); + } + byteRange = { offset: beginPos, length: data.byteLength }; } - if (byteRange === undefined) { - byteRange = { offset: 0, length: data.byteLength }; - totalSize = data.byteLength; - } - return { data: new Uint8Array(data), dataRange: byteRange, totalSize }; - } catch (e) { - if ( - attempt === 0 && - e instanceof HttpError && - e.status === 416 && - options.byteRange !== undefined && - "suffixLength" in options.byteRange - ) { - // Some servers, such as the npm http-server package, do not support suffixLength - // byte-range requests. 
- const contentLengthNumber = await this.getObjectLength(url, options); - byteRangeRequest = composeByteRangeRequest( - { offset: 0, length: contentLengthNumber }, - byteRangeRequest, - ).outer; - continue; - } - if (isNotFoundError(e)) { - return undefined; - } - throw e; } + if (byteRange === undefined) { + byteRange = { offset: 0, length: data.byteLength }; + totalSize = data.byteLength; + } + return { data: new Uint8Array(data), dataRange: byteRange, totalSize }; + } catch (e) { + if (isNotFoundError(e)) { + return undefined; + } + throw e; } } } diff --git a/src/skeleton/decode_precomputed_skeleton.ts b/src/skeleton/decode_precomputed_skeleton.ts index 034939642..fd30d96e3 100644 --- a/src/skeleton/decode_precomputed_skeleton.ts +++ b/src/skeleton/decode_precomputed_skeleton.ts @@ -29,6 +29,7 @@ export function decodeSkeletonChunk( response: ArrayBuffer, vertexAttributes: Map, ) { + const dv = new DataView(response); const numVertices = dv.getUint32(0, true); const numEdges = dv.getUint32(4, true);
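For the trk datasource, the buffer passed to decodeSkeletonChunk is the one assembled by SkeletonWriter.createArrayBuffer, so the counts read here are the two little-endian uint32 values that writer emits first. A minimal consumer-side sketch, under the assumption that the buffer follows createArrayBuffer's layout exactly (counts, positions, edges, orientations, then per-vertex float32 scalars):

// Sketch only: mirrors the header reads above and the byte layout written by
// SkeletonWriter.createArrayBuffer.
function readSkeletonCounts(buffer: ArrayBuffer): { numVertices: number; numEdges: number } {
  const dv = new DataView(buffer);
  return {
    numVertices: dv.getUint32(0, true), // little-endian uint32 at offset 0
    numEdges: dv.getUint32(4, true), // little-endian uint32 at offset 4
  };
}

// Expected total byte length for V vertices, E edges and S single-component
// float32 scalar attributes per vertex, matching createArrayBuffer's bufferSize:
// 8 + 12*V (positions) + 8*E (edges) + 12*V (orientations) + 4*S*V (scalars).
function expectedSkeletonBufferLength(V: number, E: number, S: number): number {
  return 8 + 12 * V + 8 * E + 12 * V + 4 * S * V;
}

Positions are written pre-scaled by 1e6, which lines up with the nanometer (1e-9 m) coordinate space declared in getDefaultCoordinateSpace() in frontend.ts.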