[WIP] Refactor volume undo mechanism #7506

Draft · wants to merge 6 commits into master
30 changes: 12 additions & 18 deletions frontend/javascripts/oxalis/model/actions/volumetracing_actions.ts
@@ -1,13 +1,17 @@
import type { ServerEditableMapping, ServerVolumeTracing } from "types/api_flow_types";
import type { Vector2, Vector3, OrthoView, ContourMode, BucketAddress } from "oxalis/constants";
import type { BucketDataArray } from "oxalis/model/bucket_data_handling/bucket";
import type { Segment, SegmentGroup, SegmentMap } from "oxalis/store";
import Deferred from "libs/async/deferred";
import type { Dispatch } from "redux";
import { AllUserBoundingBoxActions } from "oxalis/model/actions/annotation_actions";
import type { ContourMode, OrthoView, Vector2, Vector3 } from "oxalis/constants";
import { QuickSelectGeometry } from "oxalis/geometries/helper_geometries";
import { AllUserBoundingBoxActions } from "oxalis/model/actions/annotation_actions";
import type { Segment, SegmentGroup, SegmentMap } from "oxalis/store";
import type { Dispatch } from "redux";
import { batchActions } from "redux-batched-actions";
import type {
BucketDataArray,
ServerEditableMapping,
ServerVolumeTracing,
} from "types/api_flow_types";
import { type AdditionalCoordinate } from "types/api_flow_types";
import BucketSnapshot from "../bucket_data_handling/bucket_snapshot";

export type InitializeVolumeTracingAction = ReturnType<typeof initializeVolumeTracingAction>;
export type InitializeEditableMappingAction = ReturnType<typeof initializeEditableMappingAction>;
@@ -299,20 +303,10 @@ export const setContourTracingModeAction = (mode: ContourMode) =>
mode,
} as const);

export const addBucketToUndoAction = (
zoomedBucketAddress: BucketAddress,
bucketData: BucketDataArray,
maybeUnmergedBucketLoadedPromise: MaybeUnmergedBucketLoadedPromise,
pendingOperations: Array<(arg0: BucketDataArray) => void>,
tracingId: string,
) =>
export const addBucketToUndoAction = (bucketSnapshot: BucketSnapshot) =>
({
type: "ADD_BUCKET_TO_UNDO",
zoomedBucketAddress,
bucketData,
maybeUnmergedBucketLoadedPromise,
pendingOperations: pendingOperations.slice(),
tracingId,
bucketSnapshot,
} as const);

export const importVolumeTracingAction = () =>
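Note on the shape change above: the action payload collapses from five loosely coupled fields (address, data, promise, pending operations, tracing id) into a single BucketSnapshot, so every consumer of ADD_BUCKET_TO_UNDO handles one field. A minimal sketch of a consumer under the new shape — the batch container and function are illustrative, not part of this diff:

import type BucketSnapshot from "oxalis/model/bucket_data_handling/bucket_snapshot";
import { addBucketToUndoAction } from "oxalis/model/actions/volumetracing_actions";

type AddBucketToUndoAction = ReturnType<typeof addBucketToUndoAction>;

// Hypothetical consumer: the undo machinery only needs to stash the snapshot;
// address, pending operations, tracing id, and element class travel inside it.
function collectForUndoBatch(action: AddBucketToUndoAction, batch: BucketSnapshot[]): void {
  batch.push(action.bucketSnapshot);
}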
73 changes: 36 additions & 37 deletions frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts
@@ -1,7 +1,7 @@
import { createNanoEvents, Emitter } from "nanoevents";
import * as THREE from "three";
import _ from "lodash";
import type { ElementClass } from "types/api_flow_types";
import type { BucketDataArray, ElementClass } from "types/api_flow_types";
import { PullQueueConstants } from "oxalis/model/bucket_data_handling/pullqueue";
import type { MaybeUnmergedBucketLoadedPromise } from "oxalis/model/actions/volumetracing_actions";
import { addBucketToUndoAction } from "oxalis/model/actions/volumetracing_actions";
@@ -16,6 +16,8 @@ import TemporalBucketManager from "oxalis/model/bucket_data_handling/temporal_bu
import window from "libs/window";
import { getActiveMagIndexForLayer } from "../accessors/flycam_accessor";
import { type AdditionalCoordinate } from "types/api_flow_types";
import { uint8ToTypedBuffer } from "../helpers/bucket_compression";
import BucketSnapshot, { PendingOperation } from "./bucket_snapshot";

export const enum BucketStateEnum {
UNREQUESTED = "UNREQUESTED",
@@ -24,12 +26,7 @@ export const enum BucketStateEnum {
LOADED = "LOADED",
}
export type BucketStateEnumType = keyof typeof BucketStateEnum;
export type BucketDataArray =
| Uint8Array
| Uint16Array
| Uint32Array
| Float32Array
| BigUint64Array;

export const bucketDebuggingFlags = {
// For visualizing buckets which are passed to the GPU
visualizeBucketsOnGPU: false,
@@ -58,7 +55,7 @@ export function assertNonNullBucket(bucket: Bucket): asserts bucket is DataBucke
}

export class NullBucket {
type: "null" = "null";
readonly type: "null" = "null";

hasData(): boolean {
return false;
@@ -127,8 +124,9 @@ export function markVolumeTransactionEnd() {
}

export class DataBucket {
type: "data" = "data";
elementClass: ElementClass;
readonly type: "data" = "data";
readonly elementClass: ElementClass;
readonly zoomedAddress: BucketAddress;
visualizedMesh: Record<string, any> | null | undefined;
// @ts-expect-error ts-migrate(2564) FIXME: Property 'visualizationColor' has no initializer a... Remove this comment to see the full error message
visualizationColor: THREE.Color;
@@ -141,12 +139,11 @@ export class DataBucket {
// - not yet created by the PushQueue, since the PushQueue creates the snapshots
// in a debounced manner
dirtyCount: number = 0;
pendingOperations: Array<(arg0: BucketDataArray) => void> = [];
pendingOperations: Array<PendingOperation> = [];
state: BucketStateEnumType;
accessed: boolean;
data: BucketDataArray | null | undefined;
temporalBucketManager: TemporalBucketManager;
zoomedAddress: BucketAddress;
cube: DataCube;
_fallbackBucket: Bucket | null | undefined;
throttledTriggerLabeled: () => void;
@@ -323,7 +320,7 @@ export class DataBucket {
return dataClone;
}

async label_DEPRECATED(labelFunc: (arg0: BucketDataArray) => void): Promise<void> {
async label_DEPRECATED(labelFunc: PendingOperation): Promise<void> {
/*
* It's not recommended to use this method (repeatedly), as it can be
* very slow. See the docstring for Bucket.getOrCreateData() for alternatives.
@@ -349,6 +346,17 @@ export class DataBucket {
}

bucketsAlreadyInUndoState.add(this);

Store.dispatch(
// Always use the current state of this.maybeUnmergedBucketLoadedPromise, since
// this bucket could be added to multiple undo batches while it's fetched. All entries
// need to have the corresponding promise for the undo to work correctly.
addBucketToUndoAction(this.getSnapshot()),
);
}

getSnapshot(): BucketSnapshot {
// todop: this potentially creates data via getOrCreateData. do we want this?
const dataClone = this.getCopyOfData();

if (this.needsBackendData() && this.maybeUnmergedBucketLoadedPromise == null) {
@@ -361,20 +369,22 @@ export class DataBucket {
});
}

Store.dispatch(
// Always use the current state of this.maybeUnmergedBucketLoadedPromise, since
// this bucket could be added to multiple undo batches while it's fetched. All entries
// need to have the corresponding promise for the undo to work correctly.
addBucketToUndoAction(
this.zoomedAddress,
dataClone,
this.maybeUnmergedBucketLoadedPromise,
this.pendingOperations,
this.getTracingId(),
),
return new BucketSnapshot(
this.zoomedAddress,
dataClone,
this.maybeUnmergedBucketLoadedPromise,
this.pendingOperations.slice(),
this.getTracingId(),
this.elementClass,
);
}

async restoreToSnapshot(snapshot: BucketSnapshot): Promise<void> {
const { newData, newPendingOperations } = await snapshot.getDataForRestore();
// Set the new bucket data. This will add the bucket directly to the pushqueue, too.
this.setData(newData, newPendingOperations);
}

hasData(): boolean {
return this.data != null;
}
@@ -389,25 +399,14 @@
return data;
}

setData(newData: BucketDataArray, newPendingOperations: Array<(arg0: BucketDataArray) => void>) {
setData(newData: BucketDataArray, newPendingOperations: Array<PendingOperation>) {
this.data = newData;
this.invalidateValueSet();
this.pendingOperations = newPendingOperations;
this.dirty = true;
this.endDataMutation();
}

uint8ToTypedBuffer(arrayBuffer: Uint8Array | null | undefined) {
const [TypedArrayClass, channelCount] = getConstructorForElementClass(this.elementClass);
return arrayBuffer != null
? new TypedArrayClass(
arrayBuffer.buffer,
arrayBuffer.byteOffset,
arrayBuffer.byteLength / TypedArrayClass.BYTES_PER_ELEMENT,
)
: new TypedArrayClass(channelCount * Constants.BUCKET_SIZE);
}

markAsNeeded(): void {
this.accessed = true;
}
@@ -577,7 +576,7 @@ export class DataBucket {
}

receiveData(arrayBuffer: Uint8Array | null | undefined): void {
const data = this.uint8ToTypedBuffer(arrayBuffer);
const data = uint8ToTypedBuffer(arrayBuffer, this.elementClass);
const [TypedArrayClass, channelCount] = getConstructorForElementClass(this.elementClass);

if (data.length !== channelCount * Constants.BUCKET_SIZE) {
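The inline (arg0: BucketDataArray) => void signatures in bucket.ts are now aliased to the PendingOperation type exported by bucket_snapshot.ts (next file). For intuition: a pending operation is a deferred voxel write that can be replayed over any copy of the bucket's data. A made-up example — real operations are produced by the labeling code:

import type { BucketDataArray } from "types/api_flow_types";
import type { PendingOperation } from "oxalis/model/bucket_data_handling/bucket_snapshot";

// Hypothetical operation: label the voxel at linear index 42 with segment id 7.
const labelVoxel42: PendingOperation = (data: BucketDataArray) => {
  // @ts-ignore - the BucketDataArray variants are never mixed here, mirroring
  // the justification in mergeDataWithBackendDataInPlace below.
  data[42] = 7;
};

Replaying such operations after copying the backend data into the local array is exactly what mergeDataWithBackendDataInPlace in the new file does.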
frontend/javascripts/oxalis/model/bucket_data_handling/bucket_snapshot.ts (new file)
@@ -0,0 +1,152 @@
import type { BucketAddress } from "oxalis/constants";
import type { MaybeUnmergedBucketLoadedPromise } from "oxalis/model/actions/volumetracing_actions";
import type { BucketDataArray, ElementClass } from "types/api_flow_types";
import { compressTypedArray, decompressToTypedArray } from "../helpers/bucket_compression";

export type PendingOperation = (arg0: BucketDataArray) => void;

export default class BucketSnapshot {
readonly zoomedAddress: BucketAddress;
readonly pendingOperations: PendingOperation[];
readonly tracingId: string;
readonly needsMergeWithBackendData: boolean;
readonly elementClass: ElementClass;

// A copy of the bucket's data. Either stored
// uncompressed:
dataClone: BucketDataArray | null;
// ... or compressed:
compressedData: Uint8Array | null = null;

// A pending promise of the unmerged backend data. Once the promise
// is fulfilled, it will be set to null.
maybeUnmergedBucketLoadedPromise: MaybeUnmergedBucketLoadedPromise;
// Afterwards, the backend data is either stored
// uncompressed:
backendBucketData: BucketDataArray | null = null;
// ... or compressed:
compressedBackendData: Uint8Array | null = null;

constructor(
zoomedAddress: BucketAddress,
dataClone: BucketDataArray,
maybeUnmergedBucketLoadedPromise: MaybeUnmergedBucketLoadedPromise,
pendingOperations: PendingOperation[],
tracingId: string,
elementClass: ElementClass,
) {
this.zoomedAddress = zoomedAddress;
this.dataClone = dataClone;
this.maybeUnmergedBucketLoadedPromise = maybeUnmergedBucketLoadedPromise;
this.pendingOperations = pendingOperations;
this.tracingId = tracingId;
this.elementClass = elementClass;

this.needsMergeWithBackendData = maybeUnmergedBucketLoadedPromise != null;

this.startCompression();
}

private startCompression() {
if (this.dataClone != null) {
compressTypedArray(this.dataClone).then((compressedData) => {
this.compressedData = compressedData;
this.dataClone = null;
});
}
if (this.maybeUnmergedBucketLoadedPromise == null) {
return;
}
this.maybeUnmergedBucketLoadedPromise.then((backendBucketData) => {
// Once the backend data is fetched, do not directly merge it with the local data,
// as this operation is only needed when the volume action is undone. Additionally,
// merging is more expensive than saving the backend data. Thus, the data is only
// merged when it is needed.
this.backendBucketData = backendBucketData;
this.maybeUnmergedBucketLoadedPromise = null;
compressTypedArray(backendBucketData).then((compressedBackendData) => {
this.backendBucketData = null;
this.compressedBackendData = compressedBackendData;
});
});
}

private async getLocalData(): Promise<BucketDataArray> {
if (this.dataClone != null) {
return this.dataClone;
}
if (this.compressedData == null) {
throw new Error("BucketSnapshot has neither data nor compressedData.");
}
return await decompressToTypedArray(this.compressedData, this.elementClass);
}

private isBackendDataAvailable() {
return this.backendBucketData != null || this.compressedBackendData != null;
}

private async getBackendData(): Promise<BucketDataArray> {
if (this.backendBucketData != null) {
return this.backendBucketData;
}
if (this.compressedBackendData == null) {
throw new Error("getBackendData was called even though no backend data exists.");
}
return await decompressToTypedArray(this.compressedBackendData, this.elementClass);
}

async getDataForRestore(): Promise<{
newData: BucketDataArray;
newPendingOperations: PendingOperation[];
}> {
// todop: clarify case with
// this.needsMergeWithBackendData && !isBackendDataAvailable...
if (this.needsMergeWithBackendData && this.isBackendDataAvailable()) {
const [decompressedBucketData, decompressedBackendData] = await Promise.all([
this.getLocalData(),
this.getBackendData(),
]);
mergeDataWithBackendDataInPlace(
decompressedBucketData,
decompressedBackendData,
this.pendingOperations,
);
return {
newData: decompressedBucketData,
newPendingOperations: [],
};
}

// Either no merge is necessary (e.g., because the snapshot was already
// created with the merged data), or the backend data hasn't arrived yet.
// In both cases, simply return the available data.
// If backend data still needs to be merged, this will happen within Bucket.receiveData?
const newData = await this.getLocalData();

// todop: right after the above await, could it happen that the back-end data is now available?

return {
newData,
newPendingOperations: this.pendingOperations,
};
}
}

function mergeDataWithBackendDataInPlace(
originalData: BucketDataArray,
backendData: BucketDataArray,
pendingOperations: Array<(arg0: BucketDataArray) => void>,
) {
if (originalData.length !== backendData.length) {
throw new Error("Cannot merge data arrays with differing lengths");
}

// Transfer the backend data into originalData.
// The `set` operation is not problematic, since the BucketDataArray types
// won't be mixed (either they are BigInt or they aren't).
// @ts-ignore
originalData.set(backendData);

for (const op of pendingOperations) {
op(originalData);
}
}
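The compressTypedArray / decompressToTypedArray helpers come from helpers/bucket_compression, which is not part of this diff. As a hedged sketch of that module's shape: uint8ToTypedBuffer below is reconstructed from the method removed from bucket.ts (now taking elementClass as a parameter), while the gzip codec and import paths are assumptions — the real module may compress differently:

import type { BucketDataArray, ElementClass } from "types/api_flow_types";
// Illustrative import paths; the real module resolves these elsewhere.
import { getConstructorForElementClass } from "oxalis/model/bucket_data_handling/bucket";
import Constants from "oxalis/constants";

// Moved from DataBucket.uint8ToTypedBuffer; elementClass is now explicit.
export function uint8ToTypedBuffer(
  arrayBuffer: Uint8Array | null | undefined,
  elementClass: ElementClass,
): BucketDataArray {
  const [TypedArrayClass, channelCount] = getConstructorForElementClass(elementClass);
  return arrayBuffer != null
    ? new TypedArrayClass(
        arrayBuffer.buffer,
        arrayBuffer.byteOffset,
        arrayBuffer.byteLength / TypedArrayClass.BYTES_PER_ELEMENT,
      )
    : new TypedArrayClass(channelCount * Constants.BUCKET_SIZE);
}

// Codec is an assumption: any byte-level compressor works, since decompression
// round-trips through uint8ToTypedBuffer.
export async function compressTypedArray(data: BucketDataArray): Promise<Uint8Array> {
  const stream = new Blob([data]).stream().pipeThrough(new CompressionStream("gzip"));
  return new Uint8Array(await new Response(stream).arrayBuffer());
}

export async function decompressToTypedArray(
  compressed: Uint8Array,
  elementClass: ElementClass,
): Promise<BucketDataArray> {
  const stream = new Blob([compressed]).stream().pipeThrough(new DecompressionStream("gzip"));
  const bytes = new Uint8Array(await new Response(stream).arrayBuffer());
  return uint8ToTypedBuffer(bytes, elementClass);
}

Keeping snapshots compressed until getDataForRestore() runs trades CPU for memory: an undo stack holds many buckets, but only a handful are decompressed per undo step.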
frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts
@@ -1,7 +1,7 @@
import _ from "lodash";
import type { Bucket, BucketDataArray } from "oxalis/model/bucket_data_handling/bucket";
import type { Bucket } from "oxalis/model/bucket_data_handling/bucket";
import { DataBucket, NULL_BUCKET, NullBucket } from "oxalis/model/bucket_data_handling/bucket";
import type { AdditionalAxis, ElementClass } from "types/api_flow_types";
import type { AdditionalAxis, BucketDataArray, ElementClass } from "types/api_flow_types";
import type { ProgressCallback } from "libs/progress_callback";
import { V3 } from "libs/mjs";
import { VoxelNeighborQueue2D, VoxelNeighborQueue3D } from "oxalis/model/volumetracing/volumelayer";
@@ -710,20 +710,6 @@ class DataCube {
};
}

setBucketData(
zoomedAddress: BucketAddress,
data: BucketDataArray,
newPendingOperations: Array<(arg0: BucketDataArray) => void>,
) {
const bucket = this.getOrCreateBucket(zoomedAddress);

if (bucket.type === "null") {
return;
}

bucket.setData(data, newPendingOperations);
}

triggerPushQueue() {
this.pushQueue.push();
}
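setBucketData disappears from DataCube because restoring undo data no longer flows through the cube: a snapshot restores itself via the owning bucket's restoreToSnapshot. A hedged sketch of the replacement call path — the function and import paths are illustrative; the real call site is presumably the undo saga:

import type DataCube from "oxalis/model/bucket_data_handling/data_cube";
import type BucketSnapshot from "oxalis/model/bucket_data_handling/bucket_snapshot";

// Hypothetical undo-side caller mirroring the removed setBucketData lookup.
async function restoreBucketFromSnapshot(cube: DataCube, snapshot: BucketSnapshot): Promise<void> {
  const bucket = cube.getOrCreateBucket(snapshot.zoomedAddress);
  if (bucket.type === "null") {
    return;
  }
  // Decompresses the stored data and merges backend data only if needed.
  await bucket.restoreToSnapshot(snapshot);
}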