Merge pull request #1586 from silx-kit/hdf5-errors-2

Propagate and improve handling of HDF5 errors

axelboc authored Mar 4, 2024
2 parents 689d589 + 66414ce commit 9a69c49
Showing 10 changed files with 237 additions and 50 deletions.
14 changes: 14 additions & 0 deletions packages/app/src/App.module.css
@@ -154,6 +154,20 @@
margin-left: 0.5rem;
}

.detailedError {
composes: error from global;
overflow: auto;
scrollbar-width: thin;
}

.detailedError > summary {
cursor: pointer;
}

.detailedError > pre {
font-size: 0.875em;
}

.retryBtn {
composes: btnClean btnLink from global;
}
10 changes: 10 additions & 0 deletions packages/app/src/ErrorFallback.tsx
@@ -6,6 +6,16 @@ import { CANCELLED_ERROR_MSG } from './providers/utils';
function ErrorFallback(props: FallbackProps) {
const { error, resetErrorBoundary } = props;

if (error.cause instanceof Error) {
const { message } = error.cause;
return (
<details className={styles.detailedError}>
<summary>{error.message}</summary>
<pre>{message}</pre>
</details>
);
}

return (
<p className={styles.error}>
{error.message}
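For illustration, a minimal sketch (not part of this commit) of the kind of error this new branch handles: a short user-facing message with the raw HDF5 diagnostic dump attached as the error's cause. The shape mirrors what getEnhancedError() in packages/h5wasm/src/utils.ts produces (shown further down in this diff); the values here are hypothetical.

// Hypothetical example error, shaped like those created by getEnhancedError()
// in packages/h5wasm/src/utils.ts:
const error = new Error(
  "Required filter 'SZ3 compressor/decompressor for floating-point data.' is not registered",
  { cause: new Error('HDF5-DIAG: Error detected in HDF5 (1.14.2) thread 0: ...') },
);
// <ErrorFallback error={error} resetErrorBoundary={reset} /> then renders
// error.message inside <summary> and error.cause.message inside <pre>.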
2 changes: 1 addition & 1 deletion packages/app/src/visualizer/Visualizer.module.css
@@ -21,7 +21,7 @@
grid-template:
'dimMapper bar' auto
'dimMapper vis' 1fr
/ auto 1fr;
/ min-content 1fr;
}

.fallback {
2 changes: 1 addition & 1 deletion packages/h5wasm/package.json
@@ -46,7 +46,7 @@
}
},
"dependencies": {
"h5wasm": "0.7.1",
"h5wasm": "0.7.2",
"nanoid": "5.0.5"
},
"devDependencies": {
38 changes: 19 additions & 19 deletions packages/h5wasm/src/__snapshots__/h5wasm-api.test.ts.snap
@@ -1786,9 +1786,9 @@ exports[`test file matches snapshot 1`] = `
0,
0,
0,
248,
215,
18,
240,
13,
19,
0,
],
],
@@ -1802,9 +1802,9 @@ exports[`test file matches snapshot 1`] = `
0,
0,
0,
104,
238,
18,
96,
36,
19,
0,
],
],
@@ -1818,9 +1818,9 @@ exports[`test file matches snapshot 1`] = `
0,
0,
0,
128,
238,
18,
120,
36,
19,
0,
],
],
@@ -2095,9 +2095,9 @@ exports[`test file matches snapshot 1`] = `
0,
0,
0,
96,
210,
18,
88,
8,
19,
0,
],
},
@@ -2126,25 +2126,25 @@ exports[`test file matches snapshot 1`] = `
0,
0,
0,
176,
25,
168,
79,
18,
0,
2,
0,
0,
0,
96,
12,
88,
66,
18,
0,
3,
0,
0,
0,
240,
239,
18,
232,
37,
19,
0,
],
},
24 changes: 18 additions & 6 deletions packages/h5wasm/src/h5wasm-api.ts
@@ -20,6 +20,7 @@ import { assertH5WasmDataset } from './guards';
import type { H5WasmEntity } from './models';
import type { Plugin } from './utils';
import {
getEnhancedError,
hasBigInts,
parseEntity,
PLUGINS_BY_FILTER_ID,
@@ -61,8 +62,12 @@ export class H5WasmApi extends DataProviderApi {
// Ensure all filters are supported and loaded (if available)
await this.processFilters(h5wDataset.filters);

const value = readSelectedValue(h5wDataset, selection);
return hasBigInts(dataset.type) ? sanitizeBigInts(value) : value;
try {
const value = readSelectedValue(h5wDataset, selection);
return hasBigInts(dataset.type) ? sanitizeBigInts(value) : value;
} catch (error) {
throw getEnhancedError(error);
}
}

public async getAttrValues(entity: Entity) {
@@ -118,6 +123,9 @@
private async initH5Wasm(): Promise<typeof Module> {
const module = await h5wasmReady;

// Throw HDF5 errors instead of just logging them
module.activate_throwing_error_handler();

// Replace default plugins path
module.remove_plugin_search_path(0);
module.insert_plugin_search_path(PLUGINS_PATH, 0);
@@ -162,10 +170,12 @@
}

const plugin = PLUGINS_BY_FILTER_ID[filter.id];
if (!plugin || !this.getPlugin) {
throw new Error(
if (!plugin) {
// eslint-disable-next-line no-console
console.warn(
`Compression filter ${filter.id} not supported (${filter.name})`,
);
continue;
}

const pluginPath = `${PLUGINS_PATH}/libH5Z${plugin}.so`;
@@ -174,9 +184,11 @@
continue; // plugin already loaded
}

const buffer = await this.getPlugin(plugin);
const buffer = await this.getPlugin?.(plugin);
if (!buffer) {
throw new Error(`Compression plugin ${plugin} not supported`);
// eslint-disable-next-line no-console
console.warn(`Compression plugin ${plugin} not available`);
continue;
}

h5Module.FS.writeFile(pluginPath, new Uint8Array(buffer));
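For context, a sketch of a getPlugin callback consistent with the call site above; the function name, signature, and URL are assumptions, not part of this commit. With the changes above, a missing plugin now degrades to a console warning followed by an HDF5 "required filter ... is not registered" read error (handled by getEnhancedError in utils.ts below), instead of an immediate throw from processFilters.

// Hypothetical plugin loader: fetch the compiled filter plugin from a
// placeholder URL and hand its bytes to the provider; returning undefined
// (or not providing getPlugin at all) triggers the warn-and-continue path.
async function getPlugin(name: string): Promise<ArrayBuffer | undefined> {
  const resp = await fetch(`/h5wasm-plugins/libH5Z${name}.so`);
  return resp.ok ? await resp.arrayBuffer() : undefined;
}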
7 changes: 7 additions & 0 deletions packages/h5wasm/src/models.ts
@@ -3,3 +3,10 @@ import type { Group as H5WasmGroup } from 'h5wasm';
export type H5WasmEntity = ReturnType<H5WasmGroup['get']>;

export type H5WasmAttributes = H5WasmGroup['attrs'];

export interface HDF5Diag {
major: string;
minor: string;
message: string;
origin: string;
}
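For reference, a hypothetical HDF5Diag value as parseDiagnostics() (added in utils.ts below) produces it; the field values are copied from the test fixture in utils.test.ts.

// One parsed diagnostic entry from an "HDF5-DIAG: ..." dump:
const diag: HDF5Diag = {
  major: 'Dataset',
  minor: 'Read failed',
  message: "Can't synchronously read data",
  origin:
    '/__w/libhdf5-wasm/libhdf5-wasm/build/1.14.2/_deps/hdf5-src/src/H5D.c line 1061 in H5Dread()',
};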
56 changes: 56 additions & 0 deletions packages/h5wasm/src/utils.test.ts
@@ -0,0 +1,56 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { describe, expect, it } from 'vitest';

import { parseDiagnostics } from './utils';

const HDF5_ERROR_MESSAGE = `HDF5-DIAG: Error detected in HDF5 (1.14.2) thread 0:
#000: /__w/libhdf5-wasm/libhdf5-wasm/build/1.14.2/_deps/hdf5-src/src/H5D.c line 1061 in H5Dread(): can't synchronously read data
major: Dataset
minor: Read failed
#001: /__w/libhdf5-wasm/libhdf5-wasm/build/1.14.2/_deps/hdf5-src/src/H5VLcallback.c line 2092 in H5VL_dataset_read_direct(): dataset read failed
major: Virtual Object Layer
minor: Read failed
#002: /__w/libhdf5-wasm/libhdf5-wasm/build/1.14.2/_deps/hdf5-src/src/H5Dchunk.c line 4468 in H5D__chunk_lock(): data pipeline read failed
major: Dataset
minor: Filter operation failed
#003: /__w/libhdf5-wasm/libhdf5-wasm/build/1.14.2/_deps/hdf5-src/src/H5Z.c line 1356 in H5Z_pipeline(): required filter 'SZ3 compressor/decompressor for floating-point data.' is not registered
major: Data filters
minor: Read failed
`;

describe('parseDiagnostics', () => {
it('should parse HDF5 error message and return diagnostics', () => {
const diagnostics = parseDiagnostics(HDF5_ERROR_MESSAGE);
expect(diagnostics).toEqual([
{
major: 'Dataset',
minor: 'Read failed',
message: "Can't synchronously read data",
origin:
'/__w/libhdf5-wasm/libhdf5-wasm/build/1.14.2/_deps/hdf5-src/src/H5D.c line 1061 in H5Dread()',
},
{
major: 'Virtual Object Layer',
minor: 'Read failed',
message: 'Dataset read failed',
origin:
'/__w/libhdf5-wasm/libhdf5-wasm/build/1.14.2/_deps/hdf5-src/src/H5VLcallback.c line 2092 in H5VL_dataset_read_direct()',
},
{
major: 'Dataset',
minor: 'Filter operation failed',
message: 'Data pipeline read failed',
origin:
'/__w/libhdf5-wasm/libhdf5-wasm/build/1.14.2/_deps/hdf5-src/src/H5Dchunk.c line 4468 in H5D__chunk_lock()',
},
{
major: 'Data filters',
minor: 'Read failed',
message:
"Required filter 'SZ3 compressor/decompressor for floating-point data.' is not registered",
origin:
'/__w/libhdf5-wasm/libhdf5-wasm/build/1.14.2/_deps/hdf5-src/src/H5Z.c line 1356 in H5Z_pipeline()',
},
]);
});
});
65 changes: 64 additions & 1 deletion packages/h5wasm/src/utils.ts
@@ -37,7 +37,7 @@
Group as H5WasmGroup,
} from 'h5wasm';

import type { H5WasmAttributes, H5WasmEntity } from './models';
import type { H5WasmAttributes, H5WasmEntity, HDF5Diag } from './models';

// https://github.com/h5wasm/h5wasm-plugins#included-plugins
export enum Plugin {
@@ -303,3 +303,66 @@ export function sanitizeBigInts(value: unknown): unknown {

return value;
}

const DIAG_PREDICATES: ((diag: HDF5Diag) => boolean)[] = [
(diag: HDF5Diag) => {
return (
diag.major === 'Data filters' &&
/^required filter.*not registered$/iu.test(diag.message)
);
},
];

export function getEnhancedError(error: unknown): unknown {
if (!(error instanceof Error) || !error.message.startsWith('HDF5-DIAG')) {
return error;
}

const diagnostics = parseDiagnostics(error.message);
const opts = { cause: error };

for (const predicate of DIAG_PREDICATES) {
const diag = diagnostics.find(predicate);
if (diag) {
return new Error(diag.message, opts);
}
}

return new Error('Error detected in HDF5', opts);
}

const MESSAGE_LINE_REGEX = /#\d{3}: (\/.+ line \d+ in .+\(\)): (.+)$/u;
const MAJOR_LINE_REGEX = /major: (.+)$/u;
const MINOR_LINE_REGEX = /minor: (.+)$/u;

/* Each HDF5 diagnostic entry is made up of three lines:
* 1. "#000: <origin>: <message>"
* 2. "major: <major>"
* 3. "major: <minor>"
*/
export function parseDiagnostics(msg: string): HDF5Diag[] {
// Ignore first line (generic error) and last line (empty)
const lines = msg.split(/\n/u).slice(1, -1);

if (lines.length % 3 !== 0) {
return [];
}

const diags: HDF5Diag[] = [];
for (let i = 0; i < lines.length; i += 3) {
const [, origin, message] = MESSAGE_LINE_REGEX.exec(lines[i]) || [];
const [, major] = MAJOR_LINE_REGEX.exec(lines[i + 1]) || [];
const [, minor] = MINOR_LINE_REGEX.exec(lines[i + 2]) || [];

if (origin && message && major && minor) {
diags.push({
major,
minor,
message: `${message.charAt(0).toUpperCase()}${message.slice(1)}`,
origin,
});
}
}

return diags;
}
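A small usage sketch of the two new helpers, assuming HDF5_ERROR_MESSAGE is the fixture from utils.test.ts above (illustration only, not part of this commit):

// h5wasm now throws errors whose message starts with "HDF5-DIAG: ..." (see
// activate_throwing_error_handler in h5wasm-api.ts).
const raw = new Error(HDF5_ERROR_MESSAGE);

// parseDiagnostics() yields one entry per 3-line block of the dump. With the
// fixture, the 'Data filters' entry matches DIAG_PREDICATES, so getEnhancedError()
// returns an Error with:
//   message: "Required filter 'SZ3 compressor/decompressor for floating-point
//             data.' is not registered"
//   cause:   raw (the full dump, rendered by ErrorFallback's <details> element)
const enhanced = getEnhancedError(raw);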