Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,15 +23,15 @@ dependencies = [
]

# Entry Points
[project.entry-points."aiida.data"]
"fans" = "aiida_fans.data:FANSParameters"
# [project.entry-points."aiida.data"]
# "fans" = "aiida_fans.data:FANSParameters"
[project.entry-points."aiida.calculations"]
"fans.stashed" = "aiida_fans.calculations:FansStashedCalculation"
"fans.fragmented" = "aiida_fans.calculations:FansFragmentedCalculation"
[project.entry-points."aiida.parsers"]
"fans" = "aiida_fans.parsers:FansParser"
[project.entry-points."aiida.cmdline.data"]
"fans" = "aiida_fans.cli:data_cli"
# [project.entry-points."aiida.cmdline.data"]
# "fans" = "aiida_fans.cli:data_cli"

# Build System
[build-system]
Expand Down Expand Up @@ -64,7 +64,7 @@ dependencies = {aiida-fans = "==0.1.5"}
# [tool.pixi.feature.aiida]
# dependencies = {aiida-core = "2.6.*"}
[tool.pixi.feature.fans]
dependencies = {fans = "0.3.*"}
dependencies = {fans = "0.4.*"}
[tool.pixi.feature.ruff]
dependencies = {ruff = "*"}
tasks = {fmt = "ruff check", dummy = "echo dummy", my-dummy="echo my-dummy"}
Expand Down
63 changes: 13 additions & 50 deletions src/aiida_fans/calculations.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,11 @@
from aiida.common.folders import Folder
from aiida.engine import CalcJob
from aiida.engine.processes.process_spec import CalcJobProcessSpec
from aiida.orm import ArrayData, Dict, Float, Int, List, SinglefileData, Str
from aiida.orm import Dict, Float, Int, List, SinglefileData, Str
from h5py import File as h5File

from aiida_fans.helpers import make_input_dict


class FansCalcBase(CalcJob):
"""Base class of all calculations using FANS."""
Expand All @@ -20,9 +22,8 @@ def define(cls, spec: CalcJobProcessSpec) -> None:
"""Define inputs, outputs, and exit codes of the calculation."""
super().define(spec)

# Metadata
# Default Metadata
spec.inputs["metadata"]["label"].default = "FANS"
# spec.inputs["metadata"]["dry_run"].default = True
## Processing Power
spec.inputs["metadata"]["options"]["withmpi"].default = True
spec.inputs["metadata"]["options"]["resources"].default = {
Expand All @@ -35,6 +36,10 @@ def define(cls, spec: CalcJobProcessSpec) -> None:
## Parser
spec.inputs["metadata"]["options"]["parser_name"].default = "fans"

# Custom Metadata
spec.input("metadata.options.results_prefix", valid_type=str, default="")
spec.input("metadata.options.results", valid_type=list, default=[])

# Input Ports
## Microstructure Definition
spec.input_namespace("microstructure")
Expand All @@ -53,9 +58,7 @@ def define(cls, spec: CalcJobProcessSpec) -> None:
spec.input("error_parameters.type", valid_type=Str)
spec.input("error_parameters.tolerance", valid_type=Float)
## Macroscale Loading Conditions
spec.input("macroscale_loading", valid_type=ArrayData)
## Results Specification
spec.input("results", valid_type=List)
spec.input("macroscale_loading", valid_type=List)

# Output Ports
spec.output("output", valid_type=SinglefileData)
Expand Down Expand Up @@ -107,28 +110,8 @@ def prepare_for_submission(self, folder: Folder) -> CalcInfo:
copyfileobj(source, target)

# input.json as dict
input_dict = {
## Microstructure Definition
"ms_filename": str(ms_filepath), # path to stashed microstructure
"ms_datasetname": self.inputs.microstructure.datasetname.value,
"ms_L": self.inputs.microstructure.L.get_list(),
## Problem Type and Material Model
"problem_type": self.inputs.problem_type.value,
"matmodel": self.inputs.matmodel.value,
"material_properties": self.inputs.material_properties.get_dict(),
## Solver Settings
"method": self.inputs.method.value,
"n_it": self.inputs.n_it.value,
"error_parameters": {
"measure": self.inputs.error_parameters.measure.value,
"type": self.inputs.error_parameters.type.value,
"tolerance": self.inputs.error_parameters.tolerance.value
},
## Macroscale Loading Conditions
"macroscale_loading": [a[1].tolist() for a in self.inputs.macroscale_loading.get_iterarrays()],
## Results Specification
"results": self.inputs.results.get_list()
}
input_dict = make_input_dict(self)
input_dict["microstructure"]["filepath"] = str(ms_filepath)
# write input.json to working directory
with folder.open(self.options.input_filename, "w", "utf8") as json:
dump(input_dict, json, indent=4)
Expand All @@ -154,28 +137,8 @@ def prepare_for_submission(self, folder: Folder) -> CalcInfo:
h5_src.copy(datasetname, h5_dest, name=datasetname)

# input.json as dict
input_dict = {
## Microstructure Definition
"ms_filename": "microstructure.h5", # path to fragmented microstructure
"ms_datasetname": self.inputs.microstructure.datasetname.value,
"ms_L": self.inputs.microstructure.L.get_list(),
## Problem Type and Material Model
"problem_type": self.inputs.problem_type.value,
"matmodel": self.inputs.matmodel.value,
"material_properties": self.inputs.material_properties.get_dict(),
## Solver Settings
"method": self.inputs.method.value,
"n_it": self.inputs.n_it.value,
"error_parameters": {
"measure": self.inputs.error_parameters.measure.value,
"type": self.inputs.error_parameters.type.value,
"tolerance": self.inputs.error_parameters.tolerance.value
},
## Macroscale Loading Conditions
"macroscale_loading": [a[1].tolist() for a in self.inputs.macroscale_loading.get_iterarrays()],
## Results Specification
"results": self.inputs.results.get_list()
}
input_dict = make_input_dict(self)
input_dict["microstructure"]["filepath"] = "microstructure.h5"
# write input.json to working directory
with folder.open(self.options.input_filename, "w", "utf8") as json:
dump(input_dict, json, indent=4)
Expand Down
52 changes: 30 additions & 22 deletions src/aiida_fans/helpers.py
Original file line number Diff line number Diff line change
@@ -1,30 +1,38 @@
"""Tools required by aiida_fans."""
"""Tools required by aiida-fans."""

import json
from typing import Any

from aiida.orm import ArrayData, Dict, Float, Int, List, SinglefileData, Str
from aiida.engine import CalcJob
from numpy import allclose, ndarray


class InputEncoder(json.JSONEncoder):
    """Prepares a dictionary of calcjob inputs for json representation.

    A ``json.JSONEncoder`` subclass whose ``default`` hook unwraps AiiDA
    ORM data nodes into plain Python values so a dict of calcjob inputs
    can be serialized with ``json.dump(s)``.
    """

    def default(self, obj):
        """Converts aiida datatypes to their python counterparts.

        Called by the encoder only for objects it cannot serialize
        natively. Anything that is not a recognized AiiDA node is
        delegated to the base class, which raises ``TypeError``.
        """
        # NOTE(review): case order matters here — a node matching an
        # earlier pattern never reaches a later one. Str/Int/Float are
        # tested before List/Dict/ArrayData deliberately.
        match obj:
            case Str() | Int() | Float():
                # Scalar nodes expose their payload via `.value`.
                return obj.value
            case List():
                return obj.get_list()
            case Dict():
                return obj.get_dict()
            case ArrayData():
                # One nested list per stored array; iteration order of
                # get_iterarrays is not guaranteed, hence the caution.
                return [a[1].tolist() for a in obj.get_iterarrays()] #! Caution: may be disordered
            case SinglefileData():
                # Only the filename is serialized, not the file content.
                return obj.filename
            case _:
                # Let the base class default method raise the TypeError
                return super().default(obj)
def make_input_dict(job: CalcJob) -> dict[str, Any]:
    """Prepares a dictionary that maps to an input.json from calcjob inputs.

    The ``microstructure.filepath`` entry is intentionally ``None``: each
    concrete calculation implementation overwrites it with the real path
    (stashed or fragmented) before writing input.json.
    """
    inputs = job.inputs
    options = inputs.metadata.options
    errors = inputs.error_parameters

    input_dict: dict[str, Any] = {}
    ## Microstructure Definition
    input_dict["microstructure"] = {
        "filepath": None,  # placeholder — must be overwritten by the implementation
        "datasetname": inputs.microstructure.datasetname.value,
        "L": inputs.microstructure.L.get_list(),
    }
    input_dict["results_prefix"] = options.results_prefix
    ## Problem Type and Material Model
    input_dict["problem_type"] = inputs.problem_type.value
    input_dict["matmodel"] = inputs.matmodel.value
    input_dict["material_properties"] = inputs.material_properties.get_dict()
    ## Solver Settings
    input_dict["method"] = inputs.method.value
    input_dict["n_it"] = inputs.n_it.value
    input_dict["error_parameters"] = {
        "measure": errors.measure.value,
        "type": errors.type.value,
        "tolerance": errors.tolerance.value,
    }
    ## Macroscale Loading Conditions
    input_dict["macroscale_loading"] = inputs.macroscale_loading.get_list()
    ## Results Specification
    input_dict["results"] = options.results
    return input_dict

def arraydata_equal(first: dict[str, ndarray], second: dict[str, ndarray]) -> bool:
"""Return whether two dicts of arrays are roughly equal."""
Expand Down
6 changes: 5 additions & 1 deletion src/aiida_fans/parsers.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,11 @@ def parse(self, **kwargs) -> ExitCode | None:
return self.exit_codes.ERROR_MISSING_OUTPUT

with h5File(output_path) as h5:
results = h5[self.node.inputs.microstructure.datasetname.value]
results = h5[
self.node.inputs.microstructure.datasetname.value + \
"_results/" + \
self.node.get_option('results_prefix')
]
results.visititems(self.parse_h5)

if self.results_dict:
Expand Down
20 changes: 14 additions & 6 deletions src/aiida_fans/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from aiida_fans.helpers import arraydata_equal


def aiida_type(value : Any) -> type[Data]:
def aiida_type(value: Any) -> type[Data]:
"""Find the corresponding AiiDA datatype for a variable with pythonic type.

Args:
Expand Down Expand Up @@ -39,7 +39,7 @@ def aiida_type(value : Any) -> type[Data]:
case _:
raise NotImplementedError(f"Received an input of value: {value} with type: {type(value)}")

def fetch(label : str, value : Any) -> list[Node]:
def fetch(label: str, value: Any) -> list[Node]:
"""Return a list of nodes matching the label and value provided.

Args:
Expand All @@ -61,12 +61,16 @@ def fetch(label : str, value : Any) -> list[Node]:
else:
array_nodes = []
for array_node in nodes:
array_value = {k:v for k, v in [(name, array_node.get_array(name)) for name in array_node.get_arraynames()]}
array_value = {
k: v for k, v in [
(name, array_node.get_array(name)) for name in array_node.get_arraynames() # type: ignore
]
}
if arraydata_equal(value, array_value):
array_nodes.append(array_node)
return array_nodes

def generate(label : str, value : Any) -> Node:
def generate(label: str, value: Any) -> Node:
"""Return a single node with the label and value provided.

Uses an existing node when possible, but otherwise creates one instead.
Expand All @@ -89,7 +93,7 @@ def generate(label : str, value : Any) -> Node:
else:
raise RuntimeError

def convert(ins : dict[str, Any], path : list[str] = []):
def convert(ins: dict[str, Any], path: list[str] = []):
"""Takes a dictionary of inputs and converts the values to their respective Nodes.

Args:
Expand All @@ -104,7 +108,7 @@ def convert(ins : dict[str, Any], path : list[str] = []):
else:
ins[k] = generate(".".join([*path, k]), v)

def compile_query(ins : dict[str,Any], qb : QueryBuilder) -> None:
def compile_query(ins: dict[str,Any], qb: QueryBuilder) -> None:
"""Interate over the converted input dictionary and append to the QueryBuilder for each node.

Args:
Expand Down Expand Up @@ -175,6 +179,10 @@ def execute_fans(
print("ERROR: Calculation strategy must be either 'Fragmented' or 'Stashed'.")
raise ValueError

# move results_prefix and results items to metadata.options
inputs.setdefault("metadata", {}).setdefault("options", {})["results_prefix"] = inputs.pop("results_prefix", "")
inputs.setdefault("metadata", {}).setdefault("options", {})["results"] = inputs.pop("results", [])

# fetch the inputs if possible or otherwise create them
convert(inputs)

Expand Down