diff --git a/.gitattributes b/.gitattributes deleted file mode 100644 index e69de29..0000000 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index df71251..386e6b1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,43 +6,6 @@ on: jobs: - static-analysis: - name: Perform static analysis - runs-on: ubuntu-latest - timeout-minutes: 15 - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Set up Pixi - uses: prefix-dev/setup-pixi@v0.8.1 - with: - pixi-version: v0.40.2 - cache: false - environments: fmt - activate-environment: true - - name: Run formatter and linter - run: pixi run fmt - - test: - name: Perform tests - runs-on: ubuntu-latest - timeout-minutes: 15 - strategy: - matrix: - environment: [test-py311, test-py312] - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Set up Pixi - uses: prefix-dev/setup-pixi@v0.8.1 - with: - pixi-version: v0.40.2 - cache: false - environments: ${{ matrix.environment }} - activate-environment: true - - name: Run tests - run: pixi run test - docs: name: Generate documentation runs-on: ubuntu-latest diff --git a/.gitignore b/.gitignore index c743f91..7ca5003 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,142 @@ +# Ignore build directories +build/ +bin/ +lib/ +lib64/ +*/build*/ + +# Ignore CMake generated files +CMakeFiles/ +CMakeCache.txt +cmake_install.cmake +Makefile +CMakeLists.txt.user* +.cmake/ + +# Ignore compiled binaries and executables +*.exe +*.out +*.app +*.so +*.dylib + +# Ignore object files and libraries +*.o +*.a +*.lib + +# Ignore editor-specific files +.vscode/ +.idea/ +*.vs/ +*.suo +*.ntvs* +*.njsproj +*.sln +*.suo +*.swp +*.sln.docstates +*.user +*.userosscache +*.suo +*.tsserver.log.* +*.dbmdl +*.dbproj +*.jfm +*.pfx +*.publishsettings +node_modules/ +bower_components/ + +# Ignore OS generated files +.DS_Store +.DS_Store? 
+._* +.Spotlight-V100 +.Trashes +ehthumbs.db +Thumbs.db + +# Ignore package manager directories +pip-wheel-metadata/ + +!fans_input.grid.json +!fans_input.slides.json + +# C extensions +*.so + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ +docs/_static/ +docs/_autosummary/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# MkDocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pyright type checker +.pyright/ + +# End of standard Python ignores + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -61,14 +200,15 @@ diff.md # Lock file *.lock -# Tutorial required files +# Project specific files +*.h5 !tutorial_dataset.h5 +*.yaml +!.readthedocs.yml +!.pre-commit-config.yaml -# Tutorial generated files -configure_profile.yaml -configure_computer.yaml -configure_code.yaml +# Ignore AiiDA specific files .aiida_run -# Development test sandbox -test_dev +# Dedicated folder for personal projects +**/scratch/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..3f890af --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,10 @@ +repos: +- repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.12.0 + hooks: + # Run the linter. 
+ - id: ruff-check + args: [ --fix ] + # Run the formatter. + - id: ruff-format diff --git a/pyproject.toml b/pyproject.toml index 81e1f08..3d6dfd8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,17 +5,17 @@ description = "AiiDA plugin for FANS, an FFT-based homogenization solver." urls = {Documentation = "http://aiida-fans.readthedocs.io/en/latest/", Source = "https://github.com/ethan-shanahan/aiida-fans" } authors = [{name = "Ethan Shanahan", email = "ethan.shanahan@gmail.com"}] readme = "README.md" -license = {file = "LICENSE"} +license = "LGPL-3.0-or-later" +license-files = ["LICENSE"] classifiers = [ "Natural Language :: English", "Programming Language :: Python", "Operating System :: POSIX :: Linux", "Intended Audience :: Science/Research", - "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)", "Development Status :: 3 - Alpha", "Framework :: AiiDA" ] -keywords = ["aiida", "plugin", "fans"] +keywords = ["aiida", "fans", "plugin"] requires-python = ">=3.11" dependencies = [ "aiida-core>=2.6", @@ -23,15 +23,10 @@ dependencies = [ ] # Entry Points -# [project.entry-points."aiida.data"] -# "fans" = "aiida_fans.data:FANSParameters" [project.entry-points."aiida.calculations"] -"fans.stashed" = "aiida_fans.calculations:FansStashedCalculation" -"fans.fragmented" = "aiida_fans.calculations:FansFragmentedCalculation" +"fans" = "aiida_fans.calculations:FansCalculation" [project.entry-points."aiida.parsers"] "fans" = "aiida_fans.parsers:FansParser" -# [project.entry-points."aiida.cmdline.data"] -# "fans" = "aiida_fans.cli:data_cli" # Build System [build-system] @@ -46,63 +41,41 @@ build-backend = "setuptools.build_meta" channels = ["conda-forge"] platforms = ["linux-64"] -### pixi: default dependencies (in addition to aiida-core) -[tool.pixi.dependencies] -# None -[tool.pixi.pypi-dependencies] -# None - -### pixi: default tasks -[tool.pixi.tasks] -# None - ### pixi: features -[tool.pixi.feature.self] -pypi-dependencies = {aiida-fans = { path 
= ".", editable = true }} -[tool.pixi.feature.plugin] -dependencies = {aiida-fans = "==0.1.5"} -# [tool.pixi.feature.aiida] -# dependencies = {aiida-core = "2.6.*"} -[tool.pixi.feature.fans] -dependencies = {fans = "0.4.*"} -[tool.pixi.feature.ruff] -dependencies = {ruff = "*"} -tasks = {fmt = "ruff check", dummy = "echo dummy", my-dummy="echo my-dummy"} +[tool.pixi.feature.py3] +dependencies = {python = "3.*"} [tool.pixi.feature.build] pypi-dependencies = {build = "*"} tasks = {build-dist = "python -m build"} [tool.pixi.feature.sphinx] dependencies = {sphinx = "*", sphinx-book-theme = "*"} tasks = {build-docs = "sphinx-build -M html docs/source docs/build"} -[tool.pixi.feature.pytest] -dependencies = {pytest = "*"} -tasks = {test = "echo dummy test passes"} [tool.pixi.feature.marimo] -dependencies = {marimo = "0.13.*"} +dependencies = {marimo = "0.14.*"} tasks = {tutorial = "marimo edit tutorial.py"} -[tool.pixi.feature.py311] -dependencies = {python = "3.11.*"} -[tool.pixi.feature.py312] -dependencies = {python = "3.12.*"} -# [tool.pixi.feature.py313] -# dependencies = {python = "3.13.*"} +[tool.pixi.feature.fans] +dependencies = {fans = "0.4.*"} + +### pixi: default environment dependencies +[tool.pixi.dependencies] +aiida-core = "2.6.*" +h5py = "*" +pre-commit = "*" +[tool.pixi.pypi-dependencies] +aiida-fans = { path = ".", editable = true } ### pixi: environments [tool.pixi.environments] -dev = { features = ["self", "ruff", "pytest"], solve-group = "default" } -fmt = { features = ["ruff", "py312"], no-default-feature = true } -dist = { features = ["build", "py312"], no-default-feature = true } -docs = { features = ["sphinx", "py312"], no-default-feature = true } -test-py311 = { features = ["self", "fans", "pytest", "py311"], solve-group = "py311" } -test-py312 = { features = ["self", "fans", "pytest", "py312"], solve-group = "py312" } -# test-py313 = { features = ["self", "fans", "pytest", "py313"], solve-group = "py313" } -tutorial = { features = ["plugin", 
"fans", "marimo"], no-default-feature = true} +dist = { no-default-feature = true, features = ["py3", "build"] } # CI env +docs = { no-default-feature = true, features = ["py3", "sphinx"] } # CI env +tutorial = { features = ["marimo", "fans"] } ## Build Tools: setuptools_scm [tool.setuptools_scm] version_file = "src/aiida_fans/_version.py" + ## Style Tools: ruff [tool.ruff] extend-exclude = [ @@ -131,10 +104,5 @@ select = [ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" # Allow unused variables when underscore-prefixed. pydocstyle = {convention = "google"} -## Test Tools: pytest -[tool.pytest.ini_options] -[tool.coverage] -source = ["src/aiida_fans"] - ## Docs Tools: sphinx # None diff --git a/src/aiida_fans/calculations.py b/src/aiida_fans/calculations.py index a53965a..8b6311e 100644 --- a/src/aiida_fans/calculations.py +++ b/src/aiida_fans/calculations.py @@ -14,8 +14,8 @@ from aiida_fans.helpers import make_input_dict -class FansCalcBase(CalcJob): - """Base class of all calculations using FANS.""" +class FansCalculation(CalcJob): + """Calculations using FANS.""" @classmethod def define(cls, spec: CalcJobProcessSpec) -> None: @@ -26,10 +26,7 @@ def define(cls, spec: CalcJobProcessSpec) -> None: spec.inputs["metadata"]["label"].default = "FANS" ## Processing Power spec.inputs["metadata"]["options"]["withmpi"].default = True - spec.inputs["metadata"]["options"]["resources"].default = { - "num_machines": 1, - "num_mpiprocs_per_machine": 4 - } + spec.inputs["metadata"]["options"]["resources"].default = {"num_machines": 1, "num_mpiprocs_per_machine": 4} ## Filenames spec.inputs["metadata"]["options"]["input_filename"].default = "input.json" spec.inputs["metadata"]["options"]["output_filename"].default = "output.h5" @@ -39,6 +36,7 @@ def define(cls, spec: CalcJobProcessSpec) -> None: # Custom Metadata spec.input("metadata.options.results_prefix", valid_type=str, default="") spec.input("metadata.options.results", valid_type=list, default=[]) + 
spec.input("metadata.options.stashed_microstructure", valid_type=bool, default=True) # Input Ports ## Microstructure Definition @@ -69,6 +67,42 @@ def define(cls, spec: CalcJobProcessSpec) -> None: def prepare_for_submission(self, folder: Folder) -> CalcInfo: """Prepare the calculation for submission.""" + # Stashed Strategy: + if self.options.stashed_microstructure: + ms_filepath: Path = ( + Path(self.inputs.code.computer.get_workdir()) + / "stash/microstructures" + / self.inputs.microstructure.file.filename + ) + # if microstructure does not exist in stash, make it + if not ms_filepath.is_file(): + ms_filepath.parent.mkdir(parents=True, exist_ok=True) + with self.inputs.microstructure.file.open(mode="rb") as source: + with ms_filepath.open(mode="wb") as target: + copyfileobj(source, target) + + # input.json as dict + input_dict = make_input_dict(self) + input_dict["microstructure"]["filepath"] = str(ms_filepath) + # write input.json to working directory + with folder.open(self.options.input_filename, "w", "utf8") as json: + dump(input_dict, json, indent=4) + # Fragmented Strategy: + else: + datasetname: str = self.inputs.microstructure.datasetname.value + with folder.open("microstructure.h5", "bw") as f_dest: + with h5File(f_dest, "w") as h5_dest: + with self.inputs.microstructure.file.open(mode="rb") as f_src: + with h5File(f_src, "r") as h5_src: + h5_src.copy(datasetname, h5_dest, name=datasetname) + + # input.json as dict + input_dict = make_input_dict(self) + input_dict["microstructure"]["filepath"] = "microstructure.h5" + # write input.json to working directory + with folder.open(self.options.input_filename, "w", "utf8") as json: + dump(input_dict, json, indent=4) + # Specifying the code info: codeinfo = CodeInfo() codeinfo.code_uuid = self.inputs.code.uuid @@ -82,65 +116,6 @@ def prepare_for_submission(self, folder: Folder) -> CalcInfo: calcinfo.local_copy_list = [] calcinfo.remote_copy_list = [] calcinfo.retrieve_list = [codeinfo.stdout_name, 
codeinfo.stderr_name] - calcinfo.retrieve_temporary_list = [ - self.options.output_filename - ] + calcinfo.retrieve_temporary_list = [self.options.output_filename] return calcinfo - - -class FansStashedCalculation(FansCalcBase): - """Calculations using FANS and the "Stashed" microstructure distribution strategy.""" - - @classmethod - def define(cls, spec: CalcJobProcessSpec) -> None: - """Define inputs, outputs, and exit codes of the calculation.""" - return super().define(spec) - - def prepare_for_submission(self, folder: Folder) -> CalcInfo: - """Prepare the calculation for submission.""" - ms_filepath: Path = Path(self.inputs.code.computer.get_workdir()) / \ - "stash/microstructures" / \ - self.inputs.microstructure.file.filename - # if microstructure does not exist in stash, make it - if not ms_filepath.is_file(): - ms_filepath.parent.mkdir(parents=True, exist_ok=True) - with self.inputs.microstructure.file.open(mode='rb') as source: - with ms_filepath.open(mode='wb') as target: - copyfileobj(source, target) - - # input.json as dict - input_dict = make_input_dict(self) - input_dict["microstructure"]["filepath"] = str(ms_filepath) - # write input.json to working directory - with folder.open(self.options.input_filename, "w", "utf8") as json: - dump(input_dict, json, indent=4) - - return super().prepare_for_submission(folder) - -class FansFragmentedCalculation(FansCalcBase): - """Calculations using FANS and the "Fragmented" microstructure distribution strategy.""" - - @classmethod - def define(cls, spec: CalcJobProcessSpec) -> None: - """Define inputs, outputs, and exit codes of the calculation.""" - return super().define(spec) - - def prepare_for_submission(self, folder: Folder) -> CalcInfo: - """Prepare the calculation for submission.""" - # Write Microstructure Subset to Folder - datasetname : str = self.inputs.microstructure.datasetname.value - with folder.open("microstructure.h5","bw") as f_dest: - with h5File(f_dest,"w") as h5_dest: - with 
self.inputs.microstructure.file.open(mode="rb") as f_src: - with h5File(f_src,'r') as h5_src: - h5_src.copy(datasetname, h5_dest, name=datasetname) - - # input.json as dict - input_dict = make_input_dict(self) - input_dict["microstructure"]["filepath"] = "microstructure.h5" - # write input.json to working directory - with folder.open(self.options.input_filename, "w", "utf8") as json: - dump(input_dict, json, indent=4) - - return super().prepare_for_submission(folder) diff --git a/src/aiida_fans/helpers.py b/src/aiida_fans/helpers.py index f527c54..e5cbd79 100644 --- a/src/aiida_fans/helpers.py +++ b/src/aiida_fans/helpers.py @@ -11,9 +11,9 @@ def make_input_dict(job: CalcJob) -> dict[str, Any]: return { ## Microstructure Definition "microstructure": { - "filepath": None, # path to stashed microstructure, must be overwritten by impl + "filepath": None, # path to stashed microstructure, must be overwritten by impl "datasetname": job.inputs.microstructure.datasetname.value, - "L": job.inputs.microstructure.L.get_list() + "L": job.inputs.microstructure.L.get_list(), }, "results_prefix": job.inputs.metadata.options.results_prefix, ## Problem Type and Material Model @@ -26,14 +26,15 @@ def make_input_dict(job: CalcJob) -> dict[str, Any]: "error_parameters": { "measure": job.inputs.error_parameters.measure.value, "type": job.inputs.error_parameters.type.value, - "tolerance": job.inputs.error_parameters.tolerance.value + "tolerance": job.inputs.error_parameters.tolerance.value, }, ## Macroscale Loading Conditions "macroscale_loading": job.inputs.macroscale_loading.get_list(), ## Results Specification - "results": job.inputs.metadata.options.results + "results": job.inputs.metadata.options.results, } + def arraydata_equal(first: dict[str, ndarray], second: dict[str, ndarray]) -> bool: """Return whether two dicts of arrays are roughly equal.""" if first.keys() != second.keys(): diff --git a/src/aiida_fans/parsers.py b/src/aiida_fans/parsers.py index f755ba8..f22b21e 100644 
--- a/src/aiida_fans/parsers.py +++ b/src/aiida_fans/parsers.py @@ -19,7 +19,7 @@ def __init__(self, node: CalcJobNode): def parse(self, **kwargs) -> ExitCode | None: """Parse outputs and store results as nodes.""" - output_path: Path = Path(kwargs["retrieved_temporary_folder"]) / self.node.get_option("output_filename") # type: ignore + output_path: Path = Path(kwargs["retrieved_temporary_folder"]) / self.node.get_option("output_filename") # type: ignore if output_path.is_file(): self.out("output", node=SinglefileData(output_path)) else: @@ -27,9 +27,7 @@ def parse(self, **kwargs) -> ExitCode | None: with h5File(output_path) as h5: results = h5[ - self.node.inputs.microstructure.datasetname.value + \ - "_results/" + \ - self.node.get_option('results_prefix') + self.node.inputs.microstructure.datasetname.value + "_results/" + self.node.get_option("results_prefix") ] results.visititems(self.parse_h5) diff --git a/src/aiida_fans/utils.py b/src/aiida_fans/utils.py index 6aa478a..ba9e1ac 100644 --- a/src/aiida_fans/utils.py +++ b/src/aiida_fans/utils.py @@ -24,21 +24,22 @@ def aiida_type(value: Any) -> type[Data]: """ match value: case str(): - return DataFactory("core.str") # Str + return DataFactory("core.str") # Str case int(): - return DataFactory("core.int") # Int + return DataFactory("core.int") # Int case float(): - return DataFactory("core.float") # Float + return DataFactory("core.float") # Float case list(): - return DataFactory("core.list") # List + return DataFactory("core.list") # List case dict(): if all(map(lambda t: isinstance(t, ndarray), value.values())): - return DataFactory("core.array") # ArrayData + return DataFactory("core.array") # ArrayData else: - return DataFactory("core.dict") # Dict + return DataFactory("core.dict") # Dict case _: raise NotImplementedError(f"Received an input of value: {value} with type: {type(value)}") + def fetch(label: str, value: Any) -> list[Node]: """Return a list of nodes matching the label and value provided. 
@@ -50,26 +51,31 @@ def fetch(label: str, value: Any) -> list[Node]: list[Node]: the list of nodes matching the give criteria """ datatype = aiida_type(value) - nodes = QueryBuilder( - ).append(cls=datatype, tag="n" - ).add_filter("n", {"label": label} - ).add_filter("n", {"attributes": {"==": datatype(value).base.attributes.all}} - ).all(flat=True) + nodes = ( + QueryBuilder() + .append(cls=datatype, tag="n") + .add_filter("n", {"label": label}) + .add_filter("n", {"attributes": {"==": datatype(value).base.attributes.all}}) + .all(flat=True) + ) if datatype != DataFactory("core.array"): - return nodes # type: ignore + return nodes # type: ignore else: array_nodes = [] for array_node in nodes: array_value = { - k: v for k, v in [ - (name, array_node.get_array(name)) for name in array_node.get_arraynames() # type: ignore + k: v + for k, v in [ + (name, array_node.get_array(name)) + for name in array_node.get_arraynames() # type: ignore ] } if arraydata_equal(value, array_value): array_nodes.append(array_node) return array_nodes + def generate(label: str, value: Any) -> Node: """Return a single node with the label and value provided. @@ -93,6 +99,7 @@ def generate(label: str, value: Any) -> Node: else: raise RuntimeError + def convert(ins: dict[str, Any], path: list[str] = []): """Takes a dictionary of inputs and converts the values to their respective Nodes. @@ -108,7 +115,8 @@ def convert(ins: dict[str, Any], path: list[str] = []): else: ins[k] = generate(".".join([*path, k]), v) -def compile_query(ins: dict[str,Any], qb: QueryBuilder) -> None: + +def compile_query(ins: dict[str, Any], qb: QueryBuilder) -> None: """Interate over the converted input dictionary and append to the QueryBuilder for each node. 
Args: @@ -121,18 +129,10 @@ def compile_query(ins: dict[str,Any], qb: QueryBuilder) -> None: if k in ["microstructure", "error_parameters"] and isinstance(v, dict): compile_query(v, qb) else: - qb.append( - cls=type(v), - with_outgoing="calc", - filters={"pk": v.pk} - ) - - -def execute_fans( - mode: Literal["Submit", "Run"], - inputs: dict[str, Any], - strategy: Literal["Fragmented", "Stashed"] = "Fragmented", - ): + qb.append(cls=type(v), with_outgoing="calc", filters={"pk": v.pk}) + + +def execute_fans(mode: Literal["Submit", "Run"], inputs: dict[str, Any]): """This utility function simplifies the process of executing aiida-fans jobs. The only nodes you must provide are the `code` and `microstructure` inputs. @@ -169,15 +169,7 @@ def execute_fans( execute_fans("Submit", inputs, "Stashed") ``` """ - # update inputs with metadata.options.stash if necessary: - match strategy: - case "Stashed": - calcjob = CalculationFactory("fans.stashed") - case "Fragmented": - calcjob = CalculationFactory("fans.fragmented") - case _: - print("ERROR: Calculation strategy must be either 'Fragmented' or 'Stashed'.") - raise ValueError + calcjob = CalculationFactory("fans") # move results_prefix and results items to metadata.options inputs.setdefault("metadata", {}).setdefault("options", {})["results_prefix"] = inputs.pop("results_prefix", "") @@ -191,7 +183,7 @@ def execute_fans( compile_query(inputs, qb) results = qb.all(flat=True) if (count := len(results)) != 0: - print(f"It seems this calculation has already been performed {count} time{"s" if count > 1 else ""}. {results}") + print(f"It seems this calculation has already been performed {count} time{'s' if count > 1 else ''}. {results}") confirmation = input("Are you sure you want to rerun it? 
[y/N] ").strip().lower() in ["y", "yes"] else: confirmation = True @@ -199,20 +191,16 @@ def execute_fans( if confirmation: match mode: case "Run": - run(calcjob, inputs) # type: ignore + run(calcjob, inputs) # type: ignore case "Submit": - submit(calcjob, inputs) # type: ignore + submit(calcjob, inputs) # type: ignore -def submit_fans( - inputs: dict[str, Any], - strategy: Literal["Fragmented", "Stashed"] = "Fragmented", -): + +def submit_fans(inputs: dict[str, Any]): """See `execute_fans` for implementation and usage details.""" - execute_fans("Submit", inputs, strategy) + execute_fans("Submit", inputs) + -def run_fans( - inputs: dict[str, Any], - strategy: Literal["Fragmented", "Stashed"] = "Fragmented", -): +def run_fans(inputs: dict[str, Any]): """See `execute_fans` for implementation and usage details.""" - execute_fans("Run", inputs, strategy) + execute_fans("Run", inputs) diff --git a/tutorial/tutorial.py b/tutorial/tutorial.py index be2abc6..015d36c 100644 --- a/tutorial/tutorial.py +++ b/tutorial/tutorial.py @@ -1,8 +1,6 @@ - - import marimo -__generated_with = "0.13.0" +__generated_with = "0.14.7" app = marimo.App(app_title="AiiDA-FANS Tutorial") @@ -90,10 +88,10 @@ def _(mo): def _(mo): mo.md( r""" - ### 1. Create a Profile + ### 1. Create a Profile - By default, AiiDA stores app data at the user level. Even when AiiDA is installed in a virtual environment, it will still read and write to `.aiida` in your home directory. However, AiiDA provides users a way to seperate their data into "profiles". Let's create a profile for this tutorial. - """ + By default, AiiDA stores app data at the user level. Even when AiiDA is installed in a virtual environment, it will still read and write to `.aiida` in your home directory. However, AiiDA provides users a way to seperate their data into "profiles". Let's create a profile for this tutorial. + """ ) return @@ -201,15 +199,15 @@ def _(mo, profile_settings): def _(mo): mo.md( r""" - ### 2. Specify a Computer + ### 2. 
Specify a Computer - Before you proceed, ensure that your local computer satisfies the following requirements: + Before you proceed, ensure that your local computer satisfies the following requirements: - - it runs a Unix-like operating system (Linux distros and MacOS should work fine) - - it has `bash` installed + - it runs a Unix-like operating system (Linux distros and MacOS should work fine) + - it has `bash` installed - AiiDA does not assume what computer you wish to run jobs on, so even if you are only using your local machine, you must tell it as much. That is what we will do here; specify the localhost computer. - """ + AiiDA does not assume what computer you wish to run jobs on, so even if you are only using your local machine, you must tell it as much. That is what we will do here; specify the localhost computer. + """ ) return @@ -284,7 +282,8 @@ def _(Path, computer_settings, mo): @app.cell(hide_code=True) def _(computer_settings, mo): - mo.md(rf""" + mo.md( + rf""" To specify your new computer from this file run: ``` @@ -313,10 +312,10 @@ def _(computer_settings, mo): def _(mo): mo.md( r""" - ### 3. Define a Code + ### 3. Define a Code - The final step to setup AiiDA is to define the "code" you wish to utilise. Here, the "code" refers to FANS. This step is important as it tells AiiDA how to execute FANS and which plugin should handle its jobs. AiiDA provides many ways of handling the "code" of your project. Since we installed FANS in the environment, we can simply make use of it there. - """ + The final step to setup AiiDA is to define the "code" you wish to utilise. Here, the "code" refers to FANS. This step is important as it tells AiiDA how to execute FANS and which plugin should handle its jobs. AiiDA provides many ways of handling the "code" of your project. Since we installed FANS in the environment, we can simply make use of it there. 
+ """ ) return @@ -429,105 +428,105 @@ def _(code_settings, mo): def _(mo): mo.md( r""" - ## FANS Rundown + ## FANS Rundown - FANS requires a JSON input file. The input file can be thought of in 5 sections, each specifying the various problem parameters as well as runtime settings. Each setting also notes the appropriate AiiDA datatype. This is the type of node that you must give AiiDA when running jobs, as we will see later. + FANS requires a JSON input file. The input file can be thought of in 5 sections, each specifying the various problem parameters as well as runtime settings. Each setting also notes the appropriate AiiDA datatype. This is the type of node that you must give AiiDA when running jobs, as we will see later. - ### Microstructure Definition + ### Microstructure Definition - ```json - "ms_filename": "microstructures/sphere32.h5", - "ms_datasetname": "/sphere/32x32x32/ms", - "ms_L": [1.0, 1.0, 1.0] - ``` + ```json + "ms_filename": "microstructures/sphere32.h5", + "ms_datasetname": "/sphere/32x32x32/ms", + "ms_L": [1.0, 1.0, 1.0] + ``` - - `ms_filename`: This specifies the path to the HDF5 file that contains the microstructure data. (AiiDA type: `SinglefileData`) - - `ms_datasetname`: This is the path within the HDF5 file to the specific dataset that represents the microstructure. (AiiDA type: `Str`) - - `ms_L`: Microstructure length defines the physical dimensions of the microstructure in the x, y, and z directions. (AiiDA type: `List`) + - `ms_filename`: This specifies the path to the HDF5 file that contains the microstructure data. (AiiDA type: `SinglefileData`) + - `ms_datasetname`: This is the path within the HDF5 file to the specific dataset that represents the microstructure. (AiiDA type: `Str`) + - `ms_L`: Microstructure length defines the physical dimensions of the microstructure in the x, y, and z directions. 
(AiiDA type: `List`) - ### Problem Type and Material Model + ### Problem Type and Material Model - ```json - "problem_type": "mechanical", - "matmodel": "LinearElasticIsotropic", - "material_properties": { - "bulk_modulus": [62.5000, 222.222], - "shear_modulus": [28.8462, 166.6667] - } - ``` + ```json + "problem_type": "mechanical", + "matmodel": "LinearElasticIsotropic", + "material_properties": { + "bulk_modulus": [62.5000, 222.222], + "shear_modulus": [28.8462, 166.6667] + } + ``` - - `problem_type`: This defines the type of physical problem you are solving. Common options include "thermal" problems and "mechanical" problems. (AiiDA type: `Str`) - - `matmodel`: This specifies the material model to be used in the simulation. Examples include `LinearThermalIsotropic` for isotropic linear thermal problems, `LinearElasticIsotropic` for isotropic linear elastic mechanical problems, `PseudoPlasticLinearHardening`/`PseudoPlasticNonLinearHardening` for plasticity mimicking model with linear/nonlinear hardening, and `J2ViscoPlastic_LinearIsotropicHardening`/ `J2ViscoPlastic_NonLinearIsotropicHardening` for rate dependent J2 plasticity model with linear/nonlinear isotropic hardening. (AiiDA type: `Str`) - - `material_properties`: This provides the necessary material parameters for the chosen material model. For thermal problems, you might specify `conductivity`, while mechanical problems might require `bulk_modulus`, `shear_modulus`, and more properties for advanced material models. These properties can be defined as arrays to represent multiple phases within the microstructure. (AiiDA type: `Dict`) + - `problem_type`: This defines the type of physical problem you are solving. Common options include "thermal" problems and "mechanical" problems. (AiiDA type: `Str`) + - `matmodel`: This specifies the material model to be used in the simulation. 
Examples include `LinearThermalIsotropic` for isotropic linear thermal problems, `LinearElasticIsotropic` for isotropic linear elastic mechanical problems, `PseudoPlasticLinearHardening`/`PseudoPlasticNonLinearHardening` for plasticity mimicking model with linear/nonlinear hardening, and `J2ViscoPlastic_LinearIsotropicHardening`/ `J2ViscoPlastic_NonLinearIsotropicHardening` for rate dependent J2 plasticity model with linear/nonlinear isotropic hardening. (AiiDA type: `Str`) + - `material_properties`: This provides the necessary material parameters for the chosen material model. For thermal problems, you might specify `conductivity`, while mechanical problems might require `bulk_modulus`, `shear_modulus`, and more properties for advanced material models. These properties can be defined as arrays to represent multiple phases within the microstructure. (AiiDA type: `Dict`) - ### Solver Settings + ### Solver Settings - ```json - "method": "cg", - "error_parameters":{ - "measure": "Linfinity", - "type": "absolute", - "tolerance": 1e-10 - }, - "n_it": 100 - ``` - - - `method`: This indicates the numerical method to be used for solving the system of equations. `cg` stands for the Conjugate Gradient method, and `fp` stands for the Fixed Point method. (AiiDA type: `Str`) - - `error_parameters`: This section defines the error parameters for the solver. Error control is applied on the finite element nodal residual of the problem. - - `measure`: Specifies the norm used to measure the error. Options include `Linfinity`, `L1`, or `L2`. (AiiDA type: `Str`) - - `type`: Defines the type of error measurement. Options are `absolute` or `relative`. (AiiDA type: `Str`) - - `tolerance`: Sets the tolerance level for the solver, defining the convergence criterion based on the chosen error measure. The solver iterates until the solution meets this tolerance. (AiiDA type: `Float`) - - `n_it`: Specifies the maximum number of iterations allowed for the FANS solver. 
(AiiDA type: `Int`) - - - ### Macroscale Loading Conditions - - ```json - "macroscale_loading": [ - [ - [0.004, -0.002, -0.002, 0, 0, 0], - [0.008, -0.004, -0.004, 0, 0, 0], - [0.012, -0.006, -0.006, 0, 0, 0], - [0.016, -0.008, -0.008, 0, 0, 0], - ], - [ - [0, 0, 0, 0.002, 0, 0], - [0, 0, 0, 0.004, 0, 0], - [0, 0, 0, 0.006, 0, 0], - [0, 0, 0, 0.008, 0, 0], - ] + ```json + "method": "cg", + "error_parameters":{ + "measure": "Linfinity", + "type": "absolute", + "tolerance": 1e-10 + }, + "n_it": 100 + ``` + + - `method`: This indicates the numerical method to be used for solving the system of equations. `cg` stands for the Conjugate Gradient method, and `fp` stands for the Fixed Point method. (AiiDA type: `Str`) + - `error_parameters`: This section defines the error parameters for the solver. Error control is applied on the finite element nodal residual of the problem. + - `measure`: Specifies the norm used to measure the error. Options include `Linfinity`, `L1`, or `L2`. (AiiDA type: `Str`) + - `type`: Defines the type of error measurement. Options are `absolute` or `relative`. (AiiDA type: `Str`) + - `tolerance`: Sets the tolerance level for the solver, defining the convergence criterion based on the chosen error measure. The solver iterates until the solution meets this tolerance. (AiiDA type: `Float`) + - `n_it`: Specifies the maximum number of iterations allowed for the FANS solver. (AiiDA type: `Int`) + + + ### Macroscale Loading Conditions + + ```json + "macroscale_loading": [ + [ + [0.004, -0.002, -0.002, 0, 0, 0], + [0.008, -0.004, -0.004, 0, 0, 0], + [0.012, -0.006, -0.006, 0, 0, 0], + [0.016, -0.008, -0.008, 0, 0, 0], + ], + [ + [0, 0, 0, 0.002, 0, 0], + [0, 0, 0, 0.004, 0, 0], + [0, 0, 0, 0.006, 0, 0], + [0, 0, 0, 0.008, 0, 0], ] - ``` - - - `macroscale_loading`: This defines the external loading applied to the microstructure. It is an array of arrays, where each sub-array represents a loading condition applied to the system. 
The format of the loading array depends on the problem type (AiiDA type: `ArrayData`): - - For `thermal` problems, the array typically has 3 components, representing the temperature gradients in the x, y, and z directions. - - For `mechanical` problems, the array must have 6 components, corresponding to the components of the strain tensor in Mandel notation (e.g., $[[ε_{11}, ε_{22}, ε_{33}, \sqrt{2} ε_{12}, \sqrt{2} ε_{13}, \sqrt{2} ε_{23}]]$). - - In the case of path/time-dependent loading as shown, for example as in plasticity problems, the `macroscale_loading` array can include multiple steps with corresponding loading conditions. - - ### Results Specification - - ```json - "results": [ - "stress", "strain", - "stress_average", "strain_average", - "phase_stress_average", "phase_strain_average", - "microstructure", - "displacement", - "absolute_error", - ] - ``` - - - `results`: This array lists the quantities that should be stored into the results HDF5 file during the simulation. Each string in the array corresponds to a specific result (AiiDA type: `List`): - - `stress` and `strain`: The stress and strain fields at each voxel in the microstructure. - - `stress_average` and `strain_average`: Volume averaged- homogenized stress and strain over the entire microstructure. - - `phase_stress_average` and `phase_strain_average`: Volume averaged- homogenized stress and strain for each phase within the microstructure. - - `microstructure`: The original microstructure data. - - `displacement`: The displacement fluctuation field (for mechanical problems) and temperature fluctuation field (for thermal problems). - - `absolute_error`: The L-infinity error of finite element nodal residual at each iteration. - - Additional material model specific results can be included depending on the problem type and material model. - """ + ] + ``` + + - `macroscale_loading`: This defines the external loading applied to the microstructure. 
It is an array of arrays, where each sub-array represents a loading condition applied to the system. The format of the loading array depends on the problem type (AiiDA type: `ArrayData`): + - For `thermal` problems, the array typically has 3 components, representing the temperature gradients in the x, y, and z directions. + - For `mechanical` problems, the array must have 6 components, corresponding to the components of the strain tensor in Mandel notation (e.g., $[[ε_{11}, ε_{22}, ε_{33}, \sqrt{2} ε_{12}, \sqrt{2} ε_{13}, \sqrt{2} ε_{23}]]$). + + In the case of path/time-dependent loading as shown, for example in plasticity problems, the `macroscale_loading` array can include multiple steps with corresponding loading conditions. + + ### Results Specification + + ```json + "results": [ + "stress", "strain", + "stress_average", "strain_average", + "phase_stress_average", "phase_strain_average", + "microstructure", + "displacement", + "absolute_error", + ] + ``` + + - `results`: This array lists the quantities that should be stored into the results HDF5 file during the simulation. Each string in the array corresponds to a specific result (AiiDA type: `List`): + - `stress` and `strain`: The stress and strain fields at each voxel in the microstructure. + - `stress_average` and `strain_average`: Volume-averaged (homogenized) stress and strain over the entire microstructure. + - `phase_stress_average` and `phase_strain_average`: Volume-averaged (homogenized) stress and strain for each phase within the microstructure. + - `microstructure`: The original microstructure data. + - `displacement`: The displacement fluctuation field (for mechanical problems) and temperature fluctuation field (for thermal problems). + - `absolute_error`: The L-infinity error of finite element nodal residual at each iteration. + + Additional material model specific results can be included depending on the problem type and material model. 
+ """ ) return @@ -536,10 +535,10 @@ def _(mo): def _(mo): mo.md( r""" - ## Submitting Jobs + ## Submitting Jobs - Now that AiiDA is suitably prepared and we're familiar with the FANS parameter specifications, its time to get to work. We will conduct a mock experiment to demonstrate the simplicity and flexibility that using the plugin offers. Breaking down the submission of jobs into two steps makes for a clean workflow. - """ + Now that AiiDA is suitably prepared and we're familiar with the FANS parameter specifications, its time to get to work. We will conduct a mock experiment to demonstrate the simplicity and flexibility that using the plugin offers. Breaking down the submission of jobs into two steps makes for a clean workflow. + """ ) return @@ -900,7 +899,8 @@ def node_definition( @app.cell def _(def_nodes_button, mo): - mo.md(rf""" + mo.md( + rf""" While the cell above defined all the parameters, they still need to be stored in the database. Otherwise, they will be lost when the session ends. AiiDA automatically stores nodes when submitting them to a job, but it is good practice to handle this yourself. Moreover, you get to see your database grow step by step. After clicking the button below, try running `verdi node list` in your terminal to see all the new additions we've made so far, and `verdi node show ` for more information about specific nodes. It is important to note that this time we did not make any checks through the QueryBuilder to ensure that indentical nodes don't already exist. This means that if you click the button below repeatedly, you *may* cause duplicate nodes to be created. Since these are some the first nodes we're making, it is not so critical, but in practice you would want to first fetch existing nodes you want to reuse before creating the remainder of the nodes you wish to study. 
@@ -912,7 +912,8 @@ def _(def_nodes_button, mo): node.store() # store each one in the database inputs.add_nodes(node) # assign each one to the "inputs" group ``` - """) + """ + ) return @@ -1333,10 +1334,10 @@ def _(mo): def _(mo): mo.md( r""" - ## A. `fetch()` + ## A. `fetch()` - This is a helper function to simplify the querying of individual nodes when the label and value are known. - """ + This is a helper function to simplify the querying of individual nodes when the label and value are known. + """ ) return