diff --git a/doc/source/_static/custom.css b/doc/source/_static/custom.css index c621c7b647..a778107dd9 100644 --- a/doc/source/_static/custom.css +++ b/doc/source/_static/custom.css @@ -2,3 +2,15 @@ .wy-table-responsive table td, .wy-table-responsive table th { white-space: normal; } +/* Check https://www.w3schools.com/cssref/css_colors.php for colors */ +/* Ansys gold for MAPDL with black text*/ +.sd-bg-mapdl{background-color: #FFB71B} +.sd-bg-text-mapdl{color: Black} +/* Ansys orange accent color for LS-DYNA with black text*/ +.sd-bg-lsdyna{background-color: #FB471F} +.sd-bg-text-lsdyna{color: Black} +/* Ansys blue accent color #0081D0 for Fluent with black text*/ +.sd-bg-fluent{background-color: #0081D0} +.sd-bg-text-fluent{color: Black} +.sd-bg-cfx{background-color: LightSeaGreen} +.sd-bg-text-cfx{color: Black} diff --git a/doc/source/conf.py b/doc/source/conf.py index 7f6b77d4da..869c55a1af 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -113,6 +113,8 @@ "sphinx_design", "sphinx_jinja", 'sphinx_reredirects', + "ansys_sphinx_theme.extension.autoapi", + "jupyter_sphinx", ] redirects = { @@ -156,7 +158,14 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] +exclude_patterns = ["links_and_refs.rst"] + +# make rst_epilog a variable, so you can add other epilog parts to it +rst_epilog = "" + +# Read links and targets from file +with open("links_and_refs.rst") as f: + rst_epilog += f.read() # The name of the Pygments (syntax highlighting) style to use. pygments_style = None @@ -377,6 +386,20 @@ def reset_servers(gallery_conf, fname, when): # A list of files that should not be packed into the epub file. 
epub_exclude_files = ["search.html"] +# Define custom docutils roles for solver badges +from sphinx_design.badges_buttons import BadgeRole + +def setup(app): + badge_roles = { + "bdg-mapdl": "mapdl", + "bdg-cfx": "cfx", + "bdg-fluent": "fluent", + "bdg-lsdyna": "lsdyna" + } + + for role_name, color in badge_roles.items(): + app.add_role(name=role_name, role=BadgeRole(color=color)) + # Common content for every RST file such us links rst_epilog = "" links_filepath = Path(__file__).parent.absolute() / "links.rst" @@ -405,4 +428,4 @@ def reset_servers(gallery_conf, fname, when): BUILD_EXAMPLES = True if os.environ.get("BUILD_EXAMPLES", "true") == "true" else False if BUILD_EXAMPLES: - extensions.extend(["sphinx_gallery.gen_gallery"]) \ No newline at end of file + extensions.extend(["sphinx_gallery.gen_gallery"]) diff --git a/doc/source/getting_started/contributing.rst b/doc/source/getting_started/contributing.rst index 19baac5e63..13e057fab7 100644 --- a/doc/source/getting_started/contributing.rst +++ b/doc/source/getting_started/contributing.rst @@ -3,10 +3,19 @@ Contributing ############ +.. include:: ../links_and_refs.rst + +There are several ways to contribute to PyDPF-Core: + +- :ref:`ref_contributing_answer_discussions` +- :ref:`ref_contributing_post_issues` +- :ref:`ref_contributing_develop_code` +- :ref:`ref_contributing_improve_doc` + Overall guidance on contributing to a PyAnsys repository appears in -`Contribute `_ -in the *PyAnsys Developer's Guide*. Ensure that you are thoroughly familiar -with this guide before attempting to contribute to PyDPF-Core. +`Contributing `_ in the *PyAnsys Developer's Guide*. +Ensure that you are thoroughly familiar with this guide before attempting +to contribute to PyDPF-Core. .. 
important:: diff --git a/doc/source/getting_started/install.rst b/doc/source/getting_started/install.rst index 628ab2a8b7..6bb908dc7f 100644 --- a/doc/source/getting_started/install.rst +++ b/doc/source/getting_started/install.rst @@ -4,10 +4,12 @@ Installation ************ +.. include:: ../links_and_refs.rst + Install using ``pip`` --------------------- -The standard package installer for Python is `pip `_. +The standard package installer for Python is `pip `_. To use PyDPF-Core with Ansys 2022 R2 or later, install the latest version with this command: @@ -16,7 +18,7 @@ with this command: pip install ansys-dpf-core -PyDPF-Core plotting capabilities require you to have `PyVista `_ installed. +PyDPF-Core plotting capabilities require you to have `PyVista `_ installed. To install PyDPF-Core with its optional plotting functionalities, run this command: .. code:: @@ -58,7 +60,7 @@ Install without internet If you are unable to install PyDPF-Core on the host machine using ``pip`` due to network isolation, download the wheelhouse corresponding to your platform and Python interpreter version -for the latest release of PyDPF-Core from the assets section of the `latest PyDPF-Core release on GitHub `_. +for the latest release of PyDPF-Core from the assets section of the `latest PyDPF-Core release on GitHub `_. The wheelhouse is a ZIP file containing Python wheels for all the packages PyDPF-Core requires to run. To install PyDPF-Core using the downloaded wheelhouse, unzip the wheelhouse to a local directory, @@ -70,8 +72,8 @@ then use the following command from within this local directory: Note that PyDPF-Core wheelhouses do not include the optional plotting dependencies. To use the plotting capabilities, also download the wheels corresponding to your platform and Python interpreter version -for `PyVista `_ and -`matplotlib `_. Then, place them in the same local directory and run the preceding command. +for `PyVista `_ and +`matplotlib `_. 
Then, place them in the same local directory and run the preceding command. Install in development mode diff --git a/doc/source/getting_started/write_code/index.rst b/doc/source/getting_started/write_code/index.rst new file mode 100644 index 0000000000..bb024e15a6 --- /dev/null +++ b/doc/source/getting_started/write_code/index.rst @@ -0,0 +1,63 @@ +.. _ref_write_code: + +============ +Develop code +============ + +.. include:: ../../links_and_refs.rst + +You can help improve PyDPF-Core by fixing a bug. To do it, you must set up the repository +on your local machine as per the following steps: + +- :ref:`ref_write_code_install` +- :ref:`ref_write_code_clone` +- :ref:`ref_write_code_check_install` +- :ref:`ref_write_code_develop_code` + +.. _ref_write_code_install: + +Install the repository and the DPF server +----------------------------------------- + +Install the PyDPF-Core repository by following the steps in :ref:`installation` and :ref:`ref_dpf_server`. + +.. _ref_write_code_clone: + +Clone the repository +-------------------- + +Before cloning the PyDPF-Core repository, you must install a version control system such as Git. + +Then, clone the latest version of PyDPF-Core in development mode (using ``pip`` with the ``-e`` +development flag) by running this code: + +.. code:: + + git clone https://github.com/ansys/pydpf-core + cd pydpf-core + pip install -e . + +.. _ref_write_code_check_install: + +Check the installation +---------------------- + +Run the following Python code to verify your PyDPF-Core installation: + +.. code:: + + from ansys.dpf.core import Model + from ansys.dpf.core import examples + model = Model(examples.find_simple_bar()) + print(model) + +.. _ref_write_code_develop_code: + +Develop the PyDPF-Core code +--------------------------- + +Overall guidance on contributing to the code of a PyAnsys repository appears in +`Contributing `_ in the *PyAnsys Developer's Guide*. 
+ +You must also follow the `Coding style `_ guide to ensure +that all source code looks the same across the project. \ No newline at end of file diff --git a/doc/source/getting_started/write_doc/guidelines_tutorials.rst b/doc/source/getting_started/write_doc/guidelines_tutorials.rst new file mode 100644 index 0000000000..73cf3af39b --- /dev/null +++ b/doc/source/getting_started/write_doc/guidelines_tutorials.rst @@ -0,0 +1,695 @@ +.. _ref_guidelines_tutorials: + +================= +Writing tutorials +================= + +.. include:: ../../links_and_refs.rst + +You can improve the PyDPF-Core documentation by adding a: + +- :ref:`New tutorials section`; +- :ref:`New tutorial`. + +To do so, you must follow the guidelines presented here. + +You also need to understand the structure of the ``doc`` directory on the PyDPF-Core library: + +.. code-block:: + + . + ├── doc + │ ├── source + │ │ ├── api + │ │ ├── examples + │ │ ├── getting_started + │ │ ├── images + │ │ ├── user_guide + │ │ ├── conf.py + │ │ ├── index.rst + │ ├── styles + │ ├── make.bat + + +Tutorials are located in the ``doc/source/user_guide`` directory. + +.. _ref_guidelines_add_new_tutorial_section: + +============================= +Adding a new tutorial section +============================= + +:download:`Download the new tutorial section template` + +.. note:: + + Avoid creating new folders unless absolutely necessary. + When in doubt, mention the location of the new section in the pull request for approval. + If you must create a new folder, make sure to add an ``index.rst`` file with a reference, a title, and a description of the section. + The documentation ignores folders lacking this file. + +Location and naming +------------------- + +The new tutorial section must reside in a new folder such as ``doc/source/user_guide/tutorials/new_section_name``. + +.. code-block:: + + . 
+ ├── doc + │ ├── source + │ │ ├── user_guide + │ │ │ ├── tutorials + │ │ │ ├── new_section + +Structure +--------- + +The section folder must contain an ``index.rst`` file with: + +- a reference tag for referencing this section in other parts of the documentation, +- a title for the tutorial section, +- a general description of the topics covered in the tutorials in this section, +- cards with links to the tutorials, titles, descriptions and applicable solvers, +- a ``Toctree`` for the tutorials in the section to appear in the navigation pane. + +.. literalinclude:: tutorial_section_template.rst + +You must reference the new section ``index.rst`` file in the main user guide page toctree +for it to appear in the sidebar of the user guide main page. You can find this toctree +at the end of the ``doc/source/user_guide/index.rst`` file. +For example: + +.. code-block:: + + .. toctree:: + :maxdepth: 2 + :hidden: + :caption: Tutorials + + tutorials/section_x/index.rst + tutorials/section_y/index.rst + tutorials/section_z/index.rst + tutorials/new_section/index.rst + +.. _ref_guidelines_add_new_tutorial: + +===================== +Adding a new tutorial +===================== + +:download:`Download the tutorial card template` +:download:`Download the tutorial structure template` +:download:`Download the tutorial content formating template` + +Location and naming +------------------- + +New tutorials correspond to new ``.rst`` files in tutorial section folders, +for example: ``doc/source/user_guide/tutorials/section/new_tutorial.rst`` + +.. code-block:: + + . + ├── doc + │ ├── source + │ │ ├── user_guide + │ │ │ ├── tutorials + │ │ │ ├── section + │ │ │ ├── new_tutorial.rst + +You must also add a new card in the ``index.rst`` file for the tutorial section as well as modify +its toctree. The card must include: + +- a tutorial title, +- a short description, +- badges for the applicable solvers, +- a link (in this case, the reference tag) to the tutorial file. + +.. 
topic:: Card example + + .. card:: Tutorial title + :text-align: center + :width: 25% + + Short description of the tutorial + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +Structure +--------- + +The tutorial is divided in two main parts: + +- :ref:`Preamble` +- :ref:`Content` + +.. _ref_guidelines_tutorial_header: + +Header +^^^^^^ + +This first part is essential for clarity, organization and usability of the tutorial. It establishes the purpose +of the tutorial, making it easier to understand what is going to be explained and reference it within the other parts of +the documentation. + +The header must have : + +- a reference tag, +- a tutorial title, +- any substitution text for references to the PyDPF-Core library used in the tutorial, +- a short description (same as for the tutorial card in the tutorial section), +- an introduction, +- download buttons for Python script and Jupyter notebook versions of the tutorial. + +.. literalinclude:: tutorial_structure_template.rst + :end-before: First Step + +The main PyDPF-Core library references are available in the ``doc/source/links_and_refs.rst`` file. +To add a reference, use the substitution text as usual: + +.. code-block:: + + .. _ref_tutorial_template: + + + ============== + Tutorial title + ============== + + Here some text. Here we use the |MeshedRegion| substitution text + +For more information about the predefined references, see the +:download:`links and references file <../../links_and_refs.rst>`. + +.. _ref_guidelines_tutorial_content: + +Content +^^^^^^^ + +The goal of a tutorial is to present a feature or explain how to perform a common task step by step while explaining a behavior or underlying concepts. +Thus, its structure must prioritize clarity, simplicity, and logical flow. + +Sections +~~~~~~~~ + +A well-organized tutorial breaks down complex tasks into manageable steps, presenting information incrementally +to avoid overwhelming the user. 
It combines concise explanations with actionable instructions, ensuring users +can follow along easily while building their understanding. + +Thus, the sections of the content are the steps themselves. These steps are generally similar to: + +#. A first step where you get some data and create DPF objects based on the data; +#. One or more steps where you manipulate the data or the DPF objects; +#. A final step where you reach the objective of the tutorial and obtain the expected result. + +For example: + +A tutorial explains how to plot a mesh using PyDPF-Core. +The steps to achieve this task are: + +#. Import a result file; +#. Extract the mesh; +#. Plot the mesh. + +To create those section, underline it with the appropriate characters (here: ``-``). + +.. code-block:: + + Import result file + ------------------ + + First, you ... + + + Extract the mesh + ---------------- + + Then, you extract ... + + + Plot the mesh + ------------- + + Finally, you plot ... + +Tabs +~~~~ + +You must use tabs when a step requires a solver-specific implementation. + +These tabs looks like: + +.. tab-set:: + + .. tab-item:: MAPDL + + Explanation 1 ... + + .. jupyter-execute:: + + # Code block 1 + + .. tab-item:: LSDYNA + + Explanation 2 ... + + .. jupyter-execute:: + + # Code block 2 + + .. tab-item:: Fluent + + Explanation 3 ... + + .. jupyter-execute:: + + # Code block 3 + + .. tab-item:: CFX + + Explanation 4 ... + + .. jupyter-execute:: + + # Code block 4 + + +You can also use tabs if you want to show different approaches to one step and it having the code blocks +in different tabs is clearer. You can see an example of this in the +:ref:`ref_tutorials_animate_time` tutorial. + + +Code blocks +~~~~~~~~~~~ + +The tutorials must have code blocks where you show how you actually implement the code. +In addition to the guidelines presented here, you must also follow the `Coding style `_ +guide to ensure that all code looks the same across the project. 
+ +- Use the `jupyter sphinx `_ extension to show code blocks. It executes embedded code in + a Jupyter kernel and embeds outputs of that code in the document: + +.. grid:: 2 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: + + :octicon:`check-circle-fill` **Correct** + + .. code-block:: + + .. jupyter-execute:: + + # This is a executable code block + from ansys.dpf import core as dpf + + .. grid-item-card:: + + :octicon:`x-circle-fill` **Incorrect** + + .. code-block:: + + .. code-block:: + + # This is a simple code block + from ansys.dpf import core as dpf + +- Use comments within a code block to clarify the purpose of a line: + +.. grid:: 2 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: + + :octicon:`check-circle-fill` **Correct** + + .. code-block:: + + # Define the model + model = dpf.Model() + + # Get the stress results + stress_fc = model.results.stress.eval() + + .. grid-item-card:: + + :octicon:`x-circle-fill` **Incorrect** + + .. code-block:: + + model = dpf.Model() + stress_fc = model.results.stress.eval() + +- Split your code in several parts to include longer explanations in text format or force showing an intermediate code output: + +.. grid:: 2 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: + + :octicon:`check-circle-fill` **Correct** + + Explanation for a first code block and its output + + .. code-block:: + + # Code comment 1 + code1 + + Explanation for a second code block and its output + + .. code-block:: + + # Code comment 2 + code2 + + .. grid-item-card:: + + :octicon:`x-circle-fill` **Incorrect** + + A single broad explanation for two steps with outputs mixed together + + .. code-block:: + + # First explanation + # Code comment 1 + code1 + + # Second explanation + # Code comment 2 + code2 + +- When using a PyDPF-Core object or method you must name arguments: + +.. grid:: 2 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: + + :octicon:`check-circle-fill` **Correct** + + .. 
code-block:: + + # Get the stress results + stress_fc = model.results.stress(time_scoping=time_steps).eval() + + .. grid-item-card:: + + :octicon:`x-circle-fill` **Incorrect** + + .. code-block:: + + # Get the stress results + stress_fc = model.results.stress(time_steps).eval() + +- When quoting APIs in the code comments you must always use their scripting name. Mind the use of + a capital letter to name the DPF objects + +.. grid:: 2 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: + + :octicon:`check-circle-fill` **Correct** + + .. code-block:: + + # Define the DataSources object + ds = dpf.DataSources() + + .. grid-item-card:: + + :octicon:`x-circle-fill` **Incorrect** + + .. code-block:: + + # Define the data sources object + ds = dpf.DataSources() + + .. code-block:: + + # Define the Data Sources object + ds = dpf.DataSources() + +- Use blank lines between code lines for better clarity. + +.. grid:: 2 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: + + :octicon:`check-circle-fill` **Correct** + + .. code-block:: + + # Define the result file path + result_file_path_1 = '/tmp/file.rst' + + # Define the DataSources object + ds_1 = dpf.DataSources(result_path=result_file_path_1) + + # Create a Model + model_1 = dpf.Model(data_sources=ds_1) + + # Get the stress results + stress_fc = model_1.results.stress.eval() + + .. grid-item-card:: + + :octicon:`x-circle-fill` **Incorrect** + + .. code-block:: + + # Define the result file path + result_file_path_1 = '/tmp/file.rst' + # Define the DataSources object + ds_1 = dpf.DataSources(result_path=result_file_path_1) + # Create a Model + model_1 = dpf.Model(data_sources=ds_1) + # Get the stress results + stress_fc = model_1.results.stress.eval() + +- Avoid naming the variables with the same name as an argument or an API. You can get inspirations from the + tutorials available at :ref:`ref_tutorials`. + +.. grid:: 2 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. 
grid-item-card:: + + :octicon:`check-circle-fill` **Correct** + + .. code-block:: + + # Define the result file path + result_file_path = '/tmp/file.rst' + + # Define the DataSources object + ds = dpf.DataSources(result_path=result_file_path) + + # Create a Model + my_model = dpf.Model(data_sources=ds) + + .. grid-item-card:: + + :octicon:`x-circle-fill` **Incorrect** + + .. code-block:: + + # Define the result file path + result_path = '/tmp/file.rst' + + # Define the DataSources object + data_sources = dpf.DataSources(result_path=result_path) + + # Create a Model + model = dpf.Model(data_sources=data_sources) + +Text formating +~~~~~~~~~~~~~~ + +In addition to the guidelines presented here, you must also follow the `Documentation style `_ +guide to ensure that the tutorials follow a coherent writing style across the project. + +- When quoting APIs in the text you must always use a reference to redirect it to the API reference + +.. grid:: 2 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: + + :octicon:`check-circle-fill` **Correct** + + .. code-block:: + + Here we use the |MeshedRegion| substitution text + + **Rendered text:** + + Here is some text. Here we use the |MeshedRegion| substitution text + + .. grid-item-card:: + + :octicon:`x-circle-fill` **Incorrect** + + .. code-block:: + + Here we do not use the MeshedRegion substitution text + + **Rendered text:** + + Here is some text. Here we do not use the MeshedRegion substitution text + +- Use bullet lists when enumerating items: + +.. grid:: 2 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: + + :octicon:`check-circle-fill` **Correct** + + .. code-block:: + + This operator accepts as arguments: + + - A Result + - An Operator + - A FieldsContainer + + .. grid-item-card:: + + :octicon:`x-circle-fill` **Incorrect** + + .. code-block:: + + This operator accepts a Result, an Operator or a + FieldsContainer as arguments. + +- Use a numbered list for ordered items: + +.. 
grid:: 2 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: + + :octicon:`check-circle-fill` **Correct** + + .. code-block:: + + To extract the mesh you need to follow those steps: + + #. Get the result file; + #. Create a Model; + #. Get the MeshedRegion. + + The ``#.`` renders as a numbered list. + + .. grid-item-card:: + + :octicon:`x-circle-fill` **Incorrect** + + .. code-block:: + + To extract the mesh you need to follow those steps: + + - Get the result file; + - Create a Model; + - Get the MeshedRegion. + +- If you need to develop explanations for each item of the list, first, enumerate and reference them. Then, + explore each of them separately in sub headings. + +.. grid:: 2 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: + + :octicon:`check-circle-fill` **Correct** + + .. code-block:: + + Section title + ------------- + + This section presents two items: + + - :ref:`Item 1 ` + - :ref:`Content` + + + .. _ref_tutorial_name_item_1: + + Item 1 + ^^^^^^ + + Presentation of the first item... + + + .. _ref_tutorial_name_item_2: + + Item 2 + ^^^^^^ + + Presentation of the second item... + + .. grid-item-card:: + + :octicon:`x-circle-fill` **Incorrect** + + .. code-block:: + + Section title + ------------- + + This section presents two items: + + - Item 1 + - Item 2 + + Item 1 + ^^^^^^ + Presentation of the first item... + + Item 2 + ^^^^^^ + Presentation of the second item... + + + .. code-block:: + + Section title + ------------- + + This section presents two items: + + - Item 1 + Presentation of the first item... + + + - Item 2 + Presentation of the second item... diff --git a/doc/source/getting_started/write_doc/index.rst b/doc/source/getting_started/write_doc/index.rst new file mode 100644 index 0000000000..9462673183 --- /dev/null +++ b/doc/source/getting_started/write_doc/index.rst @@ -0,0 +1,138 @@ +.. _ref_write_doc: + +============= +Documentation +============= + +.. 
include:: ../../links_and_refs.rst + +Overall guidance on contributing to the documentation of a PyAnsys repository appears in +`Documenting `_ in the *PyAnsys Developer's Guide*. + +You must also follow the `Documentation style `_ guide to +ensure that all the documentation looks the same across the project. + +To improve the documentation you need to: + +- Start by `cloning the repository `_; +- Follow the `guidelines `_ to the corresponding documentation part you want to develop; +- Check the new documentation by `viewing the documentaion `_ + +Clone the repository +-------------------- + +Clone and install the latest version of PyDPF-Core in +development mode by running this code: + +.. code:: + + git clone https://github.com/ansys/pydpf-core + cd pydpf-core + pip install -e . + + +Guidelines +---------- + +Our documentation tries to follow a structure principle that respects four different functions of the documentation. +Each of them fulfills a different need for people working with our tool at different times, in different circumstances. + +Here is an overview of how our documentation is organized to help you know where you should include your contributions. +Each section has their own guidelines that must be followed when creating new content. + +.. grid:: 1 1 2 2 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: **TUTORIALS** + :link: ref_guidelines_tutorials + :link-type: ref + :class-title: sd-text-center sd-bg-light + :class-header: sd-text-center + + Learning oriented + ^^^^^^^^^^^^^^^^^ + + **Function:** Teach how to get started and use PYDPF-core step by step + + They are designed to teach how to perform a task and understand the underlying concepts, + providing detailed explanations at each stage. The task is built around the application of specific features. + + +++ + .. rubric:: Guidelines + + Here you find guidelines and templates to write new tutorials. + + .. 
grid-item-card:: **EXAMPLES** + :link: ref + :link-type: ref + :class-title: sd-text-center sd-bg-light + :class-header: sd-text-center + + Use-cases oriented + ^^^^^^^^^^^^^^^^^^ + + **Function:** Show how to solve specifics key problems + + They showcase specific key problems and use-cases. They are more advanced than + tutorials as they present end-to-end engineering workflows and assume basic knowledge of PyDPF-Core. + + +++ + .. rubric:: Guidelines + + Here you find guidelines and templates to write new examples. + + .. grid-item-card:: **CONCEPTS** + :link: ref + :link-type: ref + :class-title: sd-text-center sd-bg-light + :class-header: sd-text-center + + Understanding oriented + ^^^^^^^^^^^^^^^^^^^^^^ + + **Function:** Provide useful theoretical explanations for PyDPF-Core + + They discuss and explain key DPF principles and concepts, enabling the reader to understand the spirit of the underlying tool. + + +++ + .. rubric:: Guidelines + + Here you find guidelines and templates to write more concepts. + + + .. grid-item-card:: **API REFERENCE** + :link: ref + :link-type: ref + :class-title: sd-text-center sd-bg-light + :class-header: sd-text-center + + Informing oriented + ^^^^^^^^^^^^^^^^^^ + + **Function:** Describe PyDPF-Core APIs + + They contain technical reference on how PyDPF-Core works and how to use it but assume basic + understanding of key DPF concepts. It is generated automatically along the documentation and + is based on the source code. + + +++ + .. rubric:: Guidelines + + Here you find guidelines and templates to improve the API reference. + +View the documentation +---------------------- + +Documentation for the latest stable release of PyDPF-Core is hosted at +`PyDPF-Core Documentation `_. + +You can locally build the documentation by following the steps in +`Contributing `_ in the *PyAnsys Developer's Guide*. + +.. 
toctree:: + :maxdepth: 2 + :hidden: + + guidelines_tutorials.rst diff --git a/doc/source/getting_started/write_doc/tutorial_card_template.rst b/doc/source/getting_started/write_doc/tutorial_card_template.rst new file mode 100644 index 0000000000..646a9e98c8 --- /dev/null +++ b/doc/source/getting_started/write_doc/tutorial_card_template.rst @@ -0,0 +1,9 @@ +.. grid-item-card:: Tutorial title + :link: ref + :link-type: ref + :text-align: center + + This tutorial ... + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` \ No newline at end of file diff --git a/doc/source/getting_started/write_doc/tutorial_content_template.rst b/doc/source/getting_started/write_doc/tutorial_content_template.rst new file mode 100644 index 0000000000..a9ab884794 --- /dev/null +++ b/doc/source/getting_started/write_doc/tutorial_content_template.rst @@ -0,0 +1,96 @@ + +Tabs for different solvers +-------------------------- + +Showcase a different script for each supported solvers + +.. tab-set:: + + .. tab-item:: MAPDL + + Explanation ... + + .. jupyter-execute:: + + # Code block + + .. tab-item:: LSDYNA + + Explanation ... + + .. jupyter-execute:: + + # Code block + + .. tab-item:: Fluent + + Explanation ... + + .. jupyter-execute:: + + # Code block + + .. tab-item:: CFX + + Explanation ... + + .. jupyter-execute:: + + # Code block + +Bullet lists +------------ + +Enumerate something: + +- something 1; +- something 2; +- something 3. + +Enumerate something with a numbered list: + +#. something 1; +#. something 2; +#. something 3. + +Bullet lists with explanations between items +-------------------------------------------- + +Enumerate something and reference them to use each item as a subheading: + +- :ref:`Something 1`; +- :ref:`Something 2`; +- :ref:`Something 3`. + +.. _ref_something_1: + +Something 1 +^^^^^^^^^^^ + +Explanation 1 + +.. jupyter-execute:: + + # Code block 1 + +.. _ref_something_2: + +Something 2 +^^^^^^^^^^^ + +Explanation 2 + +.. 
jupyter-execute:: + + # Code block 2 + +.. _ref_something_3: + +Something 3 +^^^^^^^^^^^ + +Explanation 3 + +.. jupyter-execute:: + + # Code block 3 diff --git a/doc/source/getting_started/write_doc/tutorial_section_template.rst b/doc/source/getting_started/write_doc/tutorial_section_template.rst new file mode 100644 index 0000000000..16ea42a6a3 --- /dev/null +++ b/doc/source/getting_started/write_doc/tutorial_section_template.rst @@ -0,0 +1,28 @@ +.. _ref_tutorial_new_section_template: + +============= +Section title +============= + +These tutorials demonstrate how to ... + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Tutorial title + :link: ref + :link-type: ref + :text-align: center + + This tutorial ... + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +.. toctree:: + :maxdepth: 2 + :hidden: + + tutorial_file.rst \ No newline at end of file diff --git a/doc/source/getting_started/write_doc/tutorial_structure_template.rst b/doc/source/getting_started/write_doc/tutorial_structure_template.rst new file mode 100644 index 0000000000..8b7536c1cb --- /dev/null +++ b/doc/source/getting_started/write_doc/tutorial_structure_template.rst @@ -0,0 +1,41 @@ +.. _ref_tutorial_template: + +============== +Tutorial title +============== + +.. |displacement_op| replace:: :class:`ansys.dpf.core.operators.result.displacement.displacement` + +A single sentence describing the goal of the tutorial, which must match the one on the tutorial card in the section page. + +Introduction to the tutorial. Here, you provide the necessary context or foundational information for understanding the tutorial. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +First Step +---------- + +First, you ... + +.. jupyter-execute:: + + # Code block 1 + +Second step +----------- + +Then, you ... + +.. 
jupyter-execute:: + + # Code block 2 + +Final Step +---------- + +Finally, you ... + +.. jupyter-execute:: + + # Code block 3 diff --git a/doc/source/links_and_refs.rst b/doc/source/links_and_refs.rst new file mode 100644 index 0000000000..5cf85c2d2f --- /dev/null +++ b/doc/source/links_and_refs.rst @@ -0,0 +1,67 @@ +.. _ref_links_and_refs: + +.. LINKS + +.. PyDPF-Core +.. _pydpfcore_issues: https://github.com/ansys/pydpf-core/issues +.. _pydpfcore_discussions: https://github.com/ansys/pydpf-core/discussions +.. _pydpfcore_latest_release: https://github.com/ansys/pydpf-core/releases/latest +.. _pydpfcore_documentation: https://dpf.docs.pyansys.com/ + +.. Pyansys +.. _pyansys: https://docs.pyansys.com/version/dev/ + +.. PyAnsys Developer Guide +.. _dev_guide_pyansys: https://dev.docs.pyansys.com +.. _dev_guide_contributing: https://dev.docs.pyansys.com/how-to/contributing.html +.. _dev_guide_coding_style: https://dev.docs.pyansys.com/coding-style/index.html +.. _dev_guide_setup_your_environment: https://dev.docs.pyansys.com/how-to/setting-up.html +.. _dev_guide_branch_names: https://dev.docs.pyansys.com/how-to/contributing.html#branch-naming-conventions +.. _dev_guide_commit_names: https://dev.docs.pyansys.com/how-to/contributing.html#commit-naming-conventions +.. _dev_guide_doc_style: https://dev.docs.pyansys.com/doc-style/index.html +.. _dev_guide_documenting: https://dev.docs.pyansys.com/how-to/documenting.html# + +.. Other libraries documentations +.. _pyvista_docs: https://docs.pyvista.org/version/stable/ +.. _pyvista_doc_plot_method: https://docs.pyvista.org/api/plotting/_autosummary/pyvista.plot.html#pyvista.plot +.. _pyvista_org: https://pyvista.org/ +.. _jupyter: https://jupyter.org/ +.. _numpy_org: https://numpy.org/ +.. _numpy_docs: https://numpy.org/doc/stable/ +.. _jupyter_sphinx_ext: https://jupyter-sphinx.readthedocs.io/en/latest/ + +.. Other libraries repos +.. _pyvista_github : https://github.com/pyvista/pyvista +.. 
_matplotlib_github : https://github.com/matplotlib/matplotlib + +.. External links +.. _sphinx: https://www.sphinx-doc.org/en/master/ +.. _sphinx_directives: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html +.. _sphinx_basics: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html +.. _vale: https://www.vale.sh +.. _docutils_directives: https://docutils.sourceforge.io/docs/ref/rst/directives.html +.. _numpy_sphinx_ext_doc: https://numpydoc.readthedocs.io/en/latest/ +.. _pip_pypi_page: https://pypi.org/project/pip/ +.. _pyvista_download_files: https://pypi.org/project/pyvista/#files +.. _matplotlib_download_files: https://pypi.org/project/matplotlib/#files + +.. REFERENCES + +.. Main DPF objects +.. |Examples| replace:: :mod:`Examples` +.. |Field| replace:: :class:`Field` +.. |FieldsContainer| replace:: :class:`FieldsContainer` +.. |Model| replace:: :class:`Model ` +.. |DataSources| replace:: :class:`DataSources ` +.. |Scoping| replace:: :class:`Scoping ` +.. |ScopingsContainer| replace:: :class:`ScopingsContainer ` +.. |MeshedRegion| replace:: :class:`MeshedRegion ` +.. |MeshesContainer| replace:: :class:`MeshesContainer ` +.. |MeshInfo| replace:: :class:`MeshInfo ` +.. |Nodes| replace:: :class:`Nodes ` +.. |Elements| replace:: :class:`Elements ` +.. |Faces| replace:: :class:`Faces ` +.. |DpfPlotter| replace:: :class:`DpfPlotter` +.. |Result| replace:: :class:`Result ` +.. |Operator| replace:: :class:`Operator` +.. |TimeFreqSupport| replace:: :class:`TimeFreqSupport ` \ No newline at end of file diff --git a/doc/source/user_guide/index.rst b/doc/source/user_guide/index.rst index 7d1dfab4ae..4cc30c3dad 100644 --- a/doc/source/user_guide/index.rst +++ b/doc/source/user_guide/index.rst @@ -4,24 +4,24 @@ User guide ========== -PyDPF-Core is a Python client API for accessing DPF postprocessing -capabilities. 
The ``ansys.dpf.core`` package makes highly efficient -computation, customization, and remote postprocessing accessible in Python. +**DPF** provides numerical simulation users and engineers with a toolbox for accessing and +transforming data. -The goals of this section are to: +**PyDPF-Core** is a Python client API for accessing DPF +capabilities. The ``ansys.dpf.core`` package makes highly efficient +computation, customization, and remote data processing accessible in Python. - - Describe the most-used DPF entities and how they can help you to access and modify solver data. - - Provide simple how-tos for tackling the most common use cases. +The goals of this section are to: -.. include:: - concepts/index.rst + - Describe some DPF entities and how they can help you to access and modify solver data. + - Provide detailed tutorials to demonstrate PyDPF-Core functionalities. + - Explain how to resolve the most common issues encountered when using PyDPF-Core .. include:: - main_entities.rst + tutorials/index.rst .. include:: - how_to.rst - + concepts/index.rst Troubleshooting --------------- @@ -52,6 +52,27 @@ Troubleshooting :text-align: center +.. toctree:: + :maxdepth: 2 + :hidden: + :caption: Tutorials + + tutorials/data_structures/index.rst + tutorials/language_and_usage/index.rst + tutorials/post_processing_basics/index.rst + tutorials/import_data/index.rst + tutorials/mesh/index.rst + tutorials/operators_and_workflows/index.rst + tutorials/export_data/index.rst + tutorials/plot/index.rst + tutorials/animate/index.rst + tutorials/enriching_dpf_capabilities/index.rst + tutorials/distributed_files/index.rst + tutorials/dpf_server/index.rst + tutorials/licensing/index.rst + tutorials/mathematics/index.rst + tutorials/manipulate_physics_data/index.rst + .. toctree:: :maxdepth: 2 :hidden: @@ -61,30 +82,6 @@ Troubleshooting concepts/waysofusing.rst concepts/stepbystep.rst - -.. 
toctree:: - :maxdepth: 2 - :hidden: - :caption: DPF most-used entities - - model - operators - fields_container - - -.. toctree:: - :maxdepth: 2 - :hidden: - :caption: How-tos - - plotting.rst - custom_operators.rst - dpf_server.rst - server_types.rst - server_context.rst - xmlfiles.rst - - .. toctree:: :maxdepth: 3 :hidden: diff --git a/doc/source/user_guide/tutorials/animate/animate_time.rst b/doc/source/user_guide/tutorials/animate/animate_time.rst new file mode 100644 index 0000000000..4033589ae5 --- /dev/null +++ b/doc/source/user_guide/tutorials/animate/animate_time.rst @@ -0,0 +1,358 @@ +.. _ref_tutorials_animate_time: + +====================== +Animate data over time +====================== + +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst +.. |Animator| replace:: :class:`Animator` +.. |animate| replace:: :func:`FieldsContainer.animate() ` +.. |Workflow| replace:: :class:`Workflow` +.. |Elemental| replace:: :class:`elemental` +.. |ElementalNodal| replace:: :class:`elemental_nodal` +.. |Nodal| replace:: :class:`nodal` +.. |Overall| replace:: :class:`overall` +.. |open_movie| replace:: :class:`pyvista.Plotter.open_movie` + +This tutorial demonstrates how to create 3D animations of data in time. + +:jupyter-download-script:`Download tutorial as Python script` :jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +To animate data across time, you must store the data in a |FieldsContainer| with a ``time`` label. + + +Get the result files +-------------------- + +First, import a results file. For this tutorial, you can use the one available in the |Examples| module. +For more information about how to import your own result file in DPF, see +the :ref:`ref_tutorials_import_data` tutorial section. + +.. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path = examples.find_msup_transient() + # Create the model + model = dpf.Model(data_sources=result_file_path) + +Define a time scoping +--------------------- + +To animate across time, you must define the time steps you are interested in. +This tutorial retrieves all the time steps available in |TimeFreqSupport|, but you can also filter them. +For more information on how to define a scoping, see the ``Narrow down data`` tutorial in the +:ref:`ref_tutorials_import_data` tutorials section. + +.. jupyter-execute:: + + # Get a scoping of all time steps available + time_steps = model.metadata.time_freq_support.time_frequencies + +Extract the results +------------------- + +Extract the results to animate. In this tutorial, you extract the displacement and stress results. + +.. note:: + + Only the |Elemental|, |Nodal|, or |Faces| locations are supported for animations. + |Overall| and |ElementalNodal| locations are not currently supported. + + +.. jupyter-execute:: + + # Get the displacement fields (already on nodes) at all time steps + disp_fc = model.results.displacement(time_scoping=time_steps).eval() + print(disp_fc) + +.. jupyter-execute:: + + # Get the stress fields on nodes at all time steps + # Request the stress on |Nodal| location as the default |ElementalNodal| location is not supported. + stress_fc = model.results.stress.on_location(location=dpf.locations.nodal).on_time_scoping(time_scoping=time_steps).eval() + print(stress_fc) + +Animate the results +------------------- + +Animate the results with the |animate| method. +You can animate them on a deformed mesh (animate the color map and the mesh) +or on a static mesh (animate the color map only). 
+ +The default behavior of the |animate| method is to: + +- Display the norm of the data components; +- Display data at the top layer for shells; +- Display the deformed mesh when animating displacements; +- Display the static mesh for other types of results; +- Use a constant and uniform scale factor of 1.0 when deforming the mesh. + +You can animate any result on a deformed geometry by providing displacement results in the `deform_by` parameter. + +The geometry can be deformed by a |Result| object, an |Operator| (It must evaluate to a |FieldsContainer| +of same length as the one being animated), or a |FieldsContainer| (also of same length as the one being animated). + +.. note:: + + The behavior of the |animate| method is defined by a |Workflow| it creates and feeds to an |Animator|. + This |Workflow| loops over a |Field| of frame indices and for each frame generates a field of norm contours + to render, as well as a displacement field to deform the mesh if `deform_by` is provided. + For more information on plots on deformed meshes see: :ref:`ref_plotting_data_on_deformed_mesh`. + + +Animate the displacement results +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Use |animate| with the displacement results. + +.. tab-set:: + + .. tab-item:: Deformed mesh + + .. jupyter-execute:: + :hide-output: + + # Animate the displacement results in a deformed geometry + disp_fc.animate() + + .. jupyter-execute:: + :hide-code: + :hide-output: + + disp_fc.animate(off_screen=True,save_as="doc/source/user_guide/tutorials/animate/animate_disp_1.gif") + + .. image:: animate_disp_1.gif + :scale: 50 % + :align: center + + .. tab-item:: Static mesh + + .. jupyter-execute:: + :hide-output: + + # Animate the displacement results on a static mesh using ``deform_by=False`` + disp_fc.animate(deform_by=False) + + .. jupyter-execute:: + :hide-code: + :hide-output: + + disp_fc.animate(off_screen=True,save_as="doc/source/user_guide/tutorials/animate/animate_disp_2.gif", + deform_by=False) + + .. 
image:: animate_disp_2.gif + :scale: 50 % + :align: center + +Animate the stress +^^^^^^^^^^^^^^^^^^ + +Use |animate| with the stress results. + +.. tab-set:: + + .. tab-item:: Deformed mesh + + .. jupyter-execute:: + :hide-output: + + # Animate the stress results on a deformed mesh + # Use the ``deform_by`` argument and give the displacement results. + stress_fc.animate(deform_by=disp_fc) + + .. jupyter-execute:: + :hide-code: + :hide-output: + + stress_fc.animate(off_screen=True,save_as="doc/source/user_guide/tutorials/animate/animate_stress_1.gif", + deform_by=disp_fc) + + .. image:: animate_stress_1.gif + :scale: 50 % + :align: center + + .. tab-item:: Static mesh + + .. jupyter-execute:: + :hide-output: + + # Animate the stress results in a static geometry + stress_fc.animate() + + .. jupyter-execute:: + :hide-code: + :hide-output: + + stress_fc.animate(off_screen=True,save_as="doc/source/user_guide/tutorials/animate/animate_stress_2.gif") + + .. image:: animate_stress_2.gif + :scale: 50 % + :align: center + +Change the scale factor +----------------------- + +You can change the scale factor using: + +- A single number for a uniform constant scaling; +- A list of numbers for a varying scaling (same length as the number of frames). + +Uniform constant scaling +^^^^^^^^^^^^^^^^^^^^^^^^ +.. jupyter-execute:: + :hide-output: + + # Define a uniform scale factor + uniform_scale_factor=10. + # Animate the displacements + disp_fc.animate(scale_factor=uniform_scale_factor) + +.. jupyter-execute:: + :hide-code: + :hide-output: + + disp_fc.animate(off_screen=True,save_as="doc/source/user_guide/tutorials/animate/animate_disp_3.gif", + scale_factor=uniform_scale_factor, text="Uniform scale factor") + +.. image:: animate_disp_3.gif + :scale: 45 % + :align: center + +Varying scaling +^^^^^^^^^^^^^^^ + +.. 
jupyter-execute:: + :hide-output: + + # Define a varying scale factor + varying_scale_factor = [float(i) for i in range(len(disp_fc))] + # Animate the displacements + disp_fc.animate(scale_factor=varying_scale_factor) + +.. jupyter-execute:: + :hide-code: + :hide-output: + + disp_fc.animate(off_screen=True,save_as="doc/source/user_guide/tutorials/animate/animate_disp_4.gif", + scale_factor=varying_scale_factor, text="Varying scale factor") + +.. image:: animate_disp_4.gif + :scale: 45 % + :align: center + +Save the animation +------------------ + +You can save the animation using the ``save_as`` argument with a target file path with the desired format as the extension key. +Accepted extensions are: + +- ``.gif``; +- ``.avi``; +- ``.mp4`` + +For more information see |open_movie|. + +.. jupyter-execute:: + :hide-output: + + # Animate the stress results and save it + stress_fc.animate(deform_by=disp_fc, save_as="animate_stress.gif") + + +Control the camera +------------------ + +Control the camera with the ``cpos`` argument. + +A camera position is a combination of: + +- A position; +- A focal point (the target); +- A upwards vector. + +It results in a list of format: + +.. code-block:: python + + camera_position= [[pos_x, pos_y, pos_z], # position + [fp_x, fp_y, fp_z], # focal point + [up_x, up_y, up_z]] # upwards vector + +The |animate| method accepts a single camera position or a list of camera positions for each frame. + +.. note:: + A tip for defining a camera position is to do a first interactive plot of the data + with argument ``return_cpos=True``, position the camera as desired in the view, and retrieve + the output of the plotting command. + +Fixed camera +^^^^^^^^^^^^ + +.. jupyter-execute:: + :hide-output: + + # Define the camera position + cam_pos = [[0., 2.0, 0.6], [0.05, 0.005, 0.5], [0.0, 0.0, 1.0]] + # Animate the stress with a custom fixed camera position + stress_fc.animate(cpos=cam_pos) + +.. 
jupyter-execute:: + :hide-code: + :hide-output: + + stress_fc.animate(save_as="doc/source/user_guide/tutorials/animate/animate_disp_5.gif", + cpos=cam_pos, + off_screen=True) + +.. image:: animate_disp_5.gif + :scale: 50 % + :align: center + +Moving camera +^^^^^^^^^^^^^ + +.. jupyter-execute:: + :hide-output: + + import copy + # Define the list of camera positions + cpos_list = [cam_pos] + # Incrementally increase the x coordinate of the camera by 0.1 for each frame + for i in range(1, len(disp_fc)): + new_pos = copy.deepcopy(cpos_list[i-1]) + new_pos[0][0] += 0.1 + cpos_list.append(new_pos) + + # Animate the stress with a moving camera + stress_fc.animate(cpos=cpos_list) + +.. jupyter-execute:: + :hide-code: + :hide-output: + + stress_fc.animate(save_as="doc/source/user_guide/tutorials/animate/animate_disp_6.gif", + cpos=cpos_list, + off_screen=True) + +.. image:: animate_disp_6.gif + :scale: 50 % + :align: center + +Additional options +------------------ + +You can use additional PyVista arguments of |open_movie|), such as: + +- Show or hide the coordinate system axis with ``show_axes=True`` or ``show_axes=False``; +- Render off-screen for batch animation creation with ``off_screen=True``; +- Change the frame-rate with ``framerate``; +- Change the image quality with ``quality``. diff --git a/doc/source/user_guide/tutorials/animate/index.rst b/doc/source/user_guide/tutorials/animate/index.rst new file mode 100644 index 0000000000..6f360c4216 --- /dev/null +++ b/doc/source/user_guide/tutorials/animate/index.rst @@ -0,0 +1,27 @@ +.. _ref_tutorials_animate: + +======= +Animate +======= + +These tutorials demonstrate how to visualize the data as an animation. + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Animate data over time + :link: ref_tutorials_animate_time + :link-type: ref + :text-align: center + + This tutorial shows how to animate your results data over time. 
+ +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +.. toctree:: + :maxdepth: 2 + :hidden: + + animate_time.rst diff --git a/doc/source/user_guide/tutorials/data_structures/index.rst b/doc/source/user_guide/tutorials/data_structures/index.rst new file mode 100644 index 0000000000..602efc732d --- /dev/null +++ b/doc/source/user_guide/tutorials/data_structures/index.rst @@ -0,0 +1,42 @@ +.. _ref_tutorials_data_structures: + +=================== +DPF data structures +=================== + +DPF uses two main data structures to handle data: Fields and Collections. +Therefore, it is important to be aware of how the data is +structured in those containers. + +The data containers can be: + + - **Raw data storage structures**: Data arrays (a ``Field`` for example) or Data Maps (a ``DataTree`` for example) + - **Collections**: a group of same-labeled objects from one DPF raw data storage structure (a ``FieldsContainer`` for example, that is a group of ``Fields`` with the same label) + +These tutorials explain how these structures work and how you can manipulate data within. + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: DPF raw data storage structures + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial shows how to create and work with some DPF data arrays: + Field, StringField and PropertyField + + + .. grid-item-card:: DPF collections + :link: ref_tutorials_language_and_usage + :link-type: ref + :text-align: center + + This tutorial shows how to create and work with some DPF collections: + FieldsContainer, MeshesContainer and ScopingsContainer + +.. toctree:: + :maxdepth: 2 + :hidden: diff --git a/doc/source/user_guide/tutorials/distributed_files/index.rst b/doc/source/user_guide/tutorials/distributed_files/index.rst new file mode 100644 index 0000000000..70240e016d --- /dev/null +++ b/doc/source/user_guide/tutorials/distributed_files/index.rst @@ -0,0 +1,31 @@ +.. 
_ref_tutorials_distributed_files: + +============================== +Post-process distributed files +============================== + +These tutorials show how to create workflows on different processes (possibly on different machines) and connect them. + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Post-process data on distributed processes + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + + .. grid-item-card:: Create a custom workflow on distributed processes + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + +.. toctree:: + :maxdepth: 2 + :hidden: + diff --git a/doc/source/user_guide/tutorials/dpf_server/index.rst b/doc/source/user_guide/tutorials/dpf_server/index.rst new file mode 100644 index 0000000000..efc4e1bfdb --- /dev/null +++ b/doc/source/user_guide/tutorials/dpf_server/index.rst @@ -0,0 +1,24 @@ +.. _ref_tutorials_dpf_server: + +========== +DPF server +========== + +This tutorial explains how to manipulate the DPF client-server architecture. + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Switch between local and remote server + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + +.. toctree:: + :maxdepth: 2 + :hidden: + diff --git a/doc/source/user_guide/tutorials/enriching_dpf_capabilities/index.rst b/doc/source/user_guide/tutorials/enriching_dpf_capabilities/index.rst new file mode 100644 index 0000000000..232c57c2a4 --- /dev/null +++ b/doc/source/user_guide/tutorials/enriching_dpf_capabilities/index.rst @@ -0,0 +1,42 @@ +.. _ref_tutorials_enriching: + +========================== +Enriching DPF capabilities +========================== + +The available DPF capabilities loaded in a DPF application can be enhanced +by creating new operator libraries. DPF offers multiple development APIs +depending on your environment. 
+ +These tutorials demonstrate how to develop those plugins for PyDPF-Core (CPython based). + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Create custom operators and plugins + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + + .. grid-item-card:: Create a plug-in package with multiple operators + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + + .. grid-item-card:: Create a plug-in package that has third-party dependencies + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + +.. toctree:: + :maxdepth: 2 + :hidden: + diff --git a/doc/source/user_guide/tutorials/export_data/index.rst b/doc/source/user_guide/tutorials/export_data/index.rst new file mode 100644 index 0000000000..c9be8ecd6c --- /dev/null +++ b/doc/source/user_guide/tutorials/export_data/index.rst @@ -0,0 +1,28 @@ +.. _ref_tutorials_export_data: + +=========== +Export data +=========== + +Data in DPF can be exported to universal file formats, such as VTK, HDF5, and TXT files. +You can use it to generate TH-plots, screenshots, and animations or to create custom result +plots using the `numpy `_ and `matplotlib `_ packages. + +These tutorials explain how to export data from your manipulations with PyDPF-Core. + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: HDF5 export + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + +.. toctree:: + :maxdepth: 2 + :hidden: + diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst new file mode 100644 index 0000000000..38aea40a36 --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst @@ -0,0 +1,166 @@ +.. 
_ref_tutorials_extract_and_explore_results_data: + +================================ +Extract and explore results data +================================ + +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst +.. |get_entity_data| replace:: :func:`get_entity_data()` +.. |get_entity_data_by_id| replace:: :func:`get_entity_data_by_id()` + +This tutorial shows how to extract and explore results data from a result file. + +When you extract a result from a result file DPF stores it in a |Field|. +Thus, this |Field| contains the data of the result associated with it. + +.. note:: + + When DPF-Core returns the |Field| object, what Python actually has is a client-side + representation of the |Field|, not the entirety of the |Field| itself. This means + that all the data of the field is stored within the DPF service. This is important + because when building your workflows, the most efficient way of interacting with result data + is to minimize the exchange of data between Python and DPF, either by using operators + or by accessing exclusively the data that is needed. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Get the result file +------------------- + +First, import a result file. For this tutorial, you can use one available in the |Examples| module. +For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_result_file` +tutorial. + +Here, we extract the displacement results. The displacement |Result| object gives a |FieldsContainer| when evaluated. +Thus, we get a |Field| from this |FieldsContainer|. + +.. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_1 = examples.download_transient_result() + + # Create the model + model_1 = dpf.Model(data_sources=result_file_path_1) + + # Extract the displacement results for the last time step + disp_results = model_1.results.displacement.on_last_time_freq.eval() + + # Get the displacement field for the last time step + disp_field = disp_results[0] + + # Print the displacement Field + print(disp_field) + +Extract all the data from a |Field| +----------------------------------- + +You can extract the entire data in a |Field| as: + +- An array (numpy array); +- A list. + +Data as an array +^^^^^^^^^^^^^^^^ + +.. jupyter-execute:: + + # Get the displacement data as an array + data_array = disp_field.data + + # Print the data as an array + print("Displacement data as an array: ", '\n', data_array) + +Note that this array is a genuine, local, numpy array (overloaded by the DPFArray): + +.. jupyter-execute:: + + # Print the array type + print("Array type: ", type(data_array)) + +Data as a list +^^^^^^^^^^^^^^ + +.. jupyter-execute:: + + # Get the displacement data as a list + data_list = disp_field.data_as_list + # Print the data as a list + print("Displacement data as a list: ", '\n', data_list) + +Extract specific data from a field +---------------------------------- + +If you need to access data for specific entities (node, element ...), you can extract it with two approaches: + +- :ref:`Based on its index ` (data position on the |Field|) by using the |get_entity_data| method; +- :ref:`Based on the entities id ` by using the |get_entity_data_by_id| method. + +The |Field| data is organized with respect to its scoping ids. 
Note that the element with id=533 +would correspond to an index=2 within the |Field|. + +.. jupyter-execute:: + + # Get the index of the entity with id=533 + index_533_entity = disp_field.scoping.index(id=533) + # Print the index + print("Index entity id=533: ",index_533_entity) + +Be aware that scoping IDs are not sequential. You would get the id of the element in the 533 +position of the |Field| with: + +.. jupyter-execute:: + + # Get the id of the entity with index=533 + id_533_entity = disp_field.scoping.id(index=533) + print("Id entity index=533: ",id_533_entity) + +.. _ref_extract_specific_data_by_index: + +Get the data by the entity index +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. jupyter-execute:: + + # Get the data from the third entity in the field + data_3_entity = disp_field.get_entity_data(index=3) + # Print the data + print("Data entity index=3: ", data_3_entity) + +.. _ref_extract_specific_data_by_id: + +Get the data by the entity id +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. jupyter-execute:: + + # Get the data from the entity with id=533 + data_533_entity = disp_field.get_entity_data_by_id(id=533) + # Print the data + print("Data entity id=533: ", data_533_entity) + +Extract specific data from a field using a loop over the array +-------------------------------------------------------------- + +While the methods above are acceptable when requesting data for a few elements +or nodes, they should not be used when looping over the entire array. For efficiency, +a |Field| data can be recovered locally before sending a large number of requests: + +.. jupyter-execute:: + + # Create a deep copy of the field that can be accessed and modified locally. 
+ with disp_field.as_local_field() as f: + for i in disp_field.scoping.ids[2:50]: + f.get_entity_data_by_id(i) + + # Print the field + print(f) \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst new file mode 100644 index 0000000000..32f0fa0228 --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -0,0 +1,159 @@ +.. _ref_tutorials_extract_and_explore_results_metadata: + +==================================== +Extract and explore results metadata +==================================== + +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst +.. |ResultInfo| replace:: :class:`ResultInfo` + +This tutorial shows how to extract and explore results metadata from a result file. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Get the result file +------------------- + +First, import a result file. For this tutorial, you can use one available in the |Examples| module. +For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_result_file` +tutorial. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_1 = examples.download_transient_result() + # Create the model + model_1 = dpf.Model(data_sources=result_file_path_1) + +Explore the results general metadata +------------------------------------ + +You can explore the general results metadata, before extracting the results, by using +the |ResultInfo| object and its methods. 
This metadata includes: + +- Analysis type; +- Physics type; +- Number of results; +- Unit system; +- Solver version, date and time; +- Job name; + +.. jupyter-execute:: + + # Define the ResultInfo object + result_info_1 = model_1.metadata.result_info + + # Get the analysis type + analysis_type = result_info_1.analysis_type + # Print the analysis type + print("Analysis type: ",analysis_type, "\n") + + # Get the physics type + physics_type = result_info_1.physics_type + # Print the physics type + print("Physics type: ",physics_type, "\n") + + # Get the number of available results + number_of_results = result_info_1.n_results + # Print the number of available results + print("Number of available results: ",number_of_results, "\n") + + # Get the unit system + unit_system = result_info_1.unit_system + # Print the unit system + print("Unit system: ",unit_system, "\n") + + # Get the solver version, date and time + solver_version = result_info_1.solver_version + solver_date = result_info_1.solver_date + solver_time = result_info_1.solver_time + + # Print the solver version, date and time + print("Solver version: ",solver_version, "\n") + print("Solver date: ", solver_date, "\n") + print("Solver time: ",solver_time, "\n") + + # Get the job name + job_name = result_info_1.job_name + # Print the job name + print("Job name: ",job_name, "\n") + +Explore a result metadata +------------------------- +When you extract a result from a result file DPF stores it in a |Field|. +Thus, this |Field| contains the metadata for the result associated with it. 
+This metadata includes: + +- Location; +- Scoping (type and quantity of entities); +- Elementary data count (number of entities, how many data vectors we have); +- Components count (vectors dimension, here we have a displacement so we expect to have 3 components (X, Y and Z)); +- Shape of the data stored (tuple with the elementary data count and the components count); +- Fields size (length of the data entire vector (equal to the number of elementary data times the number of components)); +- Units of the data. + +Here we will explore the metadata of the displacement results. + +Start by extracting the displacement results. + +.. jupyter-execute:: + + # Extract the displacement results + disp_results = model_1.results.displacement.eval() + + # Get the displacement field + disp_field = disp_results[0] + +Explore the displacement results metadata: + +.. jupyter-execute:: + + # Get the location of the displacement data + location = disp_field.location + # Print the location + print("Location: ", location,'\n') + + # Get the displacement Field scoping + scoping = disp_field.scoping + # Print the Field scoping + print("Scoping: ", '\n',scoping, '\n') + + # Get the displacement Field scoping ids + scoping_ids = disp_field.scoping.ids # Available entities ids + # Print the Field scoping ids + print("Scoping ids: ", scoping_ids, '\n') + + # Get the displacement Field elementary data count + elementary_data_count = disp_field.elementary_data_count + # Print the elementary data count + print("Elementary data count: ", elementary_data_count, '\n') + + # Get the displacement Field components count + components_count = disp_field.component_count + # Print the components count + print("Components count: ", components_count, '\n') + + # Get the displacement Field size + field_size = disp_field.size + # Print the Field size + print("Size: ", field_size, '\n') + + # Get the displacement Field shape + shape = disp_field.shape + # Print the Field shape + print("Shape: ", shape, '\n') 
+ + # Get the displacement Field unit + unit = disp_field.unit + # Print the displacement Field unit + print("Unit: ", unit, '\n') \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/import_data/import_result_file.rst b/doc/source/user_guide/tutorials/import_data/import_result_file.rst new file mode 100644 index 0000000000..f8d22047e2 --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/import_result_file.rst @@ -0,0 +1,363 @@ +.. _ref_tutorials_import_result_file: + +=========================== +Import a result file in DPF +=========================== + +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst +.. |set_result_file_path| replace:: :func:`set_result_file_path() ` +.. |add_file_path| replace:: :func:`add_file_path() ` + +This tutorial shows how to import a result file in DPF. + +There are two approaches to import a result file in DPF: + +- :ref:`Using the DataSources object ` +- :ref:`Using the Model object ` + +.. note:: + + The |Model| extracts a large amount of information by default (results, mesh and analysis data). + If using this helper takes a long time for processing the code, mind using a |DataSources| object + and instantiating operators directly with it. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Define the result file path +--------------------------- + +Both approaches need a file path to be defined. For this tutorial, you can use a result file available in +the |Examples| module. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the .rst result file path + result_file_path_11 = examples.find_static_rst() + + # Define the modal superposition harmonic analysis (.mode, .rfrq and .rst) result files paths + result_file_path_12 = examples.download_msup_files_to_dict() + + + # Print the result files paths + print("Result file path 11:", "\n",result_file_path_11, "\n") + print("Result files paths 12:", "\n",result_file_path_12, "\n") + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the .d3plot result files paths + result_file_path_21 = examples.download_d3plot_beam() + + # Define the .binout result file path + result_file_path_22 = examples.download_binout_matsum() + + # Print the result files paths + print("Result files paths 21:", "\n",result_file_path_21, "\n") + print("Result file path 22:", "\n",result_file_path_22, "\n") + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the project .flprj result file path + result_file_path_31 = examples.download_fluent_axial_comp()["flprj"] + + # Define the CFF .cas.h5/.dat.h5 result files paths + result_file_path_32 = examples.download_fluent_axial_comp() + + # Print the result files paths + print("Result file path 31:", "\n",result_file_path_31, "\n") + print("Result files paths 32:", "\n",result_file_path_32, "\n") + + .. 
tab-item:: CFX + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the project .res result file path + result_file_path_41 = examples.download_cfx_mixing_elbow() + + # Define the CFF .cas.cff/.dat.cff result files paths + result_file_path_42 = examples.download_cfx_heating_coil() + + # Print the result files paths + print("Result file path 41:", "\n",result_file_path_41, "\n") + print("Result files paths 42:", "\n",result_file_path_42, "\n") + +.. _ref_import_result_file_data_sources: + +Use a |DataSources| +------------------- + +The |DataSources| object manages paths to their files. Use this object to declare data +inputs for PyDPF-Core APIs. + +.. tab-set:: + + .. tab-item:: MAPDL + + **a) `.rst` result file** + + Create the |DataSources| object and give the path to the result file to the *'result_path'* argument. + + .. jupyter-execute:: + + # Create the DataSources object + # Use the ``result_path`` argument and give the result file path + ds_11 = dpf.DataSources(result_path=result_file_path_11) + + **b) `.mode`, `.rfrq` and `.rst` result files** + + In the modal superposition, modal coefficients are multiplied by mode shapes (of a previous modal analysis) + to analyse a structure under given boundary conditions in a range of frequencies. Doing this expansion “on demand” + in DPF instead of in the solver reduces the size of the result files. + + The expansion is recursive in DPF: first the modal response is read. Then, *upstream* mode shapes are found in + the |DataSources|, where they are read and expanded. Upstream refers to a source that provides data to a + particular process. + + To create a recursive workflow add the upstream |DataSources| object, that contains the upstream + data files, to the main |DataSources| object. + + .. 
jupyter-execute:: + + # Create the main DataSources object + ds_12 = dpf.DataSources() + # Define the main result file path + ds_12.set_result_file_path(filepath=result_file_path_12["rfrq"], key='rfrq') + + # Create the upstream DataSources object with the main upstream file path + upstream_ds_12 = dpf.DataSources(result_path=result_file_path_12["mode"]) + # Add the additional upstream file path to the upstream DataSources object + upstream_ds_12.add_file_path(filepath=result_file_path_12["rst"]) + + # Add the upstream DataSources to the main DataSources object + ds_12.add_upstream(upstream_data_sources=upstream_ds_12) + + .. tab-item:: LSDYNA + + **a) `.d3plot` result file** + + The d3plot file does not contain information related to units. In this case, as the + simulation was run through Mechanical, a ``file.actunits`` file is produced. If this + file is supplemented in the |DataSources|, the units will be correctly fetched for all + results in the file as well as for the mesh. + + Thus, we must use the |set_result_file_path| and the |add_file_path| methods to add the main + and the additional result file to the |DataSources| object. + + .. jupyter-execute:: + + # Create the DataSources object + ds_21 = dpf.DataSources() + + # Define the main result file path + ds_21.set_result_file_path(filepath=result_file_path_21[0], key="d3plot") + + # Add the additional file path related to the units + ds_21.add_file_path(filepath=result_file_path_21[3], key="actunits") + + **b) `.binout` result file** + + The extension key *`.binout`* is not explicitly specified in the result file. Thus, we use + the |set_result_file_path| method and give the extension key to the *'key'* argument to correctly + add the result file path to the |DataSources| object. + + .. 
jupyter-execute:: + + # Create the DataSources object + ds_22 = dpf.DataSources() + + # Define the path to the result file + # Use the ``key`` argument and give the file extension key + ds_22.set_result_file_path(filepath=result_file_path_22, key="binout") + + .. tab-item:: Fluent + + **a) `.flprj` result file** + + Create the |DataSources| object and give the path to the result file to the *'result_path'* argument. + + .. jupyter-execute:: + + # Create the DataSources object + # Use the ``result_path`` argument and give the result file path + ds_31 = dpf.DataSources(result_path=result_file_path_31) + + **b) `.cas.h5`, `.dat.h5` result files** + + Here, we have a main and an additional result file with two extensions keys. + + Thus, you must use the |set_result_file_path| and the |add_file_path| methods to add the main and + additional result file to the |DataSources| object and explicitly give the *first* extension key to + their *'key'* argument. + + .. jupyter-execute:: + + # Create the DataSources object + ds_32 = dpf.DataSources() + + # Define the path to the main result file + # Use the ``key`` argument and give the first extension key + ds_32.set_result_file_path(filepath=result_file_path_32['cas'][0], key="cas") + + # Add the additional result file path to the DataSources + # Use the ``key`` argument and give the first extension key + ds_32.add_file_path(filepath=result_file_path_32['dat'][0], key="dat") + + .. tab-item:: CFX + + **a) `.res` result file** + + Create the |DataSources| object and give the path to the result file to the *'result_path'* argument. + + .. jupyter-execute:: + + # Create the DataSources object + # Use the ``result_path`` argument and give the result file path + ds_41 = dpf.DataSources(result_path=result_file_path_41) + + **b) `.cas.cff`, `.dat.cff` result files** + + Here, we have a main and an additional result file with two extensions keys. 
+ + Thus, you must use the |set_result_file_path| and the |add_file_path| methods to add the main and + additional result file to the |DataSources| object. Also, you must explicitly give the *first* extension keys to + the *'key'* argument. + + .. jupyter-execute:: + + # Create the DataSources object + ds_42 = dpf.DataSources() + + # Define the path to the main result file + # Use the ``key`` argument and give the first extension key + ds_42.set_result_file_path(filepath=result_file_path_42["cas"], key="cas") + + # Add the additional result file path to the DataSources + # Use the ``key`` argument and give the first extension key + ds_42.add_file_path(filepath=result_file_path_42["dat"], key="dat") + +.. _ref_import_result_file_model: + +Use a |Model| +------------- + +The |Model| is a helper designed to give shortcuts to access the analysis results +metadata and to instanciate results providers by opening a |DataSources| or a Streams. + +To create a |Model| you can provide to the *'data_sources'* argument.: + +- The result file path, in the case you are working with a single result file that has an explicit extension key; +- A |DataSources| object. + +.. tab-set:: + + .. tab-item:: MAPDL + + **a) `.rst` result file** + + .. jupyter-execute:: + + # Create the model with the result file path + model_11 = dpf.Model(data_sources=result_file_path_11) + + # Create the model with the DataSources object + model_12 = dpf.Model(data_sources=ds_11) + + **b) `.mode`, `.rfrq` and `.rst` result files** + + .. jupyter-execute:: + + # Create the model with the DataSources object + model_13 = dpf.Model(data_sources=ds_12) + + .. tab-item:: LSDYNA + + **a) `.d3plot` result file** + + .. jupyter-execute:: + + # Create the model with the DataSources object + model_21 = dpf.Model(data_sources=ds_21) + + **b) `.binout` result file** + + .. jupyter-execute:: + + # Create the model with the DataSources object + model_22 = dpf.Model(data_sources=ds_22) + + .. 
tab-item:: Fluent + + **a) `.flprj` result file** + + .. jupyter-execute:: + + # Create the model with the result file path + model_31 = dpf.Model(data_sources=result_file_path_31) + + # Create the model with the DataSources object + model_32 = dpf.Model(data_sources=ds_31) + + **b) `.cas.h5`, `.dat.h5` result files** + + .. jupyter-execute:: + + # Create the model with the DataSources object + model_33 = dpf.Model(data_sources=ds_32) + + .. tab-item:: CFX + + **a) `.res` result file** + + .. jupyter-execute:: + + # Create the model with the result file path + model_41 = dpf.Model(data_sources=result_file_path_41) + + # Create the model with the DataSources object + model_42 = dpf.Model(data_sources=ds_41) + + **b) `.cas.cff`, `.dat.cff` result files** + + .. jupyter-execute:: + + # Create the model with the DataSources object + model_43 = dpf.Model(data_sources=ds_42) + diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst new file mode 100644 index 0000000000..b9607fa4e2 --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/index.rst @@ -0,0 +1,83 @@ +.. _ref_tutorials_import_data: + +=========== +Import Data +=========== + +These tutorials demonstrate how to represent data in DPF: either from manual input either +form simulation result files. + +From user input +*************** + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Load custom data + :link: ref_tutorials_load_custom_data + :link-type: ref + :text-align: center + + Learn how to build DPF data storage structures from custom data. + +From result files +***************** + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Import a result file in DPF + :link: ref_tutorials_import_result_file + :link-type: ref + :text-align: center + + This tutorial shows how to import a result file in DPF. 
+ + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + + .. grid-item-card:: Extract and explore results metadata + :link: ref_tutorials_extract_and_explore_results_metadata + :link-type: ref + :text-align: center + + This tutorial shows how to extract and explore results metadata (analysis type, + physics type, unit system...) from a result file. + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + + .. grid-item-card:: Extract and explore results data + :link: ref_tutorials_extract_and_explore_results_data + :link-type: ref + :text-align: center + + This tutorial shows how to extract and explore results data from a result file. + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + + .. grid-item-card:: Narrow down data + :link: reft_tutorials_narrow_down_data + :link-type: ref + :text-align: center + + This tutorial explains how to scope (get a spatial and/or temporal subset of + the simulation data) your results. + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +.. toctree:: + :maxdepth: 2 + :hidden: + + load_custom_data.rst + import_result_file.rst + extract_and_explore_results_metadata.rst + extract_and_explore_results_data.rst + narrow_down_data.rst \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/import_data/load_custom_data.rst b/doc/source/user_guide/tutorials/import_data/load_custom_data.rst new file mode 100644 index 0000000000..cb2737933e --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/load_custom_data.rst @@ -0,0 +1,697 @@ +.. _ref_tutorials_load_custom_data: + +======================= +Load custom data in DPF +======================= + +.. include:: ../../../links_and_refs.rst +.. |Field.append| replace:: :func:`append()` +.. |Field.data| replace:: :attr:`Field.data` +.. |fields_factory| replace:: :mod:`fields_factory` +.. 
|fields_container_factory| replace:: :mod:`fields_container_factory` +.. |location| replace:: :class:`location` +.. |nature| replace:: :class:`nature` +.. |dimensionality| replace:: :class:`dimensionality` +.. |Field.dimensionality| replace:: :func:`Field.dimensionality` +.. |Field.location| replace:: :func:`Field.location` +.. |Field.scoping| replace:: :func:`Field.scoping` +.. |field_from_array| replace:: :func:`field_from_array()` +.. |create_scalar_field| replace:: :func:`create_scalar_field()` +.. |create_vector_field| replace:: :func:`create_vector_field()` +.. |create_3d_vector_field| replace:: :func:`create_3d_vector_field()` +.. |create_matrix_field| replace:: :func:`create_matrix_field()` +.. |create_tensor_field| replace:: :func:`create_tensor_field()` +.. |over_time_freq_fields_container| replace:: :func:`over_time_freq_fields_container()` + +This tutorial shows how to represent your custom data in DPF data storage structures. + +To import you custom data in DPF, you must create a DPF data structure to store it. +DPF uses |Field| and |FieldsContainer| objects to handle data. The |Field| is a homogeneous array +and a |FieldsContainer| is a labeled collection of |Field|. For more information on DPF data structures +such as the |Field| and their use check the :ref:`ref_tutorials_data_structures` tutorials section. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Define the data +--------------- + +In this tutorial, we create different Fields from data stored in Python lists. + +Create the python lists with the data to be *set* to the Fields. + +.. 
jupyter-execute:: + + # Data for the scalar Fields (lists with 1 and 2 dimensions) + data_1 = [6.0, 5.0, 4.0, 3.0, 2.0, 1.0] + data_2 = [[12.0, 7.0, 8.0], [ 9.0, 31.0, 1.0]] + + # Data for the vector Fields (lists with 1 and 2 dimensions) + data_3 = [4.0, 1.0, 8.0, 5.0, 7.0, 9.0] + data_4 = [6.0, 5.0, 4.0, 3.0, 2.0, 1.0, 9.0, 7.0, 8.0, 10.0] + data_5 = [[8.0, 4.0, 3.0], [31.0, 5.0, 7.0]] + + # Data for the matrix Fields + data_6 = [3.0, 2.0, 1.0, 7.0] + data_7 = [15.0, 3.0, 9.0, 31.0, 1.0, 42.0, 5.0, 68.0, 13.0] + data_8 = [[12.0, 7.0, 8.0], [ 1.0, 4.0, 27.0], [98.0, 4.0, 6.0]] + +Create the python lists with the data to be *appended* to the Fields. + +.. jupyter-execute:: + + # Data for the scalar Fields + data_9 = [24.0] + + # Data for the vector Fields + data_10 = [47.0, 33.0, 5.0] + + # Data for the matrix Fields + data_11 = [8.0, 2.0, 4.0, 64.0, 32.0, 47.0, 11.0, 23.0, 1.0] + + +Create the Fields +----------------- + +In this tutorial, we explain how to create the following Fields: + +- Scalar Field; +- Vector Field; +- Matrix Field. + +.. note:: + + A |Field| must always be given: + + - A |location| and a |Scoping|. + + Here, we create Fields in the default *'Nodal'* |location|. Thus, each entity (here, the nodes) must + have a |Scoping| id, that can be defined in a random or in a numerical order: + + - If you want to *set* a data array to the |Field|, you must previously set the |Scoping| ids using the |Field.scoping| method. + - If you want to *append* an entity with a data array to the |Field|, you don't need to previously set the |Scoping| ids. + + - A |nature| and a |dimensionality| (number of data components for each entity). They must respect the type and size of the + data to be stored in the |Field|. + +Import the PyDPF-Core library +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +First, import the PyDPF-Core library. + +.. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + +Define the Fields sizing +^^^^^^^^^^^^^^^^^^^^^^^^ + +The second step consists in defining the Fields dimensions. + +.. tab-set:: + + .. tab-item:: Scalar fields + + Here, we create one |Field| with 6 scalar. Thus, 6 entities with one |Scoping| id each. + + .. jupyter-execute:: + + # Define the number of entities + num_entities_1 = 6 + + You must ensure that this |Field| has a *'scalar'* |nature| and an *'1D'* |dimensionality|. + + .. tab-item:: Vector fields + + Here, we create: + + - One |Field| with 2 vectors (thus, 2 entities) of 3 components each (3D vector |Field|); + - One |Field| with 2 vectors (thus, 2 entities) of 5 components each (5D vector |Field|); + + .. jupyter-execute:: + + # Define the number of entities + num_entities_2 = 2 + + You must ensure that these Fields have a *'vector'* |nature| and the corresponding |dimensionality| + (*'3D'* and *'5D'*). + + .. tab-item:: Matrix fields + + Here, we create: + + - One Field with 1 matrix (thus, 1 entity) of 2 lines and 2 columns; + - Two Fields with 1 matrix (thus, 1 entity) of 3 lines and 3 columns (tensor). + + .. jupyter-execute:: + + # Define the number of entities + num_entities_3 = 1 + + You must ensure that these Fields have a *'matrix'* |nature| and the corresponding |dimensionality|. + +Create the Fields objects +^^^^^^^^^^^^^^^^^^^^^^^^^ + +You can create the Fields using two approaches: + +- :ref:`Instantianting the Field object`; +- :ref:`Using the fields_factory module`. + +.. _ref_create_field_instance: + +Create a |Field| by an instance of this object +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. tab-set:: + + .. tab-item:: Scalar fields + + .. jupyter-execute:: + + # Define the number of entities + num_entities_1 = 6 + + You must ensure that this |Field| has a *'scalar'* |nature| and an *'1D'* |dimensionality|. 
+ + For this approach, the default |nature| of the |Field| object is *'vector'*. You can modify it directly with the + *'nature'* argument or with the |Field.dimensionality| method. + + Create the scalar |Field| and use the *'nature'* argument. + + .. jupyter-execute:: + + # Instanciate the Field + field_11 = dpf.Field(nentities=num_entities_1, nature=dpf.common.natures.scalar) + + # Set the scoping ids + field_11.scoping.ids = range(num_entities_1) + + # Print the Field + print("Scalar Field: ", '\n',field_11, '\n') + + Create the scalar |Field| and use the |Field.dimensionality| method. + + .. jupyter-execute:: + + # Instanciate the Field + field_12 = dpf.Field(nentities=num_entities_1) + + # Use the Field.dimensionality method + field_12.dimensionality = dpf.Dimensionality([1]) + + # Set the scoping ids + field_12.scoping.ids = range(num_entities_1) + + # Print the Field + print("Scalar Field : ", '\n',field_12, '\n') + + .. tab-item:: Vector fields + + Here, we create: + + - One |Field| with 2 vectors (thus, 2 entities) of 3 components each (3D vector |Field|); + - One |Field| with 2 vectors (thus, 2 entities) of 5 components each (5D vector |Field|); + + .. jupyter-execute:: + + # Define the number of entities + num_entities_2 = 2 + + You must ensure that these Fields have a *'vector'* |nature| and the corresponding |dimensionality| (*'3D'* and *'5D'*). + + For this approach, the default |nature| is *'vector'* and the default |dimensionality| is *'3D'*. So for the second vector + |Field| you must set a *'5D'* |dimensionality| using the |Field.dimensionality| method. + + Create the *'3D'* vector Field. + + .. jupyter-execute:: + + # Instantiate the Field + field_21 = dpf.Field(nentities=num_entities_2) + + # Set the scoping ids + field_21.scoping.ids = range(num_entities_2) + + # Print the Field + print("3D vector Field : ", '\n',field_21, '\n') + + Create the *'5D'* vector Field. + + .. 
jupyter-execute:: + + # Instantiate the Field + field_31 = dpf.Field(nentities=num_entities_2) + + # Use the Field.dimensionality method + field_31.dimensionality = dpf.Dimensionality([5]) + + # Set the scoping ids + field_31.scoping.ids = range(num_entities_2) + + # Print the Field + print("5D vector Field (5D): ", '\n',field_31, '\n') + +.. _ref_create_field_fields_factory: + +Create a |Field| using the |fields_factory| module +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. tab-set:: + + .. tab-item:: Scalar fields + + You can use two functions from the |fields_factory| module to create a scalar |Field|: + + - The |create_scalar_field| function; + - The |field_from_array| function. + + **Create the Field using the create_scalar_field function** + + For this approach, the default |nature| of the |Field| object is *'scalar'* and the default |dimensionality| is *'1D'*. + Thus, you just have to use the |create_scalar_field| function to create a scalar |Field|. + + .. jupyter-execute:: + + # Create the scalar Field + field_13 = dpf.fields_factory.create_scalar_field(num_entities=num_entities_1) + + # Set the scoping ids + field_13.scoping.ids = range(num_entities_1) + + # Print the Field + print("Scalar Field: ", '\n',field_13, '\n') + + **Create the Field using the field_from_array function** + + Different from the other approaches, where you set or append the data after creating the |Field|, here, the data is + used as an input of the |field_from_array| function. + + This function gets an Numpy array or Python list of either: + + - 1 dimension (one array). In this case, you get directly a scalar |Field|; + - 2 dimensions (one array containing multiple arrays with 3 components each). In the is case, you get a 3D vector |Field|. + Thus, you have to change the |Field| |dimensionality| using the |Field.dimensionality| method. + + Create the scalar Field with an 1 dimensional list. + + .. 
jupyter-execute:: + + # Use the field_from_array function + field_14 = dpf.fields_factory.field_from_array(arr=data_1) + + # Set the scoping ids + field_14.scoping.ids = range(num_entities_1) + + # Print the Field + print("Scalar Field: ", '\n',field_14, '\n') + + Create the scalar Field with a 2 dimensional list. + + .. jupyter-execute:: + + # Use the field_from_array function + field_15 = dpf.fields_factory.field_from_array(arr=data_2) + + # Use the |Field.dimensionality| method + field_15.dimensionality = dpf.Dimensionality([1]) + + # Set the scoping ids + field_15.scoping.ids = range(num_entities_1) + + # Print the Field + print("Scalar Field (b): ", '\n',field_15, '\n') + + + .. tab-item:: Vector fields + + You can use three functions from the |fields_factory| module to create a vector |Field|: + + - The |create_vector_field| function; + - The |create_3d_vector_field| function (Specifically to create a 3D vector |Field| + (a vector |Field| with 3 components for each entity)); + - The |field_from_array| function. + + **Create the Field using the create_vector_field() function** + + For this approach, the default |nature| is *'vector'*. To define the |dimensionality| you must use the *'num_comp'* argument. + + Create the *'3D'* vector Field. + + .. jupyter-execute:: + + # Use the create_vector_field function + field_22 = dpf.fields_factory.create_vector_field(num_entities=num_entities_2, num_comp=3) + + # Set the scoping ids + field_22.scoping.ids = range(num_entities_2) + + # Print the Field + print("3D vector Field : ", '\n',field_22, '\n') + + Create the *'5D'* vector Field. + + .. 
jupyter-execute:: + + # Use the create_vector_field function + field_32 = dpf.fields_factory.create_vector_field(num_entities=num_entities_2, num_comp=5) + + # Set the scoping ids + field_32.scoping.ids = range(num_entities_2) + + # Print the Field + print("5D vector Field : ", '\n',field_32, '\n') + + **Create a 3d vector Field using the create_3d_vector_field() function** + + For this approach, the default |nature| is *'vector'* and the |dimensionality| is *'3D'*. Thus, you just + have to use the |create_3d_vector_field| function to create a 3D vector |Field|. + + .. jupyter-execute:: + + # Create the 3d vector Field + field_25 = dpf.fields_factory.create_3d_vector_field(num_entities=num_entities_2) + # Set the scoping ids + field_25.scoping.ids = range(num_entities_2) + + # Print the Field + print("Vector Field (3D): ", '\n',field_25, '\n') + + **Create the Field using the field_from_array() function** + + Different from the other approaches, where you set or append the data after creating the |Field|, here, the data is + used as an input of the |field_from_array| function. + + This function gets an Numpy array or Python list of either: + + - 1 dimension (one array). In this case, you have to change the |Field| |dimensionality| using the + |Field.dimensionality| method. + - 2 dimensions (one array containing multiple arrays with 3 components). In the is case, you get a 3D vector |Field|. + + .. note:: + + The |Field| must always assure a homogeneous shape. The shape is a tuple with the number of elementary data and the + number of components. + + So, for the *'5D* vector |field| we would want a shape of (10,5). Nevertheless, the 2 dimensions data vector we + defined ("data_5") has a elementary data count of 6 (2*3). Thus, we cannot define the *'5D'* vector |Field| because it would + have a (6,5) shape. + + Create the *'3D'* vector Field with an 1 dimensional list. + + .. 
jupyter-execute:: + + # Use the field_from_array function + field_23 = dpf.fields_factory.field_from_array(arr=data_3) + + # Use the Field.dimensionality method + field_23.dimensionality = dpf.Dimensionality([3]) + + # Set the scoping ids + field_23.scoping.ids = range(num_entities_2) + + # Print the Field + print("3D vector Field: ", '\n',field_23, '\n') + + Create the *'3D'* vector Field and give a 2 dimensional list. + + .. jupyter-execute:: + + # Use the field_from_array function + field_24 = dpf.fields_factory.field_from_array(arr=data_5) + + # Set the scoping ids + field_24.scoping.ids = range(num_entities_2) + + # Print the Field + print("3D vector Field: ", '\n',field_24, '\n') + + .. tab-item:: Matrix fields + + You can create a matrix |Field| using the |create_matrix_field| function from the |fields_factory| module. + + The default |nature| here is *'matrix'*. Thus, you only have to define the matrix |dimensionality| using the + *'num_lines'* and *'num_col'* arguments. + + Create the (2,2) matrix Field. + + .. jupyter-execute:: + + # Use the create_matrix_field function + field_41 = dpf.fields_factory.create_matrix_field(num_entities=num_entities_3, num_lines=2, num_col=2) + + # Set the scoping ids + field_41.scoping.ids = range(num_entities_3) + + # Print the Field + print("Matrix Field (2,2) : ", '\n',field_41, '\n') + + Create the (3,3) matrix Fields. + + .. 
jupyter-execute:: + + # Use the create_matrix_field function + field_51 = dpf.fields_factory.create_matrix_field(num_entities=num_entities_3, num_lines=3, num_col=3) + field_52 = dpf.fields_factory.create_matrix_field(num_entities=num_entities_3, num_lines=3, num_col=3) + + # Set the scoping ids + field_51.scoping.ids = range(num_entities_3) + field_52.scoping.ids = range(num_entities_3) + + # Print the Field + print("Matrix Field 1 (3,3) : ", '\n',field_51, '\n') + print("Matrix Field 2 (3,3) : ", '\n',field_52, '\n') + +Set data to the Fields +---------------------- + +To set a data array to a |Field| use the |Field.data| method. The |Field| |Scoping| defines how the data is ordered. +For example: the first id in the scoping identifies to which entity the first data entity belongs to. + +The data can be in a 1 dimension (one array) or 2 dimensions (one array containing multiple arrays) +Numpy array or Python list. When attributed to a |Field|, these data arrays are reshaped to respect +the |Field| definition. + +.. tab-set:: + + .. tab-item:: Scalar fields + + Set the data from a 1 dimensional array to the scalar Field. + + .. jupyter-execute:: + + # Set the data + field_11.data = data_1 + + # Print the Field + print("Scalar Field : ", '\n',field_11, '\n') + + # Print the Fields data + print("Data scalar Field : ", '\n',field_11.data, '\n') + + Set the data from a 2 dimensional array to the scalar Field. + + .. jupyter-execute:: + + # Set the data + field_12.data = data_2 + + # Print the Field + print("Scalar Field : ", '\n',field_12, '\n') + + # Print the Fields data + print("Data scalar Field : ", '\n',field_12.data, '\n') + + .. tab-item:: Vector fields + + Set the data from a 1 dimensional array to the *'3D'* vector Field. + + .. 
jupyter-execute:: + + # Set the data + field_21.data = data_3 + + # Print the Field + print("Vector Field : ", '\n',field_21, '\n') + + # Print the Fields data + print("Data vector Field: ", '\n',field_21.data, '\n') + + Set the data from a 1 dimensional array to the *'5D'* vector Field. + + .. jupyter-execute:: + + # Set the data + field_31.data = data_4 + + # Print the Field + print("Vector Field: ", '\n',field_31, '\n') + + # Print the Fields data + print("Data vector Field : ", '\n',field_31.data, '\n') + + Set the data from a 2 dimensional array to the *'3D'* vector Field. + + .. jupyter-execute:: + + # Set the data + field_22.data = data_5 + + # Print the Field + print("Vector Field: ", '\n',field_22, '\n') + + # Print the Fields data + print("Data vector Field: ", '\n',field_22.data, '\n') + + .. tab-item:: Matrix fields + + Set the data from a 1 dimensional array to the (2,2) matrix Field. + + .. jupyter-execute:: + + # Set the data + field_41.data = data_6 + + # Print the Field + print("Matrix Field: ", '\n',field_41, '\n') + + # Print the Fields data + print("Data matrix Field: ", '\n',field_41.data, '\n') + + Set the data from a 1 dimensional array to the (3,3) matrix Field. + + .. jupyter-execute:: + + # Set the data + field_51.data = data_7 + + # Print the Field + print("Matrix Field: ", '\n',field_51, '\n') + + # Print the Fields data + print("Data matrix Field: ", '\n',field_51.data, '\n') + + Set the data from a 2 dimensional array to the (3,3) matrix Field. + + .. jupyter-execute:: + + # Set the data + field_52.data = data_8 + + # Print the Field + print("Matrix Field: ", '\n',field_51, '\n') + + # Print the Fields data + print("Data matrix Field: ", '\n',field_51.data, '\n') + +Append data to the Fields +------------------------- + +You can append a data array to a |Field|, this means adding a new entity with the new data in the |Field|. You have to +give the |Scoping| id that this entities will have. + +.. tab-set:: + + .. 
tab-item:: Scalar fields + + Append data to a scalar |Field|. + + .. jupyter-execute:: + + # Append the data + field_11.append(scopingid=6, data=data_9) + + # Print the Field + print("Scalar Field : ", '\n',field_11, '\n') + + # Print the Fields data + print("Data scalar Field: ", '\n',field_11.data, '\n') + + .. tab-item:: Vector fields + + Append data to a vector |Field|. + + .. jupyter-execute:: + + # Append the data + field_21.append(scopingid=2, data=data_10) + + # Print the Field + print("Vector Field : ", '\n',field_21, '\n') + + # Print the Fields data + print("Data vector Field: ", '\n',field_21.data, '\n') + + + .. tab-item:: Matrix fields + + Append data to a matrix |Field|. + + .. jupyter-execute:: + + # Append the data + field_51.append(scopingid=1, data=data_11) + + # Print the Field + print("Matrix Field : ", '\n',field_51, '\n') + + # Print the Fields data + print("Data Matrix Field: ", '\n',field_51.data, '\n') + +Create a |FieldsContainer| +-------------------------- + +A |FieldsContainer| is a collection of |Field| ordered by labels. Each |Field| of the |FieldsContainer| has +an ID for each label. These ids allow splitting the fields on any criteria. + +The most common |FieldsContainer| has the label *'time'* with ids corresponding to time sets. The label *'complex'*, +which is used in a harmonic analysis for example, allows real parts (id=0) to be separated from imaginary parts (id=1). + +For more information on DPF data structures, see the :ref:`ref_tutorials_data_structures` tutorials section. + +You can create a |FieldsContainer| by: + +- :ref:`Instantiating the FieldsContainer object`; +- :ref:`Using the fields_container_factory module`. + +.. _ref_fields_container_instance: + +Create a |FieldsContainer| by an instance of this object +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +After defining a |FieldsContainer| by an instance of this object you need to set the labels. Here, we define +Fields over time step labels.
So, when you add a |Field| to the |FieldsContainer| you must specify the time step id +it belongs to. + +.. jupyter-execute:: + + # Create the FieldsContainer object + fc_1 = dpf.FieldsContainer() + + # Define the labels + fc_1.add_label(label="time") + + # Add the Fields + fc_1.add_field(label_space={"time": 0}, field=field_21) + fc_1.add_field(label_space={"time": 1}, field=field_31) + + # Print the FieldsContainer + print(fc_1) + +.. _ref_fields_container_factory_module: + +Create a |FieldsContainer| with the |fields_container_factory| module +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The |fields_container_factory| module contains functions that create a |FieldsContainer| with predefined +labels. Here, we use the |over_time_freq_fields_container| function that create a |FieldsContainer| with a *'time'* +label. + +.. jupyter-execute:: + + # Create the FieldsContainer + fc_2 = dpf.fields_container_factory.over_time_freq_fields_container(fields=[field_21, field_31]) + + # Print the FieldsContainer + print(fc_2) \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst new file mode 100644 index 0000000000..4e2c668037 --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst @@ -0,0 +1,457 @@ +.. _reft_tutorials_narrow_down_data: + +================ +Narrow down data +================ + +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LS-DYNA` :bdg-fluent:`FLUENT` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst +.. |location| replace:: :class:`location` +.. |time_freq_scoping_factory| replace:: :mod:`time_freq_scoping_factory` +.. |mesh_scoping_factory| replace:: :mod:`mesh_scoping_factory` +.. |displacement| replace:: :class:`result.displacement ` +.. |Model.results| replace:: :func:`Model.results ` +.. |result op| replace:: :mod:`result` +.. |rescope| replace:: :class:`rescope ` +.. 
|from_mesh| replace:: :class:`from_mesh ` +.. |extract_scoping| replace:: :class:`extract_scoping ` +.. |scoping_by_sets| replace:: :func:`scoping_by_sets() ` +.. |nodal_scoping| replace:: :func:`nodal_scoping() ` +.. |ScopingsContainer| replace:: :class:`ScopingsContainer ` +.. |MeshedRegion.elements| replace:: :func:`MeshedRegion.elements` +.. |MeshedRegion.nodes| replace:: :func:`MeshedRegion.nodes` +.. |Elements.scoping| replace:: :func:`Elements.scoping` +.. |Nodes.scoping| replace:: :func:`Nodes.scoping` +.. |Field.scoping| replace:: :func:`Field.scoping` +.. |Model.metadata| replace:: :func:`Model.metadata` +.. |Metadata| replace:: :class:`Metadata ` +.. |Metadata.time_freq_support| replace:: :func:`Metadata.time_freq_support` +.. |FieldsContainer.time_freq_support| replace:: :func:`FieldsContainer.time_freq_support` +.. |Field.time_freq_support| replace:: :func:`Field.time_freq_support` +.. |TimeFreqSupport.time_frequencies| replace:: :func:`TimeFreqSupport.time_frequencies` + +This tutorial explains how to scope your results over time and mesh domains. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Understanding the scope +----------------------- + +To begin the workflow set up, you need to establish the ``scoping``, that is +a spatial and/or temporal subset of the simulation data. + +The data in DPF is represented by a |Field|. Thus, narrow down your results means scoping your |Field|. +To do so in DPF, you use the |Scoping| object. You can retrieve all the time steps available for +a result, but you can also filter them. + +.. note:: + + Scoping is important because when DPF-Core returns the |Field| object, what Python actually has + is a client-side representation of the |Field|, not the entirety of the |Field| itself. This means + that all the data of the field is stored within the DPF service. 
This is important + because when building your workflows, the most efficient way of interacting with result data + is to minimize the exchange of data between Python and DPF, either by using operators + or by accessing exclusively the data that is needed. For more information on the DPF data storage + structures see :ref:`ref_tutorials_data_structures`. + +In conclusion, the essence of a scoping is to specify a set of time or mesh entities by defining a range of IDs: + +.. image:: ../../../images/drawings/scoping-eg.png + :align: center + +Create a |Scoping| object from scratch +-------------------------------------- + +First, import the necessary PyDPF-Core modules. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + +Then, use the available APIs to create a |Scoping| object. It can be created by: + +- :ref:`Instantiating the Scoping class`; +- :ref:`Using the scoping factory `. + + +.. _ref_create_scoping_instance_object: + +Instantiate a |Scoping| +^^^^^^^^^^^^^^^^^^^^^^^ + +Create a time and a mesh |Scoping| by instantiating the |Scoping| object. Use the *'ids'* and *'location'* arguments +and give the entities ids and |location| of interest. + +.. tab-set:: + + .. tab-item:: Time scoping + + A time location in DPF is a |TimeFreqSupport| object. Thus, we chose a *'time_freq'* |location| and target + a set of times by their ids. + + .. jupyter-execute:: + + + # Define a time list that targets the times ids 14, 15, 16, 17 + time_list_1 = [14, 15, 16, 17] + + # Create the time Scoping object + time_scoping_1 = dpf.Scoping(ids=time_list_1, location=dpf.locations.time_freq) + + + .. tab-item:: Mesh scoping + + Here, we chose a nodal |location| and target a set of nodes by their ids. + + ..
jupyter-execute:: + + # Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802 + nodes_ids_1 = [103, 204, 334, 1802] + + # Create the mesh Scoping object + mesh_scoping_1 = dpf.Scoping(ids=nodes_ids_1, location=dpf.locations.nodal) + +.. _ref_create_scoping_scoping_factory: + +Use the scoping factory module +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Create a |Scoping| object by using the |time_freq_scoping_factory| module for a temporal |Scoping| +and the |mesh_scoping_factory| module for a spatial |Scoping|. + +.. tab-set:: + + .. tab-item:: Time scoping + + Here, we use the |scoping_by_sets| function so we can have different time steps in the |Scoping|. This function + gives a |Scoping| on a *'time_freq'* |location|. + + .. jupyter-execute:: + + # Define a time list that targets the times ids 14, 15, 16, 17 + time_list_2 = [14, 15, 16, 17] + + # Create the time Scoping object + time_scoping_2 = dpf.time_freq_scoping_factory.scoping_by_sets(cumulative_sets=time_list_2) + + + .. tab-item:: Mesh scoping + + Here, we use the |nodal_scoping| function so we have a mesh |Scoping| in a nodal |location|. + + .. jupyter-execute:: + + # Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802 + nodes_ids_2 = [103, 204, 334, 1802] + + # Create the mesh Scoping object + mesh_scoping_2 = dpf.mesh_scoping_factory.nodal_scoping(node_ids=nodes_ids_2) + +Extract a |Scoping| +------------------- + +You can extract |Scoping| from some DPF objects. They are: + +.. tab-set:: + + .. tab-item:: Time scoping + + - A |Model|; + - A |FieldsContainer| ; + - A |Field|. + + + .. tab-item:: Mesh scoping + + - A |MeshedRegion|; + - A |FieldsContainer| ; + - A |Field|. + +Define the objects +^^^^^^^^^^^^^^^^^^ + +First, import a result file and create a |Model|. For this tutorial, you can use one available in the |Examples| module. +For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_result_file` +tutorial. 
+ +.. jupyter-execute:: + + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_1 = examples.download_transient_result() + # Create the DataSources object + ds_1 = dpf.DataSources(result_path=result_file_path_1) + # Create the model + model_1 = dpf.Model(data_sources=ds_1) + +From this result file we extract: + +- The mesh (in DPF a mesh is the |MeshedRegion| object); +- The displacement results. The displacement |Result| object gives a |FieldsContainer| when evaluated. Additionally, + we can get a |Field| from this |FieldsContainer|. + +.. jupyter-execute:: + + # Get the MeshedRegion + meshed_region_1 = model_1.metadata.meshed_region + + # Get a FieldsContainer with the displacement results + disp_fc = model_1.results.displacement.on_all_time_freqs.eval() + + # Get a Field from the FieldsContainer + disp_field = disp_fc[0] + +Extract the time |Scoping| +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Extract the time |Scoping| is extracting the scoping of the time frequencies from the |TimeFreqSupport| +of the DPF object. + +.. tab-set:: + + .. tab-item:: From the Model + + You can extract the |TimeFreqSupport| available for the results by accessing the |Model| |Metadata|. + Thus, you must use the |Model.metadata| method. From the |Metadata|, you can get the |TimeFreqSupport| + by using the |Metadata.time_freq_support| method. + + .. jupyter-execute:: + + # Extract the TimeFreq support + tfs_1 = model_1.metadata.time_freq_support + + To extract the time frequencies you use the |TimeFreqSupport.time_frequencies| method. The time + frequencies are given in a Field. Thus, to get the time |Scoping| you need to use the |Field.scoping| method. + For this approach, the time frequencies are given in a *'TimeFreq_sets'* location. + + .. 
jupyter-execute:: + + # Extract the time frequencies + t_freqs_1 = tfs_1.time_frequencies + + # Extract the time scoping + time_scop_1 = t_freqs_1.scoping + + #Print the time scoping + print(time_scop_1) + + + .. tab-item:: From the FieldsContainer + + You can extract the |TimeFreqSupport| of each |Field| in the |FieldsContainer| by using the + |FieldsContainer.time_freq_support| method. + + .. jupyter-execute:: + + # Extract the TimeFreq support + tfs_2 = disp_fc.time_freq_support + + To extract the time frequencies you use the |TimeFreqSupport.time_frequencies| method. The time + frequencies are given in a Field. Thus, to get the time |Scoping| you need to use the |Field.scoping| method. + + .. jupyter-execute:: + + # Extract the time frequencies + t_freqs_2 = tfs_2.time_frequencies + + # Extract the time scoping + time_scop_2 = t_freqs_2.scoping + + #Print the time scoping + print(time_scop_2) + + + .. tab-item:: From the Field + + You can extract the |TimeFreqSupport| of a |Field| by using the |Field.time_freq_support| method. + + .. jupyter-execute:: + + # Extract the TimeFreq support + tfs_3 = disp_field.time_freq_support + + To extract the time frequencies you use the |TimeFreqSupport.time_frequencies| method. The time + frequencies are given in a Field. Thus, to get the time |Scoping| you need to use the |Field.scoping| method. + + .. jupyter-execute:: + + # Extract the time frequencies + t_freqs_3 = tfs_3.time_frequencies + + # Extract the time scoping + time_scop_3 = t_freqs_3.scoping + + #Print the time scoping + print(time_scop_3) + +Extract the mesh |Scoping| +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. tab-set:: + + .. tab-item:: From the MeshedRegion + + You can extract the mesh |Scoping| from a |MeshedRegion| using: + + - The |from_mesh| operator; + - The |Elements| object; + - The |Nodes| object. + + **Use the from_mesh operator** + + Extract the mesh |Scoping| from the |MeshedRegion| using the |from_mesh| operator.
It gets the + |Scoping| for the entire mesh with a *'nodal'* location. You can also get an *'elemental'* location + by using the *'requested_location'* argument. + + .. jupyter-execute:: + + # Extract the mesh scoping + mesh_scoping_3 = ops.scoping.from_mesh(mesh=meshed_region_1).eval() + + # Print the mesh Scoping + print("Scoping from mesh", "\n", mesh_scoping_3, "\n") + + **Use the Elements object** + + You can obtain the |Elements| object from a given |MeshedRegion| by using the |MeshedRegion.elements| + method. You can extract the mesh |Scoping| from the |Elements| object by using the |Elements.scoping| method. + It gets the |Scoping| for the entire mesh with a *'elemental'* location. + + .. jupyter-execute:: + + # Extract the mesh scoping + mesh_scoping_4 = meshed_region_1.elements.scoping + + # Print the mesh Scoping + print("Scoping from mesh", "\n", mesh_scoping_4, "\n") + + **Use the Nodes object** + + You can obtain the |Nodes| object from a given |MeshedRegion| by using the |MeshedRegion.nodes| + method. You can extract the mesh |Scoping| from the |Nodes| object by using the |Nodes.scoping| method. + It gets the |Scoping| for the entire mesh with a *'nodal'* location. + + .. jupyter-execute:: + + # Extract the mesh scoping + mesh_scoping_5 = meshed_region_1.nodes.scoping + + # Print the mesh Scoping + print("Scoping from mesh", "\n", mesh_scoping_5, "\n") + + + .. tab-item:: From the FieldsContainer + + Extract the mesh Scoping from the |FieldsContainer| using the |extract_scoping| operator. This operator gets the mesh + |Scoping| for each |Field| in the |FieldsContainer|. Thus, you must specify the output as a |ScopingsContainer|. + + .. 
jupyter-execute:: + + # Define the extract_scoping operator + extract_scop_fc_op = ops.utility.extract_scoping(field_or_fields_container=disp_fc) + + # Get the mesh Scopings from the operators output + mesh_scoping_6 = extract_scop_fc_op.outputs.mesh_scoping_as_scopings_container() + + # Print the mesh Scopings + print("Scoping from FieldsContainer", "\n", mesh_scoping_6, "\n") + + .. tab-item:: From the Field + + You can extract the mesh |Scoping| from a |Field| using: + + - The |extract_scoping| operator; + - The |Field.scoping| method. + + **Use the extract_scoping operator** + + This operator gets the mesh |Scoping| from the result |Field|. This means it gets the |Scoping| + where the result is defined at. + + .. jupyter-execute:: + + # Extract the mesh scoping + mesh_scoping_7 = ops.utility.extract_scoping(field_or_fields_container=disp_field).eval() + + # Print the mesh Scoping + print("Scoping from Field ", "\n", mesh_scoping_7, "\n") + + **Use the Field.scoping method** + + This method gets the mesh |Scoping| from the result |Field|. This means it gets the |Scoping| + where the result is defined at. + + .. jupyter-execute:: + + # Extract the mesh scoping + mesh_scoping_8 = disp_field.scoping + + # Print the mesh Scoping + print("Scoping from Field", "\n", mesh_scoping_8, "\n") + +Use a |Scoping| +--------------- + +The |Scoping| object can be used: + +- :ref:`When extracting a result`; +- :ref:`After extracting a result`. + +.. _ref_use_scoping_when_extracting: + +Extract and scope the results +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You can extract and scope a result using the |Model.results| method or the |result op| operator inputs. +Those two approaches handle |Result| objects. Thus, to scope the results when extracting them you use +the *'time_scoping'* and *'mesh_scoping'* arguments and give the Scopings of interest. + +Here, we extract and scope the displacement results. + +..
jupyter-execute:: + + # Extract and scope the result using the Model.results method + disp_model = model_1.results.displacement(time_scoping=time_scoping_1, mesh_scoping=mesh_scoping_1).eval() + + # Extract and scope the results using the result.displacement operator + disp_op = ops.result.displacement(data_sources=ds_1, time_scoping=time_scoping_1, mesh_scoping=mesh_scoping_1).eval() + + # Print the displacement results + print("Displacement from Model.results ", "\n", disp_model, "\n") + print("Displacement from result.displacement operator", "\n", disp_op, "\n") + +.. _ref_use_scoping_after_extracting: + +Extract and rescope the results +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The mesh |Scoping| can be changed after the result extraction or manipulation by using the +|rescope| operator. It takes a |Field| or |FieldsContainer| that contains the results data +and rescope them. + +Here, we rescope the displacement results. + +.. jupyter-execute:: + + # Extract the results for the entire mesh + disp_all_mesh = model_1.results.displacement.eval() + + # Rescope the displacement results to get the data only for a specific set of nodes + disp_rescope = ops.scoping.rescope(fields=disp_all_mesh, mesh_scoping=mesh_scoping_1).eval() + + # Print the displacement results for the entire mesh + print("Displacement results for the entire mesh", "\n", disp_all_mesh, "\n") + + # Print the displacement results for the specific set of nodes + print("Displacement results rescoped ", "\n", disp_rescope, "\n") + + + + diff --git a/doc/source/user_guide/tutorials/index.rst b/doc/source/user_guide/tutorials/index.rst new file mode 100644 index 0000000000..a24466eba5 --- /dev/null +++ b/doc/source/user_guide/tutorials/index.rst @@ -0,0 +1,144 @@ +.. _ref_tutorials: + +Tutorials +--------- + +The tutorials cover specifics features with detailed demonstrations to help +understanding the fundamental PyDPF-Core functionalities and clarify some concepts. 
+They are designed to teach how to perform a task, providing explanations at each stage. + +It helps to have a Python interpreter for hands-on experience, but all code examples are +executed, so the tutorial can be read off-line as well. + +For a complete description of all the objects and modules, see the :ref:`API reference ` +section. + +:fa:`person-running` Beginner's guide +************************************* + +New to PyDPF-Core? Check our beginner's tutorials. They offer an overview +of our package background so you can understand how to work with it. + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: PyDPF-Core data structures + :link: ref_tutorials_data_structures + :link-type: ref + :text-align: center + + Learn the different data structures used by DPF when handling data + + .. grid-item-card:: PyDPF-Core language + :link: ref_tutorials_language_and_usage + :link-type: ref + :text-align: center + + Check an overview on how to use PyDPF-Core API. + Learn the different ways to interact with data by using PyDPF-Core + objects and methods. + + .. grid-item-card:: Post-processing data basics + :link: ref_tutorials_processing_basics + :link-type: ref + :text-align: center + + Learn the basics on a post-processing procedure + using PyDPF-Core based on its usual main steps. The goal is to + transform simulation data into output data that can be used to + visualize and analyze simulation results. + +:fa:`book-open-reader` Common topics +************************************ + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Import Data on DPF + :link: ref_tutorials_import_data + :link-type: ref + :text-align: center + + Understand how to represent data in DPF: either from manual input or from result files. + + .. grid-item-card:: Mesh exploration + :link: ref_tutorials_mesh + :link-type: ref + :text-align: center + + Learn how to explore a mesh in DPF. + + ..
grid-item-card:: Manipulate data with operators and workflows + :link: ref_tutorials_operators_and_workflows + :link-type: ref + :text-align: center + + Learn how to use operators to process your data and build workflows. + + .. grid-item-card:: Export data from DPF + :link: ref_tutorials_export_data + :link-type: ref + :text-align: center + + Discover the best ways to export data from your manipulations with PyDPF-Core. + + .. grid-item-card:: Plot + :link: ref_tutorials_plot + :link-type: ref + :text-align: center + + Explore the different approaches to visualise the data in plots. + + .. grid-item-card:: Animate + :link: ref_tutorials_animate + :link-type: ref + :text-align: center + + Explore the different approaches to visualise the data in an animation. + + .. grid-item-card:: Mathematical operations + :link: ref_tutorials_mathematics + :link-type: ref + :text-align: center + + Learn how to do mathematical operations using PyDPF-Core and data structures + + .. grid-item-card:: Manipulating physics data + :link: ref_tutorials_manipulate_physics_data + :link-type: ref + :text-align: center + + Learn how to manipulate the physics data associate to a + data storage structure. (Unit, homogeneity ...) + + .. grid-item-card:: Enriching DPF capabilities + :link: ref_tutorials_enriching + :link-type: ref + :text-align: center + + Discover how to enhance DPF capabilities by creating new operator’s libraries. + + .. grid-item-card:: Post-process distributed files + :link: ref_tutorials_distributed_files + :link-type: ref + :text-align: center + + Learn how to use PyDPF-Core with distributed files. + + .. grid-item-card:: DPF server + :link: ref_tutorials_dpf_server + :link-type: ref + :text-align: center + + Understand how to manipulate DPF client-server architecture + + .. 
grid-item-card:: Licensing + :link: ref_tutorials_licensing + :link-type: ref + :text-align: center + + Understand how to access the Entry and Premium licensing capabilities \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/language_and_usage/index.rst b/doc/source/user_guide/tutorials/language_and_usage/index.rst new file mode 100644 index 0000000000..d56d10a795 --- /dev/null +++ b/doc/source/user_guide/tutorials/language_and_usage/index.rst @@ -0,0 +1,10 @@ +.. _ref_tutorials_language_and_usage: + +============================= +PyDPF-Core language and usage +============================= + +This tutorials gives you an overview on how the PyDPF-Core API can be used +to interact with data. + +For more detailed information on each module and function, see :ref:`ref_api_section`. diff --git a/doc/source/user_guide/tutorials/licensing/index.rst b/doc/source/user_guide/tutorials/licensing/index.rst new file mode 100644 index 0000000000..e7760c435b --- /dev/null +++ b/doc/source/user_guide/tutorials/licensing/index.rst @@ -0,0 +1,25 @@ +.. _ref_tutorials_licensing: + +========= +Licensing +========= + +This tutorial explains the DPF server licensing logic. Here you +learn about the Entry and Premium licensing capabilities + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Access Entry and Premium Capabilities + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + +.. toctree:: + :maxdepth: 2 + :hidden: + diff --git a/doc/source/user_guide/tutorials/manipulate_physics_data/index.rst b/doc/source/user_guide/tutorials/manipulate_physics_data/index.rst new file mode 100644 index 0000000000..31b4fd3665 --- /dev/null +++ b/doc/source/user_guide/tutorials/manipulate_physics_data/index.rst @@ -0,0 +1,24 @@ +.. 
_ref_tutorials_manipulate_physics_data: + +======================= +Manipulate Physics data +======================= + +This sections demonstrates how to manipulate the physics data associate to a +data storage structure. (Unit, homogeneity ...). + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Unit + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + +.. toctree:: + :maxdepth: 2 + :hidden: diff --git a/doc/source/user_guide/tutorials/mathematics/basic_maths.rst b/doc/source/user_guide/tutorials/mathematics/basic_maths.rst new file mode 100644 index 0000000000..6dc9dbc5b6 --- /dev/null +++ b/doc/source/user_guide/tutorials/mathematics/basic_maths.rst @@ -0,0 +1,786 @@ +.. _ref_basic_math: + +=========== +Basic maths +=========== + +.. include:: ../../../links_and_refs.rst +.. |math operators| replace:: :mod:`math operators ` +.. |fields_factory| replace:: :mod:`fields_factory` +.. |fields_container_factory| replace:: :mod:`fields_container_factory` +.. |over_time_freq_fields_container| replace:: :func:`over_time_freq_fields_container()` +.. |add| replace:: :class:`add` +.. |add_fc| replace:: :class:`add_fc` +.. |minus| replace:: :class:`minus` +.. |minus_fc| replace:: :class:`minus_fc` +.. |accumulate| replace:: :class:`accumulate` +.. |accumulate_fc| replace:: :class:`accumulate_fc` +.. |cross_product| replace:: :class:`cross_product` +.. |cross_product_fc| replace:: :class:`cross_product_fc` +.. |component_wise_divide| replace:: :class:`component_wise_divide` +.. |component_wise_divide_fc| replace:: :class:`component_wise_divide_fc` +.. |generalized_inner_product| replace:: :class:`generalized_inner_product` +.. |generalized_inner_product_fc| replace:: :class:`generalized_inner_product_fc` +.. |overall_dot| replace:: :class:`overall_dot` +.. |outer_product| replace:: :class:`outer_product` +.. |pow| replace:: :class:`pow` +.. |pow_fc| replace:: :class:`pow_fc` +.. |sqr| replace:: :class:`sqr` +.. 
|sqrt| replace:: :class:`sqrt` +.. |sqr_fc| replace:: :class:`sqr_fc` +.. |norm| replace:: :class:`norm` +.. |norm_fc| replace:: :class:`norm_fc` +.. |component_wise_product| replace:: :class:`component_wise_product` +.. |component_wise_product_fc| replace:: :class:`component_wise_product_fc` + +This tutorial explains how to perform some basic mathematical operations with PyDPF-Core. + +DPF exposes data through |Field| objects (or other specialized kinds of fields). +A |Field| is a homogeneous array of floats. + +A |FieldsContainer| is a labeled collection of |Field| objects that most operators can use, +allowing you to operate on several fields at once. + +To perform mathematical operations, use the operators available in the |math operators| module. +First create an instance of the operator of interest, then use the ``.eval()`` method to compute +and retrieve the first output. + +Most operators for mathematical operations can take in a |Field| or a |FieldsContainer|. + +Most mathematical operators have a separate implementation for handling |FieldsContainer| objects +as input, and are recognizable by the suffix ``_fc`` appended to their name. + +This tutorial first shows in :ref:`ref_basic_maths_create_custom_data` how to create the custom fields and field containers it uses. + +It then provides a focus on the effect of the scoping of the fields on the result in :ref:`ref_basic_maths_scoping_handling`, +as well as a focus on the treatment of collections in :ref:`ref_basic_maths_handling_of_collections`. + +It then explains how to use several of the mathematical operators available, both with fields and with field containers. + + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + + +..
_ref_basic_maths_create_custom_data : + +Create fields and field collections +----------------------------------- + +DPF exposes mathematical fields of floats through |Field| and |FieldsContainer| objects. +The |Field| is a homogeneous array of floats and a |FieldsContainer| is a labeled collection of |Field| objects. + +Here, fields and field collections created from scratch are used to show how the +mathematical operators work. + +For more information on creating a |Field| from scratch, see :ref:`ref_tutorials_data_structures`. + +.. tab-set:: + + .. tab-item:: Fields + + Create the fields based on: + + - A number of entities + - A list of IDs and a location, which together define the scoping of the field + + The location defines the type of entity the IDs refer to. It defaults to *nodal*, in which case the scoping is + understood as a list of node IDs, and the field is a nodal field. + + For a more detailed explanation about the influence of the |Scoping| on the operations, + see the :ref:`ref_basic_maths_scoping_handling` section of this tutorial. + + First import the necessary DPF modules. + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the math operators module + from ansys.dpf.core.operators import math as maths + + Create the fields with the |Field| class constructor. + + Helpers are also available in |fields_factory| for easier creation of fields from scratch. + + .. 
jupyter-execute:: + + # Create four nodal 3D vector fields of size 2 + num_entities = 2 + field1 = dpf.Field(nentities=num_entities) + field2 = dpf.Field(nentities=num_entities) + field3 = dpf.Field(nentities=num_entities) + field4 = dpf.Field(nentities=num_entities) + + # Set the scoping IDs + field1.scoping.ids = field2.scoping.ids = field3.scoping.ids = field4.scoping.ids = range(num_entities) + + # Set the data for each field using flat lists (of size = num_entities * num_components) + field1.data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] + field2.data = [7.0, 3.0, 5.0, 8.0, 1.0, 2.0] + field3.data = [6.0, 5.0, 4.0, 3.0, 2.0, 1.0] + field4.data = [4.0, 1.0, 8.0, 5.0, 7.0, 9.0] + + # Print the fields + print("Field 1","\n", field1, "\n"); print("Field 2","\n", field2, "\n"); + print("Field 3","\n", field3, "\n"); print("Field 4","\n", field4, "\n") + + .. tab-item:: Field containers + + Create the collections of fields (called "field containers") using the |fields_container_factory|. + Here, we use the |over_time_freq_fields_container| helper to generate a |FieldsContainer| with *'time'* labels. + + .. jupyter-execute:: + + # Create the field containers + fc1 = dpf.fields_container_factory.over_time_freq_fields_container(fields=[field1, field2]) + fc2 = dpf.fields_container_factory.over_time_freq_fields_container(fields=[field3, field4]) + + # Print the field containers + print("FieldsContainer1","\n", fc1, "\n") + print("FieldsContainer2","\n", fc2, "\n") + + +.. _ref_basic_maths_scoping_handling : + +Effect of the scoping +--------------------- + +The scoping of a DPF field stores information about which entity the data is associated to. +A scalar field containing data for three entities is, for example, linked to a scoping defining three entity IDs. +The location of the scoping defines the type of entity the IDs refer to. +This allows DPF to know what each data point of a field is associated to. 
+ +Operators such as mathematical operators usually perform operations between corresponding entities of fields. + +For example, the addition of two scalar fields does not just add the two data arrays, +which may not be of the same length or may not be ordered the same way. +Instead it uses the scoping of each field to find corresponding entities, their data in each field, +and perform the addition on those. + +This means that the operation is usually performed for entities in the intersection of the two field scopings. + +Some operators provide options to handle data for entities outside of this intersection, +but most simply ignore the data for these entities not in the intersection of the scopings. + +The following examples illustrate this behavior. + +.. jupyter-execute:: + + # Instantiate two nodal 3D vector fields of length 3 + field5 = dpf.Field(nentities=3) + field6 = dpf.Field(nentities=3) + + # Set the data for each field + field5.data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0] + field6.data = [5.0, 1.0, 6.0, 3.0, 8.0, 9.0, 7.0, 2.0, 4.0] + + # Set the scoping IDs (here node IDs) + field5.scoping.ids = [1, 2, 3] + field6.scoping.ids = [3, 4, 5] + + # Print the fields + print("Field 5", "\n", field5, "\n") + print("Field 6", "\n", field6, "\n") + +Here the only entities with matching IDs between the two fields are: + +- The third entity in field5 (ID=3) +- The first entity in field6 (ID=3) + +Other entities are not taken into account when using an operator that needs two operands. + +For example the |add| operator: + +.. jupyter-execute:: + + # Use the add operator + add_scop = dpf.operators.math.add(fieldA=field5, fieldB=field6).eval() + + # Print the result + # The resulting field only contains data for entities where a match is found in the other field. + # It has the size of the intersection of the two scopings. + # Here this means the addition returns a field with data only for the node with ID=3. + # This behavior is specific to each operator. 
+ print(add_scop, "\n") + +Or the |generalized_inner_product| operator: + +.. jupyter-execute:: + + # Use the dot product operator + dot_scop = dpf.operators.math.generalized_inner_product(fieldA=field5, fieldB=field6).eval() + # ID 3: (7. * 5.) + (8. * 1.) + (9. * 6.) + + # Print the result + # The operator returns zero for entities where no match is found in the other field. + # The resulting field is the size of the union of the two scopings. + # This behavior is specific to each operator. + print(dot_scop,"\n") + print(dot_scop.data,"\n") + +.. _ref_basic_maths_handling_of_collections : + +Handling of collections +----------------------- + +Most mathematical operators have a separate implementation for handling |FieldsContainer| objects +as input, and are recognizable by the suffix ``_fc`` appended to their name. + +These operator operate on fields with the same label space. + +Using the two collections of fields built previously, both have a *time* label with an associated value for each field. + +Operators working with |FieldsContainer| inputs match fields from each collection with the same value for all labels. + +In this case, ``field 0`` of ``fc1`` with label space ``{"time": 1}`` gets matched up with ``field 0`` of ``fc2`` also with label space ``{"time": 1}``. +Then ``field 1`` of ``fc1`` with label space ``{"time": 2}`` gets matched up with ``field 1`` of ``fc2`` also with label space ``{"time": 2}``. + +Addition +-------- + +Use: + +- the |add| operator to compute the element-wise addition for each component of two fields +- the |accumulate| operator to compute the overall sum of data for each component of a field + +Element-wise addition +^^^^^^^^^^^^^^^^^^^^^ + +This operator computes the element-wise sum of two fields for each component. + +.. tab-set:: + + .. tab-item:: *add* + + .. jupyter-execute:: + + # Add the fields + add_field = maths.add(fieldA=field1, fieldB=field2).eval() + # id 0: [1.+7. 2.+3. 3.+5.] = [ 8. 5. 8.] + # id 1: [4.+8. 5.+1. 
6.+2.] = [12. 6. 8.] + + # Print the results + print("Addition field ", add_field , "\n") + + .. tab-item:: *add_fc* + + .. jupyter-execute:: + + # Add the two field collections + add_fc = maths.add_fc(fields_container1=fc1, fields_container2=fc2).eval() + # {time: 1}: field1 + field3 + # --> id 0: [1.+6. 2.+5. 3.+4.] = [7. 7. 7.] + # id 1: [4.+3. 5.+2. 6.+1.] = [7. 7. 7.] + # + # {time: 2}: field2 + field4 + # --> id 0: [7.+4. 3.+1. 5.+8.] = [11. 4. 13.] + # id 1: [8.+5. 1.+7. 2.+9.] = [13. 8. 11.] + + # Print the results + print("Addition FieldsContainers","\n", add_fc , "\n") + print(add_fc.get_field({"time":1}), "\n") + print(add_fc.get_field({"time":2}), "\n") + +Overall sum +^^^^^^^^^^^ + +This operator computes the total sum of elementary data of a field, for each component of the field. +You can give a scaling ("weights") argument. + + Keep in mind the |Field| dimension. The |Field| represents 3D vectors, so each elementary data is a 3D vector. + The optional "weights" |Field| attribute is a scaling factor for each entity when performing the sum, + so you must provide a 1D field. + +Compute the total sum (accumulate) for each component of a given |Field|. + +.. tab-set:: + + .. tab-item:: *accumulate* + + .. jupyter-execute:: + + # Compute the total sum of a field + tot_sum_field = maths.accumulate(fieldA=field1).eval() + # vector component 0 = 1. + 4. = 5. + # vector component 1 = 2. + 5. = 7. + # vector component 2 = 3. + 6. = 9. + + # Print the results + print("Total sum fields","\n", tot_sum_field, "\n") + + .. tab-item:: *accumulate_fc* + + .. jupyter-execute:: + + # Find the total sum of the two field collections + tot_sum_fc = maths.accumulate_fc(fields_container=fc1).eval() + # {time: 1}: field1 + # --> vector component 0 = 1.+ 4. = 5. + # vector component 1 = 2.+ 5. = 7. + # vector component 2 = 3.+ 6. = 9. + # + # {time: 2}: field2 + # --> vector component 0 = 7.+ 8. = 15. + # vector component 1 = 3.+ 1. = 4. + # vector component 2 = 5.+ 2. = 7. 
+ + # Print the results + print("Total sum FieldsContainers","\n", tot_sum_fc , "\n") + print(tot_sum_fc.get_field({"time":1}), "\n") + print(tot_sum_fc.get_field({"time":2}), "\n") + +Compute the total sum (accumulate) for each component of a given |Field| using a scale factor field. + +.. tab-set:: + + .. tab-item:: *accumulate* + + .. jupyter-execute:: + + # Define the scale factor field + scale_vect = dpf.Field(nentities=num_entities, nature=dpf.natures.scalar) + # Set the scale factor field scoping IDs + scale_vect.scoping.ids = range(num_entities) + # Set the scale factor field data + scale_vect.data = [5., 2.] + + # Compute the total sum of the field using a scaling field + tot_sum_field_scale = maths.accumulate(fieldA=field1, weights=scale_vect).eval() + # vector component 0 = (1.0 * 5.0) + (4.0 * 2.0) = 13. + # vector component 1 = (2.0 * 5.0) + (5.0 * 2.0) = 20. + # vector component 2 = (3.0 * 5.0) + (6.0 * 2.0) = 27. + + # Print the results + print("Total weighted sum:","\n", tot_sum_field_scale, "\n") + + .. tab-item:: *accumulate_fc* + + .. jupyter-execute:: + + # Total scaled sum of the two field collections (accumulate) + tot_sum_fc_scale = maths.accumulate_fc(fields_container=fc1, weights=scale_vect).eval() + # {time: 1}: field1 + # --> vector component 0 = (1.0 * 5.0) + (4.0 * 2.0) = 13. + # vector component 1 = (2.0 * 5.0) + (5.0 * 2.0) = 20. + # vector component 2 = (3.0 * 5.0) + (6.0 * 2.0) = 27. + # + # {time: 2}: field2 + # --> vector component 0 = (7.0 * 5.0) + (8.0 * 2.0) = 51. + # vector component 1 = (3.0 * 5.0) + (1.0 * 2.0) = 17. + # vector component 2 = (5.0 * 5.0) + (2.0 * 2.0) = 29. + + # Print the results + print("Total sum FieldsContainers scale","\n", tot_sum_fc_scale , "\n") + print(tot_sum_fc_scale.get_field({"time":1}), "\n") + print(tot_sum_fc_scale.get_field({"time":2}), "\n") + +Subtraction +----------- + +Use the |minus| operator to compute the element-wise difference between each component of two fields. + +.. 
tab-set:: + + .. tab-item:: *minus* + + .. jupyter-execute:: + + # Subtraction of two 3D vector fields + minus_field = maths.minus(fieldA=field1, fieldB=field2).eval() + # id 0: [1.-7. 2.-3. 3.-5.] = [-6. -1. -2.] + # id 1: [4.-8. 5.-1. 6.-2.] = [-4. 4. 4.] + + # Print the results + print("Subtraction field","\n", minus_field , "\n") + + .. tab-item:: *minus_fc* + + .. jupyter-execute:: + + # Subtraction of two field collections + minus_fc = maths.minus_fc( + field_or_fields_container_A=fc1, + field_or_fields_container_B=fc2 + ).eval() + # {time: 1}: field1 - field3 + # --> id 0: [1.-6. 2.-5. 3.-4.] = [-5. -3. -1.] + # id 1: [4.-3. 5.-2. 6.-1.] = [1. 3. 5.] + # + # {time: 2}: field2 - field4 + # --> id 0: [7.-4. 3.-1. 5.-8.] = [3. 2. -3.] + # id 1: [8.-5. 1.-7. 2.-9.] = [3. -6. -7.] + + # Print the results + print("Subtraction field collection","\n", minus_fc , "\n") + print(minus_fc.get_field({"time":1}), "\n") + print(minus_fc.get_field({"time":2}), "\n") + +Element-wise product +-------------------- + +Use the |component_wise_product| operator to compute the element-wise product between each component of two fields. +Also known as the `Hadamard product `_, the *entrywise product* or *Schur product*. + +.. tab-set:: + + .. tab-item:: *component_wise_product* + + .. jupyter-execute:: + + # Compute the Hadamard product of two fields + element_prod_field = maths.component_wise_product(fieldA=field1, fieldB=field2).eval() + # id 0: [1.*7. 2.*3. 3.*5.] = [7. 6. 15.] + # id 1: [4.*8. 5.*1. 6.*2.] = [32. 5. 12.] + + # Print the results + print("Element-wise product field","\n", element_prod_field , "\n") + + .. tab-item:: *component_wise_product_fc* + + The current implementation of |component_wise_product_fc| only performs the Hadamard product + for each field in a collection with a distinct unique field. + + The element-wise product between two field collections is not implemented. + + .. 
jupyter-execute::
+
+            # Component-wise (Hadamard) product of each field in a collection and a single unique field
+            element_prod_fc = maths.component_wise_product_fc(fields_container=fc1, fieldB=field3).eval()
+            # {time: 1}: field1 and field3
+            # --> id 0: [1.*6. 2.*5. 3.*4.] = [6. 10. 12.]
+            #     id 1: [4.*3. 5.*2. 6.*1.] = [12. 10. 6.]
+            #
+            # {time: 2}: field2 and field3
+            # --> id 0: [7.*6. 3.*5. 5.*4.] = [42. 15. 20.]
+            #     id 1: [8.*3. 1.*2. 2.*1.] = [24. 2. 2.]
+
+            # Print the results
+            print("Element product FieldsContainer","\n", element_prod_fc , "\n")
+            print(element_prod_fc.get_field({"time":1}), "\n")
+            print(element_prod_fc.get_field({"time":2}), "\n")
+
+
+
+Cross product
+-------------
+
+Use the |cross_product| operator to compute the `cross product `_ between two vector fields.
+
+.. tab-set::
+
+    .. tab-item:: *cross_product*
+
+        .. jupyter-execute::
+
+            # Compute the cross product
+            cross_prod_field = maths.cross_product(fieldA=field1, fieldB=field2).eval()
+            # id 0: [(2.*5. - 3.*3.) (3.*7. - 1.*5.) (1.*3. - 2.*7.)] = [1. 16. -11.]
+            # id 1: [(5.*2. - 6.*1.) (6.*8. - 4.*2.) (4.*1. - 5.*8.)] = [4. 40. -36.]
+
+            # Print the results
+            print("Cross product field","\n", cross_prod_field , "\n")
+
+    .. tab-item:: *cross_product_fc*
+
+        .. jupyter-execute::
+
+            # Cross product of two field collections
+            cross_prod_fc = maths.cross_product_fc(field_or_fields_container_A=fc1,field_or_fields_container_B=fc2).eval()
+            # {time: 1}: field1 X field3
+            # --> id 0: [(2.*4. - 3.*5.) (3.*6. - 1.*4.) (1.*5. - 2.*6.)] = [-7. 14. -7.]
+            #     id 1: [(5.*1. - 6.*2.) (6.*3. - 4.*1.) (4.*2. - 5.*3.)] = [-7. 14. -7.]
+            #
+            # {time: 2}: field2 X field4
+            # --> id 0: [(3.*8. - 5.*1.) (5.*4. - 7.*8.) (7.*1. - 3.*4.)] = [19. -36. -5.]
+            #     id 1: [(1.*9. - 2.*7.) (2.*5. - 8.*9.) (8.*7. - 1.*5.)] = [-5. -62. 51.] 
+ + # Print the results + print("Cross product FieldsContainer","\n", cross_prod_fc , "\n") + print(cross_prod_fc.get_field({"time":1}), "\n") + print(cross_prod_fc.get_field({"time":2}), "\n") + +Dot product +----------- + +Here, DPF provides two operations: + +- Use the |generalized_inner_product| operator to compute the `inner product `_ (also known as *dot product* or *scalar product*) between vector data of entities in two fields +- Use the |overall_dot| operator to compute the sum over all entities of the inner product of two vector fields + +Inner product +^^^^^^^^^^^^^ + +The |generalized_inner_product| operator computes a general notion of inner product between two vector fields. +In Cartesian coordinates it is equivalent to the dot/scalar product. + +.. tab-set:: + + .. tab-item:: *generalized_inner_product* + + .. jupyter-execute:: + + # Generalized inner product of two fields + dot_prod_field = maths.generalized_inner_product(fieldA=field1, fieldB=field2).eval() + # id 0: (1. * 7.) + (2. * 3.) + (3. * 5.) = 28. + # id 1: (4. * 8.) + (5. * 1.) + (6. * 2.) = 49. + + # Print the results + print("Dot product field","\n", dot_prod_field , "\n") + + .. tab-item:: *generalized_inner_product_fc* + + .. jupyter-execute:: + + # Generalized inner product of two field collections + dot_prod_fc = maths.generalized_inner_product_fc(field_or_fields_container_A=fc1, field_or_fields_container_B=fc2).eval() + # {time: 1}: field1 X field3 + # --> id 0: (1. * 6.) + (2. * 5.) + (3. * 4.) = 28. + # id 1: (4. * 3.) + (5. * 2.) + (6. * 1.) = 28. + # + # {time: 2}: field2 X field4 + # --> id 0: (7. * 4.) + (3. * 1.) + (5. * 8.) = 71. + # id 1: (8. * 5.) + (1. * 7.) + (2. * 9.) = 65. 
+
+            # Print the results
+            print("Dot product FieldsContainer","\n", dot_prod_fc , "\n")
+            print(dot_prod_fc.get_field({"time":1}), "\n")
+            print(dot_prod_fc.get_field({"time":2}), "\n")
+
+Overall dot product
+^^^^^^^^^^^^^^^^^^^
+
+The |overall_dot| operator performs two operations to produce the result:
+
+1. it first computes a dot product between data of corresponding entities for two vector fields, resulting in a scalar field
+2. it then sums the result obtained previously over all entities to return a scalar
+
+.. tab-set::
+
+    .. tab-item:: *overall_dot*
+
+        .. jupyter-execute::
+
+            # Overall dot product of two fields
+            overall_dot = maths.overall_dot(fieldA=field1, fieldB=field2).eval()
+            # sum over all entities: (1. * 7.) + (2. * 3.) + (3. * 5.) + (4. * 8.) + (5. * 1.) + (6. * 2.) = 77.
+
+            # Print the results
+            print("Overall dot","\n", overall_dot , "\n")
+
+    .. tab-item:: *overall_dot_fc*
+
+        The ``overall_dot_fc`` operator is not available.
+
+Division
+--------
+
+Use the |component_wise_divide| operator to compute the
+`Hadamard division `_
+between each component of two fields.
+
+.. tab-set::
+
+    .. tab-item:: *component_wise_divide*
+
+        .. jupyter-execute::
+
+            # Divide a field by another field
+            comp_wise_div = maths.component_wise_divide(fieldA=field1, fieldB=field2).eval()
+            # id 0: [1./7. 2./3. 3./5.] = [0.143 0.667 0.6]
+            # id 1: [4./8. 5./1. 6./2.] = [0.5 5. 3.]
+
+            # Print the results
+            print("Component-wise division field","\n", comp_wise_div , "\n")
+
+    .. tab-item:: *component_wise_divide_fc*
+
+        .. jupyter-execute::
+
+            # Component-wise division between two field collections
+            comp_wise_div_fc = maths.component_wise_divide_fc(fields_containerA=fc1, fields_containerB=fc2).eval()
+            # {time: 1}: field1 / field3
+            # --> id 0: [1./6. 2./5. 3./4.] = [0.167 0.4 0.75]
+            #     id 1: [4./3. 5./2. 6./1.] = [1.333 2.5 6.]
+            #
+            # {time: 2}: field2 / field4
+            # --> id 0: [7./4. 3./1. 5./8.] = [1.75 3. 0.625]
+            #     id 1: [8./5. 1./7. 2./9.] 
= [1.6 0.143 0.222] + + # Print the results + print("Component-wise division FieldsContainer","\n", comp_wise_div_fc , "\n") + print(comp_wise_div_fc.get_field({"time":1}), "\n") + print(comp_wise_div_fc.get_field({"time":2}), "\n") + +Power +----- + +Use: + +- the |pow| operator to compute the element-wise power of each component of a |Field| +- the |sqr| operator to compute the `Hadamard power `_ of each component of a |Field| +- the |sqrt| operator to compute the `Hadamard root `_ of each component of a |Field| + +*pow* operator +^^^^^^^^^^^^^^ + +The |pow| operator computes the element-wise power of each component of a |Field| to a given factor. + +This example computes the power of three. + +.. tab-set:: + + .. tab-item:: *pow* + + .. jupyter-execute:: + + # Define the power factor + pow_factor = 3.0 + # Compute the power of three of a field + pow_field = maths.pow(field=field1, factor=pow_factor).eval() + # id 0: [(1.^3.) (2.^3.) (3.^3.)] = [1. 8. 27.] + # id 1: [(4.^3.) (5.^3.) (6.^3.)] = [64. 125. 216.] + + # Print the results + print("Power field","\n", pow_field , "\n") + + .. tab-item:: *pow_fc* + + .. jupyter-execute:: + + # Compute the power of three of a field collection + pow_fc = maths.pow_fc(fields_container=fc1, factor=pow_factor).eval() + # {time: 1}: field1 + # --> id 0: [(1.^3.) (2.^3.) (3.^3.)] = [1. 8. 27.] + # id 1: [(4.^3.) (5.^3.) (6.^3.)] = [64. 125. 216.] + # + # {time: 2}: field2 + # --> id 0: [(7.^3.) (3.^3.) (5.^3.)] = [343. 27. 125.] + # id 1: [(8.^3.) (1.^3.) (2.^3.)] = [512. 1. 8.] + + # Print the results + print("Power FieldsContainer","\n", pow_fc , "\n") + print(pow_fc.get_field({"time":1}), "\n") + print(pow_fc.get_field({"time":2}), "\n") + +*sqr* operator +^^^^^^^^^^^^^^ + +The |sqr| operator computes the element-wise power of two +(`Hadamard power `_) +for each component of a |Field|. +It is a shortcut for the |pow| operator with factor 2. + +.. tab-set:: + + .. tab-item:: *sqr* + + .. 
jupyter-execute:: + + # Compute the power of two of a field + sqr_field = maths.sqr(field=field1).eval() + # id 0: [(1.^2.) (2.^2.) (3.^2.)] = [1. 4. 9.] + # id 1: [(4.^2.) (5.^2.) (6.^2.)] = [16. 25. 36.] + + print("^2 field","\n", sqr_field , "\n") + + .. tab-item:: *sqr_fc* + + .. jupyter-execute:: + + # Compute the power of two of a field collection + sqr_fc = maths.sqr_fc(fields_container=fc1).eval() + # {time: 1}: field1 + # --> id 0: [(1.^2.) (2.^2.) (3.^2.)] = [1. 4. 9.] + # id 1: [(4.^2.) (5.^2.) (6.^2.)] = [16. 25. 36.] + # + # {time: 2}: field2 + # --> id 0: [(7.^2.) (3.^2.) (5.^2.)] = [49. 9. 25.] + # id 1: [(8.^2.) (1.^2.) (2.^2.)] = [64. 1. 4.] + + # Print the results + print("^2 FieldsContainer","\n", sqr_fc , "\n") + print(sqr_fc.get_field({"time":1}), "\n") + print(sqr_fc.get_field({"time":2}), "\n") + +*sqrt* operator +^^^^^^^^^^^^^^^ + +The |sqrt| operator computes the element-wise square-root +(`Hadamard root `_) +for each component of a |Field|. +It is a shortcut for the |pow| operator with factor *0.5*. + +.. tab-set:: + + .. tab-item:: *sqrt* + + .. jupyter-execute:: + + # Compute the square-root of a field + sqrt_field = maths.sqrt(field=field1).eval() + # id 0: [(1.^0.5) (2.^0.5) (3.^0.5)] = [1. 1.414 1.732] + # id 1: [(4.^0.5) (5.^0.5) (6.^0.5)] = [2. 2.236 2.449] + + print("^0.5 field","\n", sqrt_field , "\n") + + .. tab-item:: *sqrt_fc* + + .. jupyter-execute:: + + # Compute the square-root of a field collection + sqrt_fc = maths.sqrt_fc(fields_container=fc1).eval() + # {time: 1}: field1 + # --> id 0: [(1.^.5) (2.^.5) (3.^.5)] = [1. 1.414 1.732] + # id 1: [(4.^.5) (5.^.5) (6.^.5)] = [2. 2.236 2.449] + # + # {time: 2}: field2 + # --> id 0: [(7.^.5) (3.^.5) (5.^.5)] = [2.645 1.732 2.236] + # id 1: [(8.^.5) (1.^.5) (2.^.5)] = [2.828 1. 
1.414]
+
+            # Print the results
+            print("Sqrt FieldsContainer","\n", sqrt_fc , "\n")
+            print(sqrt_fc.get_field({"time":1}), "\n")
+            print(sqrt_fc.get_field({"time":2}), "\n")
+
+Norm
+----
+
+Use the |norm| operator to compute the
+`Lp norm `_
+of the elementary data for each entity of a |Field|.
+
+The default *Lp* norm is *Lp=L2*.
+
+.. tab-set::
+
+    .. tab-item:: *norm*
+
+        .. jupyter-execute::
+
+            # Compute the L2 norm of a field
+            norm_field = maths.norm(field=field1, scalar_int=2).eval()
+            # id 0: [(1.^2.) + (2.^2.) + (3.^2.)] ^1/2 = 3.742
+            # id 1: [(4.^2.) + (5.^2.) + (6.^2.)] ^1/2 = 8.775
+
+            # Print the results
+            print("Norm field","\n", norm_field , "\n")
+
+    .. tab-item:: *norm_fc*
+
+        .. jupyter-execute::
+
+            # Define the L2 norm of a field collection
+            norm_fc = maths.norm_fc(fields_container=fc1).eval()
+            # {time: 1}: field1
+            # --> id 0: [(1.^2.) + (2.^2.) + (3.^2.)] ^1/2 = 3.742
+            #     id 1: [(4.^2.) + (5.^2.) + (6.^2.)] ^1/2 = 8.775
+            #
+            # {time: 2}: field2
+            # --> id 0: [(7.^2.) + (3.^2.) + (5.^2.)] ^1/2 = 9.110
+            #     id 1: [(8.^2.) + (1.^2.) + (2.^2.)] ^1/2 = 8.307
+
+            # Print the results
+            print("Norm FieldsContainer","\n", norm_fc , "\n")
+            print(norm_fc.get_field({"time":1}), "\n")
+            print(norm_fc.get_field({"time":2}), "\n")
diff --git a/doc/source/user_guide/tutorials/mathematics/index.rst b/doc/source/user_guide/tutorials/mathematics/index.rst
new file mode 100644
index 0000000000..8d08b911a2
--- /dev/null
+++ b/doc/source/user_guide/tutorials/mathematics/index.rst
@@ -0,0 +1,33 @@
+.. _ref_tutorials_mathematics:
+
+===========
+Mathematics
+===========
+
+DPF provides operators for implementing mathematical operations, ranging
+from addition and multiplication to FFT and QR solving.
+
+This section explains how you can perform mathematical operations using the
+PyDPF-Core API and data structures.
+
+.. grid:: 1 1 3 3
+   :gutter: 2
+   :padding: 2
+   :margin: 2
+
+   .. 
grid-item-card:: Basic maths
+      :link: ref_basic_math
+      :link-type: ref
+      :text-align: center
+
+      This tutorial explains how to do some basic
+      mathematical operations with PyDPF-Core.
+
+.. toctree::
+    :maxdepth: 2
+    :hidden:
+
+    basic_maths.rst
+
+
+
diff --git a/doc/source/user_guide/tutorials/mesh/create_a_mesh_from_scratch.rst b/doc/source/user_guide/tutorials/mesh/create_a_mesh_from_scratch.rst
new file mode 100644
index 0000000000..902bd72be8
--- /dev/null
+++ b/doc/source/user_guide/tutorials/mesh/create_a_mesh_from_scratch.rst
@@ -0,0 +1,157 @@
+.. _ref_tutorials_create_a_mesh_from_scratch:
+
+==========================
+Create a mesh from scratch
+==========================
+
+.. include:: ../../../links_and_refs.rst
+
+This tutorial demonstrates how to build a |MeshedRegion| from scratch.
+
+The mesh object in DPF is a |MeshedRegion|. You can create your own |MeshedRegion| object and use it
+with DPF operators. The ability to use scripting to create any DPF entity means
+that you are not dependent on result files and can connect the DPF environment
+with any Python tool.
+
+In this tutorial, we create a parallelepiped mesh made of linear hexa elements.
+
+:jupyter-download-script:`Download tutorial as Python script`
+:jupyter-download-notebook:`Download tutorial as Jupyter notebook`
+
+Import the necessary modules
+----------------------------
+
+Import the ``ansys.dpf.core`` module, including the operators module and the numpy library.
+
+.. jupyter-execute::
+
+    # Import the numpy library
+    import numpy as np
+    # Import the ``ansys.dpf.core`` module
+    from ansys.dpf import core as dpf
+    # Import the operators module
+    from ansys.dpf.core import operators as ops
+
+Define the mesh dimensions
+--------------------------
+
+.. 
jupyter-execute:: + + # Define the mesh dimensions + length = 0.1 + width = 0.05 + depth = 0.1 + num_nodes_in_length = 10 + num_nodes_in_width = 5 + num_nodes_in_depth = 10 + # Create a MeshedRegion object + my_meshed_region = dpf.MeshedRegion() + +Define the connectivity function +-------------------------------- + +To create a mesh you must define the nodes connectivity. This means to define +the nodes ids connected to each element. + +Here, we create a function that will find this connectivity. + +.. jupyter-execute:: + + def search_sequence_numpy(arr, node): + """Find the node location in an array of nodes and return its index.""" + indexes = np.isclose(arr, node) + match = np.all(indexes, axis=1).nonzero() + return int(match[0][0]) + +Add nodes +--------- + +Add |Nodes| to the |MeshedRegion| object. + +.. jupyter-execute:: + + node_id = 1 + for i, x in enumerate( + [float(i) * length / float(num_nodes_in_length) for i in range(0, num_nodes_in_length)] + ): + for j, y in enumerate( + [float(i) * width / float(num_nodes_in_width) for i in range(0, num_nodes_in_width)] + ): + for k, z in enumerate( + [float(i) * depth / float(num_nodes_in_depth) for i in range(0, num_nodes_in_depth)] + ): + my_meshed_region.nodes.add_node(node_id, [x, y, z]) + node_id += 1 + +Get the nodes coordinates field. + +.. jupyter-execute:: + + my_nodes_coordinates = my_meshed_region.nodes.coordinates_field + +Set the mesh properties +----------------------- + +Set the mesh unit. + +.. jupyter-execute:: + + my_meshed_region.unit = "mm" + +Set the nodes coordinates. + +.. jupyter-execute:: + + # Get the nodes coordinates data + my_nodes_coordinates_data = my_nodes_coordinates.data + # As we use the connectivity function we need to get the data as a list + my_nodes_coordinates_data_list = my_nodes_coordinates.data_as_list + # Set the nodes scoping + my_coordinates_scoping = my_nodes_coordinates.scoping + +Add elements +------------ +Add |Elements| to the |MeshedRegion| object. + +.. 
jupyter-execute:: + + # Add solid elements (linear hexa with eight nodes): + element_id = 1 + # Precompute node spacings + dx = length / float(num_nodes_in_length) + dy = width / float(num_nodes_in_width) + dz = depth / float(num_nodes_in_depth) + # Generate node coordinates + x_coords = [i * dx for i in range(num_nodes_in_length - 1)] + y_coords = [j * dy for j in range(num_nodes_in_width - 1)] + z_coords = [k * dz for k in range(num_nodes_in_depth - 1)] + # Iterate through the grid + for x in x_coords: + for y in y_coords: + for z in z_coords: + coord1 = np.array([x, y, z]) + connectivity = [] + # Generate connectivity for the current element + for xx in [x, x + dx]: + for yy in [y, y + dy]: + for zz in [z, z + dz]: + scoping_index = search_sequence_numpy(my_nodes_coordinates_data, + [xx, yy, zz]) + connectivity.append(scoping_index) + # Rearrange connectivity to maintain element orientation + connectivity[2], connectivity[3] = connectivity[3], connectivity[2] + connectivity[6], connectivity[7] = connectivity[7], connectivity[6] + # Add the solid element + my_meshed_region.elements.add_solid_element(element_id, connectivity) + element_id += 1 + +Plot the mesh +------------- + +You can check the mesh we just created with a plot. For more information on how to plot a mesh see +the :ref:`ref_tutorials_plotting_meshes` tutorial. + +.. jupyter-execute:: + + # Plot the mesh + my_meshed_region.plot() \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/mesh/explore_mesh.rst b/doc/source/user_guide/tutorials/mesh/explore_mesh.rst new file mode 100644 index 0000000000..043fc25ec2 --- /dev/null +++ b/doc/source/user_guide/tutorials/mesh/explore_mesh.rst @@ -0,0 +1,283 @@ +.. _ref_tutorials_explore_mesh: + +============== +Explore a mesh +============== + +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst +.. |PropertyField| replace:: :class:`PropertyField ` +.. 
|element_types| replace:: :class:`list of available element types in a DPF mesh` +.. |StringField| replace:: :class:`StringField ` + +This tutorial explains how to access a mesh data and metadata so it can be manipulated. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Define the mesh +--------------- + +The mesh object in DPF is a |MeshedRegion|. You can obtain a |MeshedRegion| by creating your +own from scratch or by getting it from a result file. For more information check the +:ref:`ref_tutorials_create_a_mesh_from_scratch` and :ref:`ref_tutorials_get_mesh_from_result_file` tutorials. + +For this tutorial, we get a |MeshedRegion| from a result file. You can use one available in the |Examples| module. +For more information see the :ref:`ref_tutorials_get_mesh_from_result_file` tutorial. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_1 = examples.find_static_rst() + # Create the model + model_1 = dpf.Model(data_sources=result_file_path_1) + # Get the mesh + meshed_region_1 = model_1.metadata.meshed_region + + .. tab-item:: LSDYNA + + .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_2 = examples.download_d3plot_beam() + # Create the DataSources object + ds_2 = dpf.DataSources() + ds_2.set_result_file_path(filepath=result_file_path_2[0], key="d3plot") + ds_2.add_file_path(filepath=result_file_path_2[3], key="actunits") + # Create the model + model_2 = dpf.Model(data_sources=ds_2) + # Get the mesh + meshed_region_2 = model_2.metadata.meshed_region + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_3 = examples.download_fluent_axial_comp()["flprj"] + # Create the model + model_3 = dpf.Model(data_sources=result_file_path_3) + # Get the mesh + meshed_region_3 = model_3.metadata.meshed_region + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_4 = examples.download_cfx_mixing_elbow() + # Create the model + model_4 = dpf.Model(data_sources=result_file_path_4) + # Get the mesh + meshed_region_4 = model_4.metadata.meshed_region + +Explore the mesh data +--------------------- + +You can access the mesh data by manipulating the |MeshedRegion| object methods. 
+The mesh data includes:
+
+- Unit
+- Nodes, elements and faces
+- Named selections
+
+The :attr:`MeshedRegion.nodes `, :attr:`MeshedRegion.elements `, :attr:`MeshedRegion.faces ` and :attr:`MeshedRegion.named_selections ` properties return the corresponding DPF objects:
+|Nodes|, |Elements|, |Faces| and |Scoping|.
+
+For more information about other types of data you can get from a mesh, see the API reference of the |MeshedRegion| class.
+
+In this tutorial, we explore the data about the mesh nodes.
+
+.. tab-set::
+
+    .. tab-item:: MAPDL
+
+        .. jupyter-execute::
+
+            # Get the mesh nodes
+            nodes_1 = meshed_region_1.nodes
+
+            # Print the object type
+            print("Object type: ",type(nodes_1),'\n')
+
+            # Print the nodes
+            print("Nodes: ", nodes_1)
+
+    .. tab-item:: LSDYNA
+
+        .. jupyter-execute::
+
+            # Get the mesh nodes
+            nodes_2 = meshed_region_2.nodes
+
+            # Print the object type
+            print("Object type: ",type(nodes_2),'\n')
+
+            # Print the nodes
+            print("Nodes: ", nodes_2)
+
+    .. tab-item:: Fluent
+
+        .. jupyter-execute::
+
+            # Get the mesh nodes
+            nodes_3 = meshed_region_3.nodes
+
+            # Print the object type
+            print("Object type: ",type(nodes_3),'\n')
+
+            # Print the nodes
+            print("Nodes: ", nodes_3)
+
+    .. tab-item:: CFX
+
+        .. jupyter-execute::
+
+            # Get the mesh nodes
+            nodes_4 = meshed_region_4.nodes
+
+            # Print the object type
+            print("Object type: ",type(nodes_4),'\n')
+
+            # Print the nodes
+            print("Nodes: ", nodes_4)
+
+Explore the mesh metadata
+-------------------------
+
+You can access the mesh metadata by manipulating the |MeshedRegion| object properties.
+
+The mesh metadata information describes the mesh composition.
+
+You can access which metadata information is available for a given result file.
+
+.. tab-set::
+
+    .. tab-item:: MAPDL
+
+        .. jupyter-execute::
+
+            # Get the available properties
+            available_props_1 = meshed_region_1.available_property_fields
+
+            # Print the available properties
+            print("Available properties: ", available_props_1)
+
+    .. 
tab-item:: LSDYNA + + .. jupyter-execute:: + + # Get the available properties + available_props_2 = meshed_region_2.available_property_fields + + # Print the available properties + print("Available properties: ", available_props_2) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Get the available properties + available_props_3 = meshed_region_3.available_property_fields + + # Print the available properties + print("Available properties: ", available_props_3) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Get the available properties + available_props_4 = meshed_region_4.available_property_fields + + # Print the available properties + print("Available properties: ", available_props_4) + +You can also chose which property you want to extract. + +When extracting the properties you get a |PropertyField| with that information. Their data is mapped to +the entity they are defined at. + +Here, we extract the element types for the mesh elements. + +The element type is given as a number. See the |element_types| to find the +corresponding element name. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Get the element types on the mesh + el_types_1 = meshed_region_1.elements.element_types_field + + # Print the element types by element + print(el_types_1) + + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Get the element types on the mesh + el_types_2 = meshed_region_2.property_field(property_name="eltype") + + # Print the element types by element + print(el_types_2) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Get the element types on the mesh + el_types_3 = meshed_region_3.property_field(property_name="eltype") + + # Print the element types by element + print(el_types_3) + + .. tab-item:: CFX + + .. 
jupyter-execute:: + + # Get the element types on the mesh + el_types_4 = meshed_region_4.property_field(property_name="eltype") + + # Print the element types by element + print(el_types_4) + +For more information about how to explore a mesh metadata before extracting it from a result file, see the +:ref:`ref_tutorials_explore_mesh_metadata` tutorial. \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/mesh/explore_mesh_metadata.rst b/doc/source/user_guide/tutorials/mesh/explore_mesh_metadata.rst new file mode 100644 index 0000000000..bc211df4f5 --- /dev/null +++ b/doc/source/user_guide/tutorials/mesh/explore_mesh_metadata.rst @@ -0,0 +1,187 @@ +.. _ref_tutorials_explore_mesh_metadata: + +======================= +Explore a mesh metadata +======================= + +:bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst +.. |PropertyField| replace:: :class:`PropertyField ` +.. |StringField| replace:: :class:`StringField ` + +This tutorial explains how to read a mesh metadata (data about the elements, nodes, faces, region, zone ...) +before extracting the mesh from a result file. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Get the result file +------------------- + +First, import a result file. For this tutorial, you can use one available in the |Examples| module. +For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_data` +tutorial section. + +.. tab-set:: + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + + # Define the result file path + result_file_path_2 = examples.download_d3plot_beam() + + .. tab-item:: Fluent + + .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + + # Define the result file path + result_file_path_3 = examples.download_fluent_axial_comp()["flprj"] + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + + # Define the result file path + result_file_path_4 = examples.download_cfx_mixing_elbow() + +Create the |Model| +------------------ + +Create a |Model| object with the result file. The |Model| is a helper designed to give shortcuts to +access the analysis results metadata and to instanciate results providers by opening a |DataSources| or a Streams. + +.. tab-set:: + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Create the DataSources object + ds_2 = dpf.DataSources() + ds_2.set_result_file_path(filepath=result_file_path_2[0], key="d3plot") + ds_2.add_file_path(filepath=result_file_path_2[3], key="actunits") + # Create the Model + model_2 = dpf.Model(data_sources=ds_2) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Create the Model + model_3 = dpf.Model(data_sources=result_file_path_3) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Create the Model + model_4 = dpf.Model(data_sources=result_file_path_4) + +Explore the mesh metadata +------------------------- + +You can access the mesh metadata with the |MeshInfo| object. It reads the metadata information before extracting +the |MeshedRegion| from the result file. + +The mesh metadata information is stored in a |PropertyField| or in a |StringField|. They contain information +that describes the mesh composition and their data is mapped to the entity they are defined at. 
+The mesh metadata information can be: + +- Properties +- Parts +- Faces +- Bodies +- Zones +- Number of nodes and elements +- Element types + +You can access which metadata information is available for a given result file. + +.. tab-set:: + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Get the mesh metadata information + mesh_info_2 = model_2.metadata.mesh_info + + # Print the mesh metadata information + print(mesh_info_2) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Get the mesh metadata information + mesh_info_3 = model_3.metadata.mesh_info + + # Print the mesh metadata information + print(mesh_info_3) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Get the mesh metadata information + mesh_info_4 = model_4.metadata.mesh_info + + # Print the mesh metadata information + print(mesh_info_4) + +You can also extract each piece of mesh metadata information by manipulating the |MeshInfo| object properties. + +For example, we can check the part names (for the LSDYNA result file) or the cell zone names +(for the Fluent or CFX result files): + +.. tab-set:: + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Get the part names + cell_zones_2 = mesh_info_2.get_property("part_names") + + # Print the part names + print(cell_zones_2) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Get the cell zone names + cell_zones_3 = mesh_info_3.get_property("cell_zone_names") + + # Print the cell zone names + print(cell_zones_3) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Get the cell zone names + cell_zones_4 = mesh_info_4.get_property("cell_zone_names") + + # Print the cell zone names + print(cell_zones_4) + +For more information on reading a mesh from an LSDYNA, Fluent or CFX file, check the :ref:`examples_lsdyna`, +:ref:`fluids_examples` and :ref:`examples_cfx` examples sections. 
\ No newline at end of file diff --git a/doc/source/user_guide/tutorials/mesh/extract_mesh_in_split_parts.rst b/doc/source/user_guide/tutorials/mesh/extract_mesh_in_split_parts.rst new file mode 100644 index 0000000000..f46d3e9bc4 --- /dev/null +++ b/doc/source/user_guide/tutorials/mesh/extract_mesh_in_split_parts.rst @@ -0,0 +1,118 @@ +.. _ref_tutorials_extract_mesh_in_split_parts: + +============================= +Extract a mesh in split parts +============================= + +:bdg-fluent:`Fluent` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst +.. |MeshesContainer| replace:: :class:`MeshesContainer ` +.. |meshes_provider| replace:: :class:`meshes_provider ` + +This tutorial shows how to extract meshes split on a given space or time from a result file. + +To accomplish this goal, you must use the |meshes_provider| operator. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Define the |DataSources| +------------------------ + +We must create a |DataSources| object so the |meshes_provider| operator can access the mesh. This object +manages paths to their files. + +For this tutorial, you can use a result file available in the |Examples| module. +For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_data` +tutorial section. + +.. tab-set:: + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_3 = examples.download_fluent_axial_comp()["flprj"] + # Create the DataSources object + ds_3 = dpf.DataSources(result_path=result_file_path_3) + + .. tab-item:: CFX + + .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_4 = examples.download_cfx_mixing_elbow() + # Create the DataSources object + ds_4 = dpf.DataSources(result_path=result_file_path_4) + +Extract the mesh in split parts +------------------------------- + +Instantiate and evaluate the |meshes_provider| operator. +The split meshes are given in a |MeshesContainer| and can be spatially or temporally varying. + +.. tab-set:: + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Instantiate the meshes_provider operator + meshes_31 = ops.mesh.meshes_provider(data_sources=ds_3).eval() + + # Print the meshes + print(meshes_31) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Instantiate the meshes_provider operator + meshes_41 = ops.mesh.meshes_provider(data_sources=ds_4).eval() + + # Print the meshes + print(meshes_41) + +Scope the mesh regions to be extracted in split regions +------------------------------------------------------- + +A region corresponds to a zone for Fluent and CFX results. You can specify the mesh regions you want to get by giving +the zone IDs to the ``region_scoping`` argument. + +.. tab-set:: + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Instantiate the meshes_provider operator and specify a region + meshes_32 = ops.mesh.meshes_provider(data_sources=ds_3, region_scoping=[3,12]).eval() + + # Print the meshes + print(meshes_32) + + .. tab-item:: CFX + + .. 
jupyter-execute:: + + # Instanciate the meshes_provider operator specifying a region + meshes_42 = ops.mesh.meshes_provider(data_sources=ds_4, region_scoping=[5,8]).eval() + + # Print the meshes + print(meshes_42) diff --git a/doc/source/user_guide/tutorials/mesh/get_mesh_from_result_file.rst b/doc/source/user_guide/tutorials/mesh/get_mesh_from_result_file.rst new file mode 100644 index 0000000000..085e8782ae --- /dev/null +++ b/doc/source/user_guide/tutorials/mesh/get_mesh_from_result_file.rst @@ -0,0 +1,269 @@ +.. _ref_tutorials_get_mesh_from_result_file: + +============================= +Get a mesh from a result file +============================= + +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst + +.. |mesh_provider| replace:: :class:`mesh_provider ` + +This tutorial explains how to extract a mesh from a result file. + +The mesh object in DPF is a |MeshedRegion|. You can obtain a |MeshedRegion| by creating your +own from scratch or by getting it from a result file. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Import the result file +---------------------- + +First, import a result file. For this tutorial, you can use one available in the |Examples| module. +For more information about how to import your own result file in DPF, see the :ref:`ref_tutorials_import_data` +tutorials section. + +Here, we create a |DataSources| object so the data can be directly accessed by different +PyDPF-Core APIs. This object manages paths to their files. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_1 = examples.find_static_rst() + # Create the DataSources object + ds_1 = dpf.DataSources(result_path=result_file_path_1) + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_2 = examples.download_d3plot_beam() + # Create the DataSources object + ds_2 = dpf.DataSources() + ds_2.set_result_file_path(filepath=result_file_path_2[0], key="d3plot") + ds_2.add_file_path(filepath=result_file_path_2[3], key="actunits") + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_3 = examples.download_fluent_axial_comp()["flprj"] + # Create the DataSources object + ds_3 = dpf.DataSources(result_path=result_file_path_3) + + .. tab-item:: CFX + + .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + # Define the result file path + result_file_path_4 = examples.download_cfx_mixing_elbow() + # Create the DataSources object + ds_4 = dpf.DataSources(result_path=result_file_path_4) + + +Get the mesh from the result file +--------------------------------- + +You can get the mesh from a result file by two methods: + +- :ref:`Using the DPF Model `; +- :ref:`Using the mesh_provider operator `. + +.. note:: + + A |Model| extracts a large amount of information by default (results, mesh and analysis data). + If using this helper takes a long time for processing the code, mind using a |DataSources| object + and instantiating operators directly with it. + +.. _get_mesh_model: + +Using the DPF |Model| +^^^^^^^^^^^^^^^^^^^^^ + +The |Model| is a helper designed to give shortcuts to access the analysis results +metadata and to instanciate results providers by opening a |DataSources| or a Streams. + +Get the |MeshedRegion| by instantiating a |Model| object and accessing its metadata. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Create the Model + model_1 = dpf.Model(data_sources=ds_1) + # Get the mesh + meshed_region_11 = model_1.metadata.meshed_region + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Create the Model + model_2 = dpf.Model(data_sources=ds_2) + # Get the mesh + meshed_region_21 = model_2.metadata.meshed_region + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Create the Model + model_3 = dpf.Model(data_sources=ds_3) + # Get the mesh + meshed_region_31 = model_3.metadata.meshed_region + + .. tab-item:: CFX + + .. 
jupyter-execute:: + + # Create the Model + model_4 = dpf.Model(data_sources=ds_4) + # Get the mesh + meshed_region_41 = model_4.metadata.meshed_region + +Printing the |MeshedRegion| displays the mesh dimensions: + +- Number of nodes and elements +- Unit +- Elements type + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_11) + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_21) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_31) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_41) + +.. _get_mesh_mesh_provider: + +Using the |mesh_provider| operator +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Get the |MeshedRegion| by instantiating the |mesh_provider| operator with the +|DataSources| object as an argument. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Get the mesh with the mesh_provider operator + meshed_region_12 = ops.mesh.mesh_provider(data_sources=ds_1).eval() + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Get the mesh with the mesh_provider operator + meshed_region_22 = ops.mesh.mesh_provider(data_sources=ds_2).eval() + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Get the mesh with the mesh_provider operator + meshed_region_32 = ops.mesh.mesh_provider(data_sources=ds_3).eval() + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Get the mesh with the mesh_provider operator + meshed_region_42 = ops.mesh.mesh_provider(data_sources=ds_4).eval() + +Printing the |MeshedRegion| displays the mesh dimensions: + +- Number of nodes and elements +- Unit +- Elements type + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_12) + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_22) + + .. tab-item:: Fluent + + .. 
jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_32) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Print the MeshedRegion + print(meshed_region_42) \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/mesh/index.rst b/doc/source/user_guide/tutorials/mesh/index.rst new file mode 100644 index 0000000000..cfe2cf85af --- /dev/null +++ b/doc/source/user_guide/tutorials/mesh/index.rst @@ -0,0 +1,86 @@ +.. _ref_tutorials_mesh: + +==== +Mesh +==== + +The mesh in DPF is represented by the :class:`MeshedRegion ` entity. + +These tutorials explains how to explore different attributes of a given mesh with PyDPF-Core. + + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Create a mesh from scratch + :link: ref_tutorials_create_a_mesh_from_scratch + :link-type: ref + :text-align: center + + This tutorial demonstrates how to build a mesh from the scratch. + + .. grid-item-card:: Get a mesh from a result file + :link: ref_tutorials_get_mesh_from_result_file + :link-type: ref + :text-align: center + + This tutorial explains how to extract a mesh from a result file. + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + + .. grid-item-card:: Explore a mesh metadata + :link: ref_tutorials_explore_mesh_metadata + :link-type: ref + :text-align: center + + This tutorial explains how to explore a mesh metadata before + extracting the mesh from a result file. + + +++ + :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + + + .. grid-item-card:: Explore a mesh + :link: ref_tutorials_explore_mesh + :link-type: ref + :text-align: center + + This tutorial explains how to access a mesh data and metadata + so it can be manipulated. + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + + .. 
grid-item-card:: Extract a mesh in split parts + :link: ref_tutorials_extract_mesh_in_split_parts + :link-type: ref + :text-align: center + + This tutorial shows how to extract meshes split on a given space or time from a result file. + + +++ + :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + + .. grid-item-card:: Split a mesh + :link: ref_tutorials_split_mesh + :link-type: ref + :text-align: center + + This tutorial shows how to split a mesh on a given property. + + +++ + :bdg-mapdl:`MAPDL` :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + +.. toctree:: + :maxdepth: 2 + :hidden: + + create_a_mesh_from_scratch.rst + get_mesh_from_result_file.rst + explore_mesh_metadata.rst + explore_mesh.rst + extract_mesh_in_split_parts.rst + split_mesh.rst diff --git a/doc/source/user_guide/tutorials/mesh/split_mesh.rst b/doc/source/user_guide/tutorials/mesh/split_mesh.rst new file mode 100644 index 0000000000..7172672b48 --- /dev/null +++ b/doc/source/user_guide/tutorials/mesh/split_mesh.rst @@ -0,0 +1,240 @@ +.. _ref_tutorials_split_mesh: + +============ +Split a mesh +============ + +:bdg-mapdl:`MAPDL` :bdg-lsdyna:`LSDYNA` :bdg-fluent:`Fluent` :bdg-cfx:`CFX` + +.. include:: ../../../links_and_refs.rst + +.. |MeshesContainer| replace:: :class:`MeshesContainer ` +.. |split_mesh| replace:: :class:`split_mesh ` +.. |split_on_property_type| replace:: :class:`split_on_property_type ` +.. |from_scopings| replace:: :class:`from_scopings ` +.. |ScopingsContainer| replace:: :class:`ScopingsContainer ` +.. |PropertyField| replace:: :class:`PropertyField ` + +This tutorial shows how to split a mesh on a given property. + +There are two approaches to accomplish this goal: + +- :ref:`Use the split_mesh operator to split an already existing MeshedRegion`; +- :ref:`Split the mesh scoping and create the split MeshedRegion objects `. 
+ +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Define the mesh +--------------- + +The mesh object in DPF is a |MeshedRegion|. You can obtain a |MeshedRegion| by creating your own from scratch or by getting it from a result file. For more +information check the :ref:`ref_tutorials_create_a_mesh_from_scratch` and :ref:`ref_tutorials_get_mesh_from_result_file` +tutorials. + +For this tutorial, we get a |MeshedRegion| from a result file. You can use one available in the |Examples| module. +For more information see the :ref:`ref_tutorials_get_mesh_from_result_file` tutorial. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_1 = examples.find_multishells_rst() + # Create the model + model_1 = dpf.Model(data_sources=result_file_path_1) + # Get the mesh + meshed_region_1 = model_1.metadata.meshed_region + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_2 = examples.download_d3plot_beam() + # Create the DataSources object + ds_2 = dpf.DataSources() + ds_2.set_result_file_path(filepath=result_file_path_2[0], key="d3plot") + ds_2.add_file_path(filepath=result_file_path_2[3], key="actunits") + # Create the model + model_2 = dpf.Model(data_sources=ds_2) + # Get the mesh + meshed_region_2 = model_2.metadata.meshed_region + + .. tab-item:: Fluent + + .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_3 = examples.download_fluent_axial_comp()["flprj"] + # Create the model + model_3 = dpf.Model(data_sources=result_file_path_3) + # Get the mesh + meshed_region_3 = model_3.metadata.meshed_region + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Define the result file path + result_file_path_4 = examples.download_cfx_mixing_elbow() + # Create the model + model_4 = dpf.Model(data_sources=result_file_path_4) + # Get the mesh + meshed_region_4 = model_4.metadata.meshed_region + +.. _ref_first_approach_split_mesh: + +First approach +-------------- + +This approach consists in splitting an already existing |MeshedRegion| based on a given property. To accomplish +that end, you must use the |split_mesh| operator. Currently you can split a mesh by material or eltype. + +The split mesh parts are stored in the DPF collection called |MeshesContainer|, where they are ordered by *labels*. +When you use the |split_mesh| operator, each split mesh part has two different *labels*: + +- A "body" *label* +- A *label* with the property used to split the mesh + +Here, we split the |MeshedRegion| by material. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. jupyter-execute:: + + # Split the mesh by material + meshes_11 = ops.mesh.split_mesh(mesh=meshed_region_1, property="mat").eval() + + # Print the meshes + print(meshes_11) + + .. tab-item:: LSDYNA + + .. 
jupyter-execute:: + + # Split the mesh by material + meshes_21 = ops.mesh.split_mesh(mesh=meshed_region_2, property="mat").eval() + + # Print the meshes + print(meshes_21) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Split the mesh by material + meshes_31 = ops.mesh.split_mesh(mesh=meshed_region_3, property="mat").eval() + + # Print the meshes + print(meshes_31) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Split the mesh by material + meshes_41 = ops.mesh.split_mesh(mesh=meshed_region_4, property="mat").eval() + # Print the meshes + print(meshes_41) + +.. _ref_second_approach_split_mesh: + +Second approach +--------------- + +This approach consists in splitting the |Scoping| of a given |MeshedRegion| based on a given property and then creating +a new |MeshedRegion| for each split |Scoping|. + +To accomplish this goal you must follow these steps: + +#. Use the |split_on_property_type| operator to split the mesh |Scoping|. + This operator splits a |Scoping| on a given property (elshape and/or material, since 2025R1 it supports any + scalar property field name contained in the mesh property fields). The split |Scoping| is stored in the DPF + collection called |ScopingsContainer|, where they are ordered by *labels*. In this case, you get *labels* with + the property used to split the |Scoping|. + +#. Create the split |MeshedRegion| objects using the |from_scopings| operator for the |Scoping| of interest. + The split parts are stored in the DPF collection called |MeshesContainer| where they are also ordered by *labels*. + These *labels* are corresponding to the "mat" labels gotten with the |split_on_property_type| operator. + +Here, we split the mesh scoping by material and create a |MeshedRegion| for all the split |Scoping| in the +|ScopingsContainer|. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. 
jupyter-execute:: + + # Define the scoping split by material + split_scoping_1 = ops.scoping.split_on_property_type(mesh=meshed_region_1, label1="mat").eval() + # Get the split meshes + meshes_12 = ops.mesh.from_scopings(scopings_container=split_scoping_1, mesh=meshed_region_1).eval() + # Print the meshes + print(meshes_12) + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Define the scoping split by material + split_scoping_2 = ops.scoping.split_on_property_type(mesh=meshed_region_2, label1="mat").eval() + # Get the split meshes + meshes_22 = ops.mesh.from_scopings(scopings_container=split_scoping_2, mesh=meshed_region_2).eval() + # Print the meshes + print(meshes_22) + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Define the scoping split by material + split_scoping_3 = ops.scoping.split_on_property_type(mesh=meshed_region_3, label1="mat").eval() + # Get the split meshes + meshes_32 = ops.mesh.from_scopings(scopings_container=split_scoping_3, mesh=meshed_region_3).eval() + # Print the meshes + print(meshes_32) + + .. tab-item:: CFX + + .. jupyter-execute:: + + # Define the scoping split by material + split_scoping_4 = ops.scoping.split_on_property_type(mesh=meshed_region_4, label1="mat").eval() + # Get the split meshes + meshes_42 = ops.mesh.from_scopings(scopings_container=split_scoping_4, mesh=meshed_region_4).eval() + # Print the meshes + print(meshes_42) diff --git a/doc/source/user_guide/tutorials/operators_and_workflows/index.rst b/doc/source/user_guide/tutorials/operators_and_workflows/index.rst new file mode 100644 index 0000000000..900657acab --- /dev/null +++ b/doc/source/user_guide/tutorials/operators_and_workflows/index.rst @@ -0,0 +1,41 @@ +.. _ref_tutorials_operators_and_workflows: + +========================================= +Process data with operators and workflows +========================================= + +An operator is the main object used to create, transform, and stream data in DPF. 
+ +They can perform different modifications of the data: direct mathematical operations, +averaging on the mesh, changes in the model locations.... They can also be chained together +to create workflows for more complex operations and customizable results. + +The tutorials in this section present how to create and use these operators and workflows in PyDPF-Core. + +For more information on how to program with PyDPF-Core check the +:ref:`ref_tutorials_language_and_usage` tutorial. + + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Use operators + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + + .. grid-item-card:: Create workflows + :link: ref_tutorials + :link-type: ref + :text-align: center + + This tutorial + +.. toctree:: + :maxdepth: 2 + :hidden: + diff --git a/doc/source/user_guide/tutorials/plot/index.rst b/doc/source/user_guide/tutorials/plot/index.rst new file mode 100644 index 0000000000..0b14860821 --- /dev/null +++ b/doc/source/user_guide/tutorials/plot/index.rst @@ -0,0 +1,49 @@ +.. _ref_tutorials_plot: + +==== +Plot +==== + +These tutorials demonstrate different ways one can visualize the data in plots using PyDPF-Core. + +.. grid:: 1 1 3 3 + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: Plot a mesh + :link: ref_tutorials_plot_mesh + :link-type: ref + :text-align: center + + This tutorial shows several ways to plot meshes. + + .. grid-item-card:: Add deformation + :link: ref_tutorials_plot_deformed_mesh + :link-type: ref + :text-align: center + + This tutorial shows how to add deformation to plots. + + .. grid-item-card:: Plot contours + :link: ref_tutorials_plot_contour + :link-type: ref + :text-align: center + + This tutorial shows how to plot contours. + + .. grid-item-card:: Plot a graph + :link: ref_tutorials_plot_graph + :link-type: ref + :text-align: center + + This tutorial shows how to plot graphs using matplotlib. + +.. 
toctree:: + :maxdepth: 2 + :hidden: + + plot_mesh.rst + plot_deformed_mesh.rst + plot_contour.rst + plot_a_graph.rst \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/plot/plot_a_graph.rst b/doc/source/user_guide/tutorials/plot/plot_a_graph.rst new file mode 100644 index 0000000000..82f4db32b2 --- /dev/null +++ b/doc/source/user_guide/tutorials/plot/plot_a_graph.rst @@ -0,0 +1,260 @@ +.. _ref_tutorials_plot_graph: + +============================= +Plot a graph using matplotlib +============================= + +.. include:: ../../../links_and_refs.rst + +.. |Line| replace:: :class:`Line ` +.. |on_coordinates| replace:: :class:`on_coordinates ` +.. |Line.path| replace:: :py:attr:`Line.path ` +.. |min_max_fc| replace:: :class:`min_max_fc ` + +This tutorial explains how to plot a graph with data from DPF using `matplotlib `_. + +The current |DpfPlotter| module does not allow to plot graphs. Instead, you need to import the +`matplotlib `_ library to plot graphs with PyDPF-Core. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +There is a large range of graphs you can plot. Here, we showcase: + +- :ref:`A graph of a result along a path ` +- :ref:`A graph of transient data ` + +.. _ref_graph_result_space: + +Result along a path +------------------- + +In this tutorial, we plot the norm of the displacement along a custom path represented by a |Line|. +For more information about how to create a custom geometric object, +see the :ref:`ref_tutorials_plot_on_custom_geometry` tutorial. + +We first need to get the data of interest, then create a custom |Line| geometry for the path. +We then map the result on the path, and finally create a 2D graph. + +Extract the data +^^^^^^^^^^^^^^^^ + +First, extract the data from a result file or create some from scratch. +For this tutorial we use a case available in the |Examples| module. 
+For more information on how to import your own result file in DPF, +or on how to create data from user input in PyDPF-Core,see +the :ref:`ref_tutorials_import_data` tutorials section. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + import ansys.dpf.core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + # Import the geometry module + from ansys.dpf.core import geometry as geo + + # Import the ``matplotlib.pyplot`` module + import matplotlib.pyplot as plt + + # Download and get the path to an example result file + result_file_path_1 = examples.find_static_rst() + + # Create a model from the result file + model_1 = dpf.Model(data_sources=result_file_path_1) + +We then extract the result of interest for the graph. +In this tutorial, we want the norm of the displacement field at the last step. + +.. jupyter-execute:: + + # Get the nodal displacement field at the last simulation step (default) + disp_results_1 = model_1.results.displacement.eval() + + # Get the norm of the displacement field + norm_disp = ops.math.norm_fc(fields_container=disp_results_1).eval() + +Define the path +^^^^^^^^^^^^^^^ + +Create a path as a |Line| passing through the diagonal of the mesh. + +.. jupyter-execute:: + + # Create a discretized line for the path + line_1 = geo.Line(coordinates=[[0.0, 0.06, 0.0], [0.03, 0.03, 0.03]], n_points=50) + # Plot the line on the original mesh + line_1.plot(mesh=model_1.metadata.meshed_region) + +Map the data on the path +^^^^^^^^^^^^^^^^^^^^^^^^ + +Map the displacement norm field to the |Line| using the |on_coordinates| mapping operator. + +This operator interpolates field values at given node coordinates, using element shape functions. 
+ +It takes as input a |FieldsContainer| of data, a 3D vector |Field| of coordinates to interpolate at, +and an optional |MeshedRegion| to use for element shape functions if the first |Field| in the data +provided does not have an associated meshed support. + +.. jupyter-execute:: + + # Interpolate the displacement norm field at the nodes of the custom path + disp_norm_on_path_fc: dpf.FieldsContainer = ops.mapping.on_coordinates( + fields_container=norm_disp, + coordinates=line_1.mesh.nodes.coordinates_field, + ).eval() + # Extract the only field in the collection obtained + disp_norm_on_path: dpf.Field = disp_norm_on_path_fc[0] + print(disp_norm_on_path) + +Plot the graph +^^^^^^^^^^^^^^ + +Plot a graph of the norm of the displacement field along the path using the +`matplotlib `_ library. + +To get the parametric coordinates of the nodes along the line and use them as X-axis, +you can use the |Line.path| property. +It gives the 1D array of parametric coordinates of the nodes of the line along the line. + +The values in the displacement norm field are in the same order as the parametric +coordinates because the mapping operator orders output data the same as the input coordinates. + +.. jupyter-execute:: + + # Get the field of parametric coordinates along the path for the X-axis + line_coordinates = line_1.path + + # Define the curve to plot + plt.plot(line_coordinates, disp_norm_on_path.data) + + # Add titles to the axes and the graph + plt.xlabel("Position on path") + plt.ylabel("Displacement norm") + plt.title("Displacement norm along the path") + + # Display the graph + plt.show() + +.. _ref_graph_result_time: + +Transient data +-------------- + +In this tutorial, we plot the minimum and maximum displacement norm over time for a transient analysis. +For more information about using PyDPF-Core with a transient analysis, +see the :ref:`static_transient_examples` examples. 
+ +We first need to create data for the Y-axis, +and then format the time information of the model for the X-axis, +to finally create a 2D graph using both. + +Prepare data +^^^^^^^^^^^^ + +First, extract the data from a transient result file or create some from scratch. +For this tutorial we use a transient case available in the |Examples| module. +For more information on how to import your own result file in DPF, +or on how to create data from user input in PyDPF-Core, see +the :ref:`ref_tutorials_import_data` tutorials section. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + import ansys.dpf.core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Import the ``matplotlib.pyplot`` module + import matplotlib.pyplot as plt + + # Download and get the path to an example transient result file + result_file_path_2 = examples.download_transient_result() + + # Create a model from the result file + model_2 = dpf.Model(data_sources=result_file_path_2) + + # Check the model is transient with its ``TimeFreqSupport`` + print(model_2.metadata.time_freq_support) + +We then extract the result of interest for the graph. +In this tutorial, we want the maximum and minimum displacement norm over the field at each time step. + +First extract the displacement field for every time step. + +.. jupyter-execute:: + + # Get the displacement at all time steps + disp_results_2: dpf.FieldsContainer = model_2.results.displacement.on_all_time_freqs.eval() + +Next, get the minimum and maximum of the norm of the displacement at each time step using the |min_max_fc| operator. + +.. 
jupyter-execute:: + + # Instantiate the min_max operator and give the output of the norm operator as input + min_max_op = ops.min_max.min_max_fc(fields_container=ops.math.norm_fc(disp_results_2)) + + # Get the field of maximum values at each time-step + max_disp: dpf.Field = min_max_op.outputs.field_max() + print(max_disp) + + # Get the field of minimum values at each time-step + min_disp: dpf.Field = min_max_op.outputs.field_min() + print(min_disp) + +The operator already outputs fields where data points are associated to time-steps. + +Prepare time values +^^^^^^^^^^^^^^^^^^^ + +The time or frequency information associated to DPF objects is stored in |TimeFreqSupport| objects. + +You can use the |TimeFreqSupport| of a |Field| with location ``time_freq`` to retrieve the time or +frequency values associated to the entities mentioned in its scoping. + +Here the fields are on all time-steps, so we can simply get the list of all time values without filtering. + +.. jupyter-execute:: + + # Get the field of time values + time_steps_1: dpf.Field = disp_results_2.time_freq_support.time_frequencies + + # Print the time values + print(time_steps_1) + +The time values associated to time-steps are given in a |Field|. +To use it in the graph you need to extract the data of the |Field| as an array. + +.. jupyter-execute:: + + # Get the time values + time_data = time_steps_1.data + print(time_data) + + +Plot the graph +^^^^^^^^^^^^^^ + +Plot a graph of the minimum and maximum displacement over time using the +`matplotlib `_ library. + +Use the ``unit`` property of the fields to properly label the axes. + +.. 
jupyter-execute:: + + # Define the plot figure + plt.plot(time_data, max_disp.data, "r", label="Max") + plt.plot(time_data, min_disp.data, "b", label="Min") + + # Add axis labels and legend + plt.xlabel(f"Time ({time_steps_1.unit})") + plt.ylabel(f"Displacement ({max_disp.unit})") + plt.legend() + + # Display the graph + plt.show() diff --git a/doc/source/user_guide/tutorials/plot/plot_contour.rst b/doc/source/user_guide/tutorials/plot/plot_contour.rst new file mode 100644 index 0000000000..3ec2b13d06 --- /dev/null +++ b/doc/source/user_guide/tutorials/plot/plot_contour.rst @@ -0,0 +1,284 @@ +.. _ref_tutorials_plot_contour: + +============= +Plot contours +============= + +.. include:: ../../../links_and_refs.rst + +.. |Field.plot| replace:: :py:meth:`Field.plot() ` +.. |MeshedRegion.plot| replace:: :py:meth:`MeshedRegion.plot() ` +.. |add_mesh| replace:: :py:meth:`add_mesh() ` +.. |add_field| replace:: :py:meth:`add_field() ` +.. |show_figure| replace:: :py:meth:`show_figure() ` +.. |to_nodal_fc| replace:: :py:class:`to_nodal_fc ` +.. |select_component| replace:: :func:`select_component() ` +.. |stress_op| replace:: :py:class:`stress ` +.. |Field.meshed_region| replace:: :py:attr:`Field.meshed_region ` +.. |FieldsContainer.plot| replace:: :py:meth:`FieldsContainer.plot() ` +.. |split_fields| replace:: :py:class:`split_fields ` +.. |split_mesh| replace:: :py:class:`split_mesh ` + +This tutorial shows different commands for plotting data contours on meshes. + +PyDPF-Core has a variety of plotting methods for generating 3D plots with Python. +These methods use VTK and leverage the `PyVista `_ library. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Load data to plot +----------------- + +Load a result file in a model +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +For this tutorial, we use mesh information and data from a case available in the |Examples| module. 
+For more information on how to import your own result file in DPF, see
+the :ref:`ref_tutorials_import_data` tutorials section.
+
+.. jupyter-execute::
+
+    # Import the ``ansys.dpf.core`` module
+    import ansys.dpf.core as dpf
+    # Import the examples module
+    from ansys.dpf.core import examples
+    # Import the operators module
+    from ansys.dpf.core import operators as ops
+
+    # Define the result file path
+    result_file_path_1 = examples.download_piston_rod()
+
+    # Create a model from the result file
+    model_1 = dpf.Model(data_sources=result_file_path_1)
+
+Extract data for the contour
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Extract data for the contour. For more information about extracting results from a result file,
+see the :ref:`ref_tutorials_import_data` tutorials section.
+
+.. note::
+
+    Only the *'elemental'* or *'nodal'* locations are supported for plotting.
+
+Here, we choose to plot the XX component of the stress tensor.
+
+.. jupyter-execute::
+
+    # Get the stress operator for component XX
+    stress_XX_op = ops.result.stress_X(data_sources=model_1)
+
+    # The default behavior of the operator is to return data as *'ElementalNodal'*
+    print(stress_XX_op.eval())
+
+We must request the stress in a *'nodal'* location as the default *'ElementalNodal'* location for the stress results
+is not supported for plotting.
+
+There are different ways to change the location. Here, we define the new location using the input of the |stress_op|
+operator. Another option would be using an averaging operator on the output of the stress operator,
+like the |to_nodal_fc| operator.
+
+.. jupyter-execute::
+
+    # Define the desired location as an input of the stress operator
+    stress_XX_op.inputs.requested_location(dpf.locations.nodal)
+
+    # Get the output
+    stress_XX_fc = stress_XX_op.eval()
+
+The output is a collection of fields, a |FieldsContainer|.
+
+Extract a mesh
+^^^^^^^^^^^^^^
+
+Here we simply get the |MeshedRegion| object of the model, but any other |MeshedRegion| works.
+
+..
jupyter-execute:: + + # Extract the mesh + meshed_region_1 = model_1.metadata.meshed_region + +Plot a contour of a single field +-------------------------------- + +To plot a single |Field|, you can use: + +- the |Field.plot| method +- the |MeshedRegion.plot| method with the field as argument +- the |DpfPlotter| class and its |add_field| method + +.. hint:: + + Using the |DpfPlotter| class is more performant than using the |Field.plot| method + +.. tab-set:: + + .. tab-item:: Field.plot() + + First, get a |Field| from the stress results |FieldsContainer|. Then, use the |Field.plot| method [1]_. + If the |Field| does not have an associated mesh support (see |Field.meshed_region|), + you must use the ``meshed_region`` argument and provide a mesh. + + .. jupyter-execute:: + + # Get a single field + stress_XX = stress_XX_fc[0] + + # Plot the contour on the mesh + stress_XX.plot(meshed_region=meshed_region_1) + + .. tab-item:: MeshedRegion.plot() + + Use the |MeshedRegion.plot| method [1]_. + You must use the *'field_or_fields_container'* argument and + give the |Field| or the |FieldsContainer| containing the stress results data. + + .. jupyter-execute:: + + # Plot the mesh with the stress field contour + meshed_region_1.plot(field_or_fields_container=stress_XX) + + .. tab-item:: DpfPlotter + + First create an instance of |DpfPlotter| [2]_. Then, add the |Field| to the scene using the |add_field| method. + If the |Field| does not have an associated mesh support (see |Field.meshed_region|), + you must use the *'meshed_region'* argument and provide a mesh. + + To render and show the figure based on the current state of the plotter object, use the |show_figure| method. + + .. 
jupyter-execute:: + + # Create a DpfPlotter instance + plotter_1 = dpf.plotter.DpfPlotter() + + # Add the field to the scene, here with an explicitly associated mesh + plotter_1.add_field(field=stress_XX, meshed_region=meshed_region_1) + + # Display the scene + plotter_1.show_figure() + + You can also first use the |add_mesh| method to add the mesh to the scene + and then use |add_field| without the ``meshed_region`` argument. + + +Plot a contour of multiple fields +--------------------------------- + +Prepare a collection of fields +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. warning:: + + The fields should not have conflicting data, meaning you cannot build a contour for two fields + with two different sets of data for the same mesh entities (intersecting scopings). + + This means the following methods are for example not available for a collection made of the same field + varying across time, or a collection of fields for different shell layers of the same elements. + +Here we split the field for XX stress based on material to get a collection of fields with non-conflicting associated mesh entities. + +We use the |split_fields| operator to split the field based on the result of the |split_mesh| operator. +The |split_mesh| operator returns a |MeshesContainer| with meshes labeled according to the criterion for the split. +In our case, the split criterion is the material ID. + +.. jupyter-execute:: + + # Split the field based on material property + fields = ( + ops.mesh.split_fields( + field_or_fields_container=stress_XX_fc, + meshes=ops.mesh.split_mesh(mesh=meshed_region_1, property="mat"), + ) + ).eval() + + # Show the result + print(fields) + +For ``MAPDL`` results the split on material is equivalent to a split on ``bodies``, hence the two equivalent labels. 
+ +Plot the contour +^^^^^^^^^^^^^^^^ + +To plot a contour for multiple |Field| objects, you can use: + +- the |FieldsContainer.plot| method if the fields are in a collection +- the |MeshedRegion.plot| method with the field collection as argument +- the |DpfPlotter| class and several calls to its |add_field| method + +.. hint:: + + Using the |DpfPlotter| class is more performant than using the |Field.plot| method + +.. tab-set:: + + .. tab-item:: FieldsContainer.plot() + + Use the |FieldsContainer.plot| method [1]_. + + .. jupyter-execute:: + + # Plot the contour for all fields in the collection + fields.plot() + + The ``label_space`` argument provides further field filtering capabilities. + + .. jupyter-execute:: + + # Plot the contour for ``mat`` 1 only + fields.plot(label_space={"mat":1}) + + .. tab-item:: MeshedRegion.plot() + + Use the |MeshedRegion.plot| method [1]_. + You must use the *'field_or_fields_container'* argument and + give the |Field| or the |FieldsContainer| containing the stress results data. + + .. jupyter-execute:: + + # Plot the mesh with the stress field contours + meshed_region_1.plot(field_or_fields_container=fields) + + .. tab-item:: DpfPlotter + + First create an instance of |DpfPlotter| [2]_. + Then, add each |Field| to the scene using the |add_field| method. + If the |Field| does not have an associated mesh support (see |Field.meshed_region|), + you must use the *'meshed_region'* argument and provide a mesh. + + To render and show the figure based on the current state of the plotter object, use the |show_figure| method. + + .. jupyter-execute:: + + # Create a DpfPlotter instance + plotter_1 = dpf.plotter.DpfPlotter() + + # Add each field to the scene + plotter_1.add_field(field=fields[0]) + plotter_1.add_field(field=fields[1]) + + # Display the scene + plotter_1.show_figure() + +.. rubric:: Footnotes + +.. [1] The |DpfPlotter| displays the mesh with edges, lighting and axis widget enabled by default. 
+ You can pass additional PyVista arguments to all plotting methods to change the default behavior + (see options for `pyvista.plot() `_), such as: + + .. jupyter-execute:: + + model_1.plot(title="Mesh", + text="this is a mesh", # Adds the given text at the bottom of the plot + off_screen=True, + screenshot="mesh_plot_1.png", # Save a screenshot to file with the given name + window_size=[450,350]) + # Notes: + # - To save a screenshot to file, use "screenshot=figure_name.png" ( as well as "notebook=False" if on a Jupyter notebook). + # - The "off_screen" keyword only works when "notebook=False". If "off_screen=True" the plot is not displayed when running the code. + +.. [2] The |DpfPlotter| is currently based on PyVista. + That means that PyVista must be installed. + The DPF plotter also passes additional parameters to the PyVista plotter + (arguments supported by the version of PyVista installed). + More information about available additional arguments is available at `pyvista.plot() `_. diff --git a/doc/source/user_guide/tutorials/plot/plot_deformed_mesh.rst b/doc/source/user_guide/tutorials/plot/plot_deformed_mesh.rst new file mode 100644 index 0000000000..b0fd1bca2a --- /dev/null +++ b/doc/source/user_guide/tutorials/plot/plot_deformed_mesh.rst @@ -0,0 +1,227 @@ +.. _ref_tutorials_plot_deformed_mesh: + +========================== +Plot with mesh deformation +========================== + +.. include:: ../../../links_and_refs.rst + +.. |Model.plot| replace:: :py:meth:`Model.plot() ` +.. |MeshedRegion.plot| replace:: :py:meth:`MeshedRegion.plot() ` +.. |MeshesContainer.plot| replace:: :py:meth:`MeshesContainer.plot() ` +.. |add_mesh| replace:: :py:meth:`add_mesh()` +.. |add_field| replace:: :py:meth:`add_field()` +.. |show_figure| replace:: :py:meth:`show_figure()` +.. |split_mesh| replace:: :py:class:`split_mesh ` +.. |disp_op| replace:: :py:class:`displacement operator ` + +This tutorial shows different commands for plotting a deformed mesh without data. 
+ +A mesh is represented in DPF by a |MeshedRegion| object. +You can store multiple |MeshedRegion| in a DPF collection called |MeshesContainer|. + +You can obtain a |MeshedRegion| by creating your own from scratch or by getting it from a result file. +For more information, see the :ref:`ref_tutorials_create_a_mesh_from_scratch` and +:ref:`ref_tutorials_get_mesh_from_result_file` tutorials. + +PyDPF-Core has a variety of plotting methods for generating 3D plots with Python. +These methods use VTK and leverage the `PyVista `_ library. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Load data to plot +----------------- + +For this tutorial, we use mesh information from a case available in the |Examples| module. +For more information see the :ref:`ref_tutorials_get_mesh_from_result_file` tutorial. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + import ansys.dpf.core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Download and get the path to an example result file + result_file_path_1 = examples.download_piston_rod() + + # Create a model from the result file + model_1 = dpf.Model(data_sources=result_file_path_1) + +Get the deformation field +------------------------- + +To deform the mesh, we need a nodal 3D vector field specifying the translation of each node in the mesh. + +The following DPF objects are able to return or represent such a field +and are accepted inputs for the deformation parameter of plot methods: + +- A |Field| +- A |FieldsContainer| +- A |Result| +- An |Operator| + +Here, we use the |disp_op| which outputs a nodal 3D vector field of distances. + +One can get the operator from the |Model| with the source of data already connected. 
+For more information about extracting results from a result file, +see the :ref:`ref_tutorials_import_data` tutorials section. + +.. jupyter-execute:: + + # Get the displacement operator for this model + disp_op = model_1.results.displacement() + +You can apply a scale factor to the deformation for every method in this tutorial +by passing in the ``scale_factor`` argument. + +.. jupyter-execute:: + + # Define the scale factor + scl_fct = 2.0 + +.. _ref_plot_deformed_mesh_with_model: + +Plot a deformed model +--------------------- + +You can directly plot the overall mesh loaded by the model with |Model.plot| [1]_. +To plot it with deformation, use the *'deform_by'* argument and provide the displacement operator. + +.. jupyter-execute:: + + # Plot the deformed mesh + model_1.plot(deform_by=disp_op, scale_factor=scl_fct) + +You can apply a scale factor to the deformation for every method in this tutorial. + +.. jupyter-execute:: + + # Define the scale factor + scl_fct = 2.0 + +.. _ref_plot_deformed_mesh_with_meshed_region: + +Plot a single mesh +------------------ + +Get the mesh +^^^^^^^^^^^^ + +Here we simply get the |MeshedRegion| object of the model, but any other |MeshedRegion| works. + +.. jupyter-execute:: + + # Extract the mesh + meshed_region_1 = model_1.metadata.meshed_region + +Plot the mesh +^^^^^^^^^^^^^ + +To plot the deformed |MeshedRegion| you can use: + +- The |MeshedRegion.plot| method; +- The |DpfPlotter| object. + +.. tab-set:: + + .. tab-item:: MeshedRegion.plot() method + + Use the |MeshedRegion.plot| method [1]_ of the |MeshedRegion| object we defined. + Add deformation by providing our displacement operator to the *'deform_by'* argument. + + .. jupyter-execute:: + + # Plot the deformed mesh + meshed_region_1.plot(deform_by=disp_op, scale_factor=scl_fct) + + .. tab-item:: DpfPlotter object + + To plot the mesh with this approach, first create an instance of |DpfPlotter| [2]_. 
+ Then, add the |MeshedRegion| to the scene using the |add_mesh| method. + Add deformation by providing our displacement operator to the *'deform_by'* argument. + + To render and show the figure based on the current state of the plotter object, use the |show_figure| method. + + .. jupyter-execute:: + + # Create a DpfPlotter instance + plotter_1 = dpf.plotter.DpfPlotter() + + # Add the mesh to the scene with deformation + plotter_1.add_mesh(meshed_region=meshed_region_1, + deform_by=disp_op, + scale_factor=scl_fct) + + # Display the scene + plotter_1.show_figure() + +You can also plot data contours on a deformed mesh. For more information, see :ref:`ref_tutorials_plot_contour` + +.. _ref_plot_deformed_mesh_with_meshes_container: + +Plot several meshes +------------------- + +Build a collection of meshes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +There are different ways to obtain a |MeshesContainer|. +You can for example split a |MeshedRegion| using operators. + +Here, we use the |split_mesh| operator to split the mesh based on the material of each element. +This operator returns a |MeshesContainer| with meshes labeled according to the criterion for the split. +In our case, each mesh has a *'mat'* label. +For more information about how to get a split mesh, see the :ref:`ref_tutorials_split_mesh` +and :ref:`ref_tutorials_extract_mesh_in_split_parts` tutorials. + +.. jupyter-execute:: + + # Split the mesh based on material property + meshes = ops.mesh.split_mesh(mesh=meshed_region_1, property="mat").eval() + + # Show the result + print(meshes) + +Plot the meshes +^^^^^^^^^^^^^^^ + +Use the |MeshesContainer.plot| method [1]_ of the |MeshesContainer| object we defined. +Provide the displacement operator to the *'deform_by'* argument to add mesh deformation. + +This method plots all the |MeshedRegion| objects stored in the |MeshesContainer| +and colors them based on the property used to split the mesh. + +.. 
jupyter-execute::
+
+    # Plot the deformed mesh
+    meshes.plot(deform_by=disp_op, scale_factor=scl_fct)
+
+You can also plot data on a collection of deformed meshes.
+For more information, see :ref:`ref_tutorials_plot_contour`
+
+.. rubric:: Footnotes
+
+.. [1] The |DpfPlotter| displays the mesh with edges, lighting and axis widget enabled by default.
+    You can pass additional PyVista arguments to all plotting methods to change the default behavior
+    (see options for `pyvista.plot() `_), such as:
+
+    .. jupyter-execute::
+
+        model_1.plot(title="Mesh",
+                    text="this is a mesh", # Adds the given text at the bottom of the plot
+                    off_screen=True,
+                    screenshot="mesh_plot_1.png", # Save a screenshot to file with the given name
+                    window_size=[450,350])
+        # Notes:
+        # - To save a screenshot to file, use "screenshot=figure_name.png" ( as well as "notebook=False" if on a Jupyter notebook).
+        # - The "off_screen" keyword only works when "notebook=False". If "off_screen=True" the plot is not displayed when running the code.
+
+.. [2] The |DpfPlotter| is currently based on PyVista.
+    That means that PyVista must be installed.
+    The DPF plotter also passes additional parameters to the PyVista plotter
+    (arguments supported by the version of PyVista installed).
+    More information about available additional arguments is available at `pyvista.plot() `_.
diff --git a/doc/source/user_guide/tutorials/plot/plot_mesh.rst b/doc/source/user_guide/tutorials/plot/plot_mesh.rst
new file mode 100644
index 0000000000..62db004513
--- /dev/null
+++ b/doc/source/user_guide/tutorials/plot/plot_mesh.rst
@@ -0,0 +1,174 @@
+.. _ref_tutorials_plot_mesh:
+
+===========
+Plot a mesh
+===========
+
+.. include:: ../../../links_and_refs.rst
+
+.. |Model.plot| replace:: :py:meth:`Model.plot() `
+.. |MeshedRegion.plot| replace:: :py:meth:`MeshedRegion.plot() `
+.. |MeshesContainer.plot| replace:: :py:meth:`MeshesContainer.plot() `
+.. |add_mesh| replace:: :py:meth:`add_mesh() `
+..
|show_figure| replace:: :py:meth:`show_figure() ` +.. |split_mesh| replace:: :py:class:`split_mesh ` + +This tutorial shows different commands for plotting a mesh without data. + +A mesh is represented in DPF by a |MeshedRegion| object. +You can store multiple |MeshedRegion| in a DPF collection called |MeshesContainer|. + +You can obtain a |MeshedRegion| by creating your own from scratch or by getting it from a result file. +For more information, see the :ref:`ref_tutorials_create_a_mesh_from_scratch` and +:ref:`ref_tutorials_get_mesh_from_result_file` tutorials. + +PyDPF-Core has a variety of plotting methods for generating 3D plots with Python. +These methods use VTK and leverage the `PyVista `_ library. + +:jupyter-download-script:`Download tutorial as Python script` +:jupyter-download-notebook:`Download tutorial as Jupyter notebook` + +Load data to plot +----------------- + +For this tutorial, we use mesh information from a case available in the |Examples| module. +For more information see the :ref:`ref_tutorials_get_mesh_from_result_file` tutorial. + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + import ansys.dpf.core as dpf + # Import the examples module + from ansys.dpf.core import examples + # Import the operators module + from ansys.dpf.core import operators as ops + + # Download and get the path to an example result file + result_file_path_1 = examples.download_piston_rod() + + # Create a model from the result file + model_1 = dpf.Model(data_sources=result_file_path_1) + +Plot a model +------------ + +You can directly plot the overall mesh loaded by the model with |Model.plot| [1]_. + +.. jupyter-execute:: + + # Plot the mesh + model_1.plot() + +Plot a single mesh +------------------ + +Get the mesh +^^^^^^^^^^^^ + +Here we simply get the |MeshedRegion| object of the model, but any other |MeshedRegion| works. + +.. 
jupyter-execute:: + + # Extract the mesh + meshed_region_1 = model_1.metadata.meshed_region + +Plot the mesh +^^^^^^^^^^^^^ + +To plot the |MeshedRegion| you can use: + +- The |MeshedRegion.plot| method; +- The |DpfPlotter| object. + +.. tab-set:: + + .. tab-item:: MeshedRegion.plot() method + + Use the |MeshedRegion.plot| method [1]_ of the |MeshedRegion| object we defined. + + .. jupyter-execute:: + + # Plot the mesh object + meshed_region_1.plot() + + .. tab-item:: DpfPlotter object + + To plot the mesh with this approach, first create an instance of |DpfPlotter| [2]_. + Then, add the |MeshedRegion| to the scene using the |add_mesh| method. + + To render and show the figure based on the current state of the plotter object, use the |show_figure| method. + + .. jupyter-execute:: + + # Create a DpfPlotter instance + plotter_1 = dpf.plotter.DpfPlotter() + + # Add the mesh to the scene + plotter_1.add_mesh(meshed_region=meshed_region_1) + + # Display the scene + plotter_1.show_figure() + +You can also plot data contours on a mesh. For more information, see :ref:`ref_tutorials_plot_contour` + +Plot several meshes +------------------- + +Build a collection of meshes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +There are different ways to obtain a |MeshesContainer|. +You can for example split a |MeshedRegion| using operators. + +Here, we use the |split_mesh| operator to split the mesh based on the material of each element. +This operator returns a |MeshesContainer| with meshes labeled according to the criterion for the split. +In our case, each mesh has a *'mat'* label. +For more information about how to get a split mesh, see the :ref:`ref_tutorials_split_mesh` +and :ref:`ref_tutorials_extract_mesh_in_split_parts` tutorials. + +.. 
jupyter-execute:: + + # Split the mesh based on material property + meshes = ops.mesh.split_mesh(mesh=meshed_region_1, property="mat").eval() + + # Show the result + print(meshes) + +Plot the meshes +^^^^^^^^^^^^^^^ + +Use the |MeshesContainer.plot| method [1]_ of the |MeshesContainer| object we defined. + +This method plots all the |MeshedRegion| objects stored in the |MeshesContainer| +and colors them based on the property used to split the mesh. + +.. jupyter-execute:: + + # Plot the collection of meshes + meshes.plot() + +You can also plot data on a collection of meshes. +For more information, see :ref:`ref_tutorials_plot_contour` + +.. rubric:: Footnotes + +.. [1] The |DpfPlotter| displays the mesh with edges, lighting and axis widget enabled by default. + You can pass additional PyVista arguments to all plotting methods to change the default behavior + (see options for `pyvista.plot() `_), such as: + + .. jupyter-execute:: + + model_1.plot(title="Mesh", + text="this is a mesh", # Adds the given text at the bottom of the plot + off_screen=True, + screenshot="mesh_plot_1.png", # Save a screenshot to file with the given name + window_size=[450,350]) + # Notes: + # - To save a screenshot to file, use "screenshot=figure_name.png" ( as well as "notebook=False" if on a Jupyter notebook). + # - The "off_screen" keyword only works when "notebook=False". If "off_screen=True" the plot is not displayed when running the code. + +.. [2] The |DpfPlotter| is currently based on PyVista. + That means that PyVista must be installed. + The DPF plotter also passes additional parameters to the PyVista plotter + (arguments supported by the version of PyVista installed). + More information about available additional arguments is available at `pyvista.plot() `_. 
diff --git a/doc/source/user_guide/tutorials/post_processing_basics/01-main-steps.rst b/doc/source/user_guide/tutorials/post_processing_basics/01-main-steps.rst new file mode 100644 index 0000000000..43ef9b3a36 --- /dev/null +++ b/doc/source/user_guide/tutorials/post_processing_basics/01-main-steps.rst @@ -0,0 +1,199 @@ +.. _tutorials_main_steps: + +Postprocessing main steps +------------------------- + +There are five main steps to transform simulation data into output data that can +be used to visualize and analyze simulation results: + +.. grid:: + :gutter: 2 + :padding: 2 + :margin: 2 + + .. grid-item-card:: 1 + :link: tutorials_main_steps_1 + :link-type: ref + :text-align: center + + Importing and opening results files + + .. grid-item-card:: 2 + :link: tutorials_main_steps_2 + :link-type: ref + :text-align: center + + Access and extract results + + .. grid-item-card:: 3 + :link: tutorials_main_steps_3 + :link-type: ref + :text-align: center + + Transform available data + + .. grid-item-card:: 4 + :link: tutorials_main_steps_4 + :link-type: ref + :text-align: center + + Visualize the data + + .. grid-item-card:: 5 + :link: tutorials_main_steps_5 + :link-type: ref + :text-align: center + + Export data + +.. _tutorials_main_steps_1: + +1- Importing and opening results files +************************************** + +First, import the DPF-Core module as ``dpf`` and import the included examples file + +.. code-block:: python + + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + +`DataSources' is a class that manages paths to their files. Use this object to declare +data inputs for DPF and define their locations. + +.. 
code-block:: python
+
+    # Define the DataSources object
+    my_data_sources = dpf.DataSources(result_path=examples.find_simple_bar())
+
+
+The model is a helper designed to give shortcuts to access the analysis results
+metadata, by opening a DataSources or a Streams, and to instantiate results provider for it.
+
+Printing the model displays:
+
+    - Analysis type
+    - Available results
+    - Size of the mesh
+    - Number of results
+
+.. code-block:: python
+
+    # Define the Model object
+    my_model = dpf.Model(data_sources=my_data_sources)
+    print(my_model)
+
+.. rst-class:: sphx-glr-script-out
+
+    .. jupyter-execute::
+        :hide-code:
+
+        from ansys.dpf import core as dpf
+        from ansys.dpf.core import examples
+        from ansys.dpf.core import operators as ops
+        my_data_sources = dpf.DataSources(result_path=examples.find_simple_bar())
+        my_model = dpf.Model(data_sources=my_data_sources)
+        print(my_model)
+
+.. _tutorials_main_steps_2:
+
+2- Access and extract results
+*****************************
+
+We see in the model that a displacement result is available. You can access this result by:
+
+.. code-block:: python
+
+    # Define the displacement results through the models property `results`
+    my_displacements = my_model.results.displacement.eval()
+    print(my_displacements)
+
+.. rst-class:: sphx-glr-script-out
+
+    .. jupyter-execute::
+        :hide-code:
+
+        my_displacements = my_model.results.displacement.eval()
+        print(my_displacements)
+
+The displacement data can be extracted by:
+
+.. code-block:: python
+
+    # Extract the data of the displacement field
+    my_displacements_0 = my_displacements[0].data
+    print(my_displacements_0)
+
+.. rst-class:: sphx-glr-script-out
+
+    .. jupyter-execute::
+        :hide-code:
+
+        my_displacements_0 = my_displacements[0].data
+        print(my_displacements_0)
+
+.. _tutorials_main_steps_3:
+
+3- Transform available data
+***************************
+
+Several transformations can be made with the data.
They can be a single operation,
+by using only one operator, or they can represent a succession of operations, by defining a
+workflow with chained operators.
+
+Here we start by computing the displacements norm.
+
+.. code-block:: python
+
+    # Define the norm operator (here for a fields container) for the displacement
+    my_norm = ops.math.norm_fc(fields_container=my_displacements).eval()
+    print(my_norm[0].data)
+
+.. rst-class:: sphx-glr-script-out
+
+    .. jupyter-execute::
+        :hide-code:
+
+        my_norm = ops.math.norm_fc(fields_container=my_displacements).eval()
+        print(my_norm[0].data)
+
+Then we compute the maximum values of the normalised displacement.
+
+.. code-block:: python
+
+    # Define the maximum operator and chain it to the norm operator
+    my_max= ops.min_max.min_max_fc(fields_container=my_norm).outputs.field_max()
+    print(my_max)
+
+.. rst-class:: sphx-glr-script-out
+
+    .. jupyter-execute::
+        :hide-code:
+
+        my_max = ops.min_max.min_max_fc(fields_container=my_norm).outputs.field_max()
+        print(my_max)
+
+.. _tutorials_main_steps_4:
+
+4- Visualize the data
+*********************
+
+Plot the transformed displacement results
+
+.. code-block:: python
+
+    # Define the support of the plot (here we plot the displacement over the mesh)
+    my_model.metadata.meshed_region.plot(field_or_fields_container=my_displacements)
+
+.. rst-class:: sphx-glr-script-out
+
+    .. jupyter-execute::
+        :hide-code:
+
+        my_model.metadata.meshed_region.plot(field_or_fields_container=my_displacements)
+
+.. _tutorials_main_steps_5:
+
+5- Extract the data
+*******************
\ No newline at end of file
diff --git a/doc/source/user_guide/tutorials/post_processing_basics/index.rst b/doc/source/user_guide/tutorials/post_processing_basics/index.rst
new file mode 100644
index 0000000000..b98cf33a34
--- /dev/null
+++ b/doc/source/user_guide/tutorials/post_processing_basics/index.rst
@@ -0,0 +1,14 @@
+..
_ref_tutorials_processing_basics: + +====================== +Processing data basics +====================== + +Data Processing consists in a series of operations applied to data to achieve a goal. DPF enables +you to access and transform simulation data using customizable workflows. + +There is an extensive catalog of operators with different kinds and complexity that can be used together. + +The tutorials in this section presents a basic application of PyDPF-Core as post-processing tool. + +.. include:: 01-main-steps.rst \ No newline at end of file diff --git a/requirements/requirements_docs.txt b/requirements/requirements_docs.txt index 8c5c093be2..cba5a89f22 100644 --- a/requirements/requirements_docs.txt +++ b/requirements/requirements_docs.txt @@ -3,6 +3,8 @@ enum-tools[sphinx]==0.13.0 graphviz==0.20.1 imageio==2.37.0 imageio-ffmpeg==0.6.0 +jupyter_sphinx==0.5.3 +nbsphinx==0.9.5 pypandoc==1.15 pytest-sphinx==0.6.3 pyvista==0.45.2