diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8901688fd..da22ee48f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,7 +3,7 @@ name: ci on: [push, pull_request] env: - X_PYTHON_MIN_VERSION: "3.9" + X_PYTHON_MIN_VERSION: "3.10" X_PYTHON_MAX_VERSION: "3.12" jobs: @@ -45,7 +45,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.12"] + python-version: ["3.10", "3.12"] env: COUCHDB_ADMIN_PASSWORD: "yo0Quai3" # (2024-10-11, s-heppner) @@ -154,15 +154,14 @@ jobs: working-directory: ./sdk steps: - uses: actions/checkout@v4 - - name: Set up Python ${{ env.X_PYTHON_MIN_VERSION }} + - name: Set up Python ${{ env.X_PYTHON_MAX_VERSION }} uses: actions/setup-python@v5 with: - python-version: ${{ env.X_PYTHON_MIN_VERSION }} + python-version: ${{ env.X_PYTHON_MAX_VERSION }} - name: Install Python dependencies run: | python -m pip install --upgrade pip - pip install . - pip install -r docs/add-requirements.txt + pip install .[docs] - name: Check documentation for errors run: | SPHINXOPTS="-a -E -n -W --keep-going" make -C docs html @@ -209,7 +208,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.12"] + python-version: ["3.10", "3.12"] defaults: run: working-directory: ./compliance_tool @@ -255,13 +254,13 @@ jobs: pip install .[dev] - name: Check typing with MyPy run: | - mypy ./aas_compliance_tool test + mypy aas_compliance_tool test - name: Check code style with PyCodestyle run: | - pycodestyle --count --max-line-length 120 ./aas_compliance_tool test + pycodestyle --count --max-line-length 120 aas_compliance_tool test - compliance-tool-readme-codeblocks: - # This job runs the same static code analysis (mypy and pycodestyle) on the codeblocks in our docstrings. + compliance-tool-package: + # This job checks whether we can build our compliance_tool package runs-on: ubuntu-latest defaults: @@ -273,42 +272,43 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ env.X_PYTHON_MIN_VERSION }} - - name: Install Python dependencies - # install the local sdk in editable mode so it does not get overwritten + - name: Install dependencies run: | python -m pip install --upgrade pip - pip install -e ../sdk[dev] - pip install .[dev] - - name: Check typing with MyPy - run: | - mypy <(codeblocks python README.md) - - name: Check code style with PyCodestyle - run: | - codeblocks --wrap python README.md | pycodestyle --count --max-line-length 120 - - - name: Run readme codeblocks with Python + pip install build + - name: Create source and wheel dist run: | - codeblocks python README.md | python + python -m build - compliance-tool-package: - # This job checks if we can build our compliance_tool package + #server-test: + # TODO: This job runs the unittests on the Python versions specified in the matrix + # and aas-test-engines on the server + + + server-static-analysis: + # This job runs static code analysis, namely pycodestyle and mypy runs-on: ubuntu-latest defaults: run: - working-directory: ./compliance_tool + working-directory: ./server/app steps: - uses: actions/checkout@v4 - name: Set up Python ${{ env.X_PYTHON_MIN_VERSION }} uses: actions/setup-python@v5 with: python-version: ${{ env.X_PYTHON_MIN_VERSION }} - - name: Install dependencies + - name: Install Python dependencies run: | python -m pip install --upgrade pip - pip install build - - name: Create source and wheel dist + pip install ../../sdk + pip install .[dev] + - name: Check typing with MyPy run: | - python -m build + mypy .
+ - name: Check code style with PyCodestyle + run: | + pycodestyle --count --max-line-length 120 . server-package: # This job checks if we can build our server package diff --git a/.gitignore b/.gitignore index 18b522c3a..fe9b6f9c4 100644 --- a/.gitignore +++ b/.gitignore @@ -21,7 +21,7 @@ htmlcov/ docs/build/ .hypothesis/ -# customized config files +# Customized config files sdk/test/test_config.ini # Schema files needed for testing sdk/test/adapter/schemas @@ -29,6 +29,8 @@ sdk/test/adapter/schemas # Ignore dynamically generated version file sdk/basyx/version.py compliance_tool/aas_compliance_tool/version.py +server/app/version.py -# ignore the content of the server storage +# Ignore the content of the server storage +server/input/ server/storage/ diff --git a/compliance_tool/pyproject.toml b/compliance_tool/pyproject.toml index c907f90df..e235bdc9b 100644 --- a/compliance_tool/pyproject.toml +++ b/compliance_tool/pyproject.toml @@ -34,7 +34,7 @@ classifiers = [ "Operating System :: OS Independent", "Development Status :: 5 - Production/Stable" ] -requires-python = ">=3.9" +requires-python = ">=3.10" dependencies = [ "pyecma376-2>=0.2.4", "jsonschema>=4.21.1", diff --git a/sdk/.readthedocs.yaml b/sdk/.readthedocs.yaml index e64e5daaf..7fd098a4c 100644 --- a/sdk/.readthedocs.yaml +++ b/sdk/.readthedocs.yaml @@ -6,7 +6,7 @@ version: 2 build: os: ubuntu-20.04 tools: - python: "3.9" + python: "3.10" sphinx: configuration: docs/source/conf.py @@ -15,4 +15,5 @@ python: install: - method: pip path: . - - requirements: docs/add-requirements.txt + extra_requirements: + - docs diff --git a/sdk/README.md b/sdk/README.md index f63f7afcb..74d63b8f3 100644 --- a/sdk/README.md +++ b/sdk/README.md @@ -42,8 +42,7 @@ The BaSyx Python SDK requires the following Python packages to be installed for * `lxml` (BSD 3-clause License, using `libxml2` under MIT License) * `python-dateutil` (BSD 3-clause License) * `pyecma376-2` (Apache License v2.0) -* `urllib3` (MIT License) -* `Werkzeug` (BSD 3-clause License) + Development/testing/documentation/example dependencies: * `mypy` (MIT License) @@ -128,7 +127,7 @@ For further examples and tutorials, check out the `basyx.aas.examples`-package. * [`tutorial_storage`](./basyx/aas/examples/tutorial_storage.py): Manage a larger number of Asset Administration Shells in an ObjectStore and resolve references * [`tutorial_serialization_deserialization`](./basyx/aas/examples/tutorial_serialization_deserialization.py): Use the JSON and XML serialization/deserialization for single objects or full standard-compliant files * [`tutorial_aasx`](./basyx/aas/examples/tutorial_aasx.py): Export Asset Administration Shells with related objects and auxiliary files to AASX package files -* [`tutorial_backend_couchdb`](./basyx/aas/examples/tutorial_backend_couchdb.py): Use the *Backends* interface (`update()/commit()` methods) to manage and retrieve AAS objects in a CouchDB document database +* [`tutorial_backend_couchdb`](./basyx/aas/examples/tutorial_backend_couchdb.py): Use the *CouchDBObjectStore* to manage and retrieve AAS objects in a CouchDB document database ### Documentation diff --git a/sdk/basyx/aas/adapter/__init__.py b/sdk/basyx/aas/adapter/__init__.py index 7f96702e9..0fca01291 100644 --- a/sdk/basyx/aas/adapter/__init__.py +++ b/sdk/basyx/aas/adapter/__init__.py @@ -7,3 +7,45 @@ Python SDK objects to/from XML. * :ref:`aasx `: This package offers functions for reading and writing AASX-files. 
""" + +from basyx.aas.adapter.aasx import AASXReader, DictSupplementaryFileContainer +from basyx.aas.adapter.json import read_aas_json_file_into +from basyx.aas.adapter.xml import read_aas_xml_file_into +from basyx.aas.model.provider import DictObjectStore +from pathlib import Path +from typing import Union + + +def load_directory(directory: Union[Path, str]) -> tuple[DictObjectStore, DictSupplementaryFileContainer]: + """ + Create a new :class:`~basyx.aas.model.provider.DictObjectStore` and use it to load Asset Administration Shell and + Submodel files in ``AASX``, ``JSON`` and ``XML`` format from a given directory into memory. Additionally, load all + embedded supplementary files into a new :class:`~basyx.aas.adapter.aasx.DictSupplementaryFileContainer`. + + :param directory: :class:`~pathlib.Path` or ``str`` pointing to the directory containing all Asset Administration + Shell and Submodel files to load + :return: Tuple consisting of a :class:`~basyx.aas.model.provider.DictObjectStore` and a + :class:`~basyx.aas.adapter.aasx.DictSupplementaryFileContainer` containing all loaded data + """ + + dict_object_store: DictObjectStore = DictObjectStore() + file_container: DictSupplementaryFileContainer = DictSupplementaryFileContainer() + + directory = Path(directory) + + for file in directory.iterdir(): + if not file.is_file(): + continue + + suffix = file.suffix.lower() + if suffix == ".json": + with open(file) as f: + read_aas_json_file_into(dict_object_store, f) + elif suffix == ".xml": + with open(file) as f: + read_aas_xml_file_into(dict_object_store, f) + elif suffix == ".aasx": + with AASXReader(file) as reader: + reader.read_into(object_store=dict_object_store, file_store=file_container) + + return dict_object_store, file_container diff --git a/sdk/basyx/aas/adapter/_generic.py b/sdk/basyx/aas/adapter/_generic.py index 79c98fc8c..65d14d8d3 100644 --- a/sdk/basyx/aas/adapter/_generic.py +++ b/sdk/basyx/aas/adapter/_generic.py @@ -19,6 +19,13 @@ PathOrBinaryIO = Union[Path, BinaryIO] PathOrIO = Union[Path, IO] # IO is TextIO or BinaryIO +# JSON top-level keys and their corresponding model classes +JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES = ( + ('assetAdministrationShells', model.AssetAdministrationShell), + ('submodels', model.Submodel), + ('conceptDescriptions', model.ConceptDescription), +) + # XML Namespace definition XML_NS_MAP = {"aas": "https://admin-shell.io/aas/3/0"} XML_NS_AAS = "{" + XML_NS_MAP["aas"] + "}" diff --git a/sdk/basyx/aas/adapter/aasx.py b/sdk/basyx/aas/adapter/aasx.py index 0b66e533e..8bb5958f6 100644 --- a/sdk/basyx/aas/adapter/aasx.py +++ b/sdk/basyx/aas/adapter/aasx.py @@ -28,7 +28,6 @@ import itertools import logging import os -import re from typing import Dict, Tuple, IO, Union, List, Set, Optional, Iterable, Iterator from .xml import read_aas_xml_file, write_aas_xml_file @@ -60,7 +59,7 @@ class AASXReader: reader.read_into(objects, files) """ - def __init__(self, file: Union[os.PathLike, str, IO]): + def __init__(self, file: Union[os.PathLike, str, IO], failsafe: bool = True): """ Open an AASX reader for the given filename or file handle @@ -69,16 +68,19 @@ def __init__(self, file: Union[os.PathLike, str, IO]): closing under any circumstances. :param file: A filename, file path or an open file-like object in binary mode + :param failsafe: If ``True``, the document is parsed in a failsafe way: Missing attributes and elements are + logged instead of causing exceptions. Defect objects are skipped. 
:raises FileNotFoundError: If the file does not exist :raises ValueError: If the file is not a valid OPC zip package """ + self.failsafe: bool = failsafe try: - logger.debug("Opening {} as AASX pacakge for reading ...".format(file)) + logger.debug(f"Opening {file} as AASX package for reading ...") self.reader = pyecma376_2.ZipPackageReader(file) except FileNotFoundError: raise except Exception as e: - raise ValueError("{} is not a valid ECMA376-2 (OPC) file: {}".format(file, e)) from e + raise ValueError(f"{file} is not a valid ECMA376-2 (OPC) file: {e}") from e def get_core_properties(self) -> pyecma376_2.OPCCoreProperties: """ @@ -132,7 +134,7 @@ def read_into(self, object_store: model.AbstractObjectStore, objects from the AASX file to :param file_store: A :class:`SupplementaryFileContainer <.AbstractSupplementaryFileContainer>` to add the embedded supplementary files to - :param override_existing: If ``True``, existing objects in the object store are overridden with objects from the + :param override_existing: If ``True``, existing objects in the ObjectStore are overridden with objects from the AASX that have the same :class:`~basyx.aas.model.base.Identifier`. Default behavior is to skip those objects from the AASX. :return: A set of the :class:`Identifiers ` of all @@ -143,7 +145,20 @@ try: aasx_origin_part = core_rels[RELATIONSHIP_TYPE_AASX_ORIGIN][0] except IndexError as e: - raise ValueError("Not a valid AASX file: aasx-origin Relationship is missing.") from e + if core_rels.get("http://www.admin-shell.io/aasx/relationships/aasx-origin"): + # Since there are many AASX files with this (wrong) relationship URL in the wild, we make an exception + # and try to read it anyway. However, we notify the user that this may lead to data loss, since it is + # highly likely that the other relationship URLs are also wrong in that file. + # See also [#383](https://github.com/eclipse-basyx/basyx-python-sdk/issues/383) for the discussion. + logger.warning("SPECIFICATION VIOLATED: The Relationship-URL in your AASX file " + "('http://www.admin-shell.io/aasx/relationships/aasx-origin') " + "is not valid, it should be 'http://admin-shell.io/aasx/relationships/aasx-origin'. " + "We try to read the AASX file anyway, but this cannot be guaranteed in the future, " + "and the file may not be fully readable, so data loss may occur. " + "Please fix this and/or notify the source of the AASX.") + aasx_origin_part = core_rels["http://www.admin-shell.io/aasx/relationships/aasx-origin"][0] + else: + raise ValueError("Not a valid AASX file: aasx-origin Relationship is missing.") from e read_identifiables: Set[model.Identifier] = set() @@ -159,7 +174,10 @@ self._read_aas_part_into(split_part, object_store, file_store, read_identifiables, override_existing, **kwargs) if no_aas_files_found: - logger.warning("No AAS files found in AASX package") + if self.failsafe: + logger.warning("No AAS files found in AASX package") + else: + raise ValueError("No AAS files found in AASX package") return read_identifiables @@ -192,7 +210,7 @@ def _read_aas_part_into(self, part_name: str, from a File object of this part :param read_identifiables: A set of Identifiers of objects which have already been read. New objects' Identifiers are added to this set. Objects with already known Identifiers are skipped silently.
- :param override_existing: If True, existing objects in the object store are overridden with objects from the + :param override_existing: If True, existing objects in the ObjectStore are overridden with objects from the AASX that have the same Identifier. Default behavior is to skip those objects from the AASX. """ for obj in self._parse_aas_part(part_name, **kwargs): continue if obj.id in object_store: if override_existing: - logger.info("Overriding existing object in ObjectStore with {} ...".format(obj)) + logger.info(f"Overriding existing object in ObjectStore with {obj} ...") object_store.discard(obj) else: - logger.warning("Skipping {}, since an object with the same id is already contained in the " - "ObjectStore".format(obj)) - continue + if self.failsafe: + logger.warning(f"Skipping {obj}, since an object with the same id is already contained in the " + "ObjectStore") + continue + else: + raise ValueError(f"Object with id {obj.id} is already contained in the ObjectStore") object_store.add(obj) read_identifiables.add(obj.id) if isinstance(obj, model.Submodel): @@ -223,17 +244,21 @@ def _parse_aas_part(self, part_name: str, **kwargs) -> model.DictObjectStore: content_type = self.reader.get_content_type(part_name) extension = part_name.split("/")[-1].split(".")[-1] if content_type.split(";")[0] in ("text/xml", "application/xml") or content_type == "" and extension == "xml": - logger.debug("Parsing AAS objects from XML stream in OPC part {} ...".format(part_name)) + logger.debug(f"Parsing AAS objects from XML stream in OPC part {part_name} ...") with self.reader.open_part(part_name) as p: - return read_aas_xml_file(p, **kwargs) + return read_aas_xml_file(p, failsafe=self.failsafe, **kwargs) elif content_type.split(";")[0] in ("text/json", "application/json") \ or content_type == "" and extension == "json": - logger.debug("Parsing AAS objects from JSON stream in OPC part {} ...".format(part_name)) + logger.debug(f"Parsing AAS objects from JSON stream in OPC part {part_name} ...") with self.reader.open_part(part_name) as p: - return read_aas_json_file(io.TextIOWrapper(p, encoding='utf-8-sig'), **kwargs) + return read_aas_json_file(io.TextIOWrapper(p, encoding='utf-8-sig'), failsafe=self.failsafe, **kwargs) else: - logger.error("Could not determine part format of AASX part {} (Content Type: {}, extension: {}" .format(part_name, content_type, extension)) + error_message = (f"Could not determine part format of AASX part {part_name} (Content Type: {content_type}," + f" extension: {extension})") + if self.failsafe: + logger.error(error_message) + else: + raise ValueError(error_message) return model.DictObjectStore() def _collect_supplementary_files(self, part_name: str, submodel: model.Submodel, @@ -242,7 +267,7 @@ def _collect_supplementary_files(self, part_name: str, submodel: model.Submodel, Helper function to search File objects within a single parsed Submodel, extract the referenced supplementary files and update the File object's values with the absolute path. - :param part_name: The OPC part name of the part the submodel has been parsed from. This is used to resolve + :param part_name: The OPC part name of the part the Submodel has been parsed from. This is used to resolve relative file paths.
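With ``failsafe`` now threaded through ``_parse_aas_part()`` into ``read_aas_xml_file()``/``read_aas_json_file()``, strict reading can be requested at the reader level. A minimal sketch of the strict mode (the file name is hypothetical):

```python
from basyx.aas import model
from basyx.aas.adapter import aasx

objects: model.DictObjectStore = model.DictObjectStore()
files = aasx.DictSupplementaryFileContainer()

# failsafe=False makes the reader raise ValueError on defective packages
# (duplicate ids, undeterminable part formats, no AAS parts at all)
# instead of logging a warning and skipping.
try:
    with aasx.AASXReader("example.aasx", failsafe=False) as reader:
        reader.read_into(object_store=objects, file_store=files)
except ValueError as e:
    print(f"Package rejected: {e}")
```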
:param submodel: The Submodel to process :param file_store: The SupplementaryFileContainer to add the extracted supplementary files to # to refer to files within the AASX package. Thus, we must skip all other types of URIs (esp. absolute # URIs and network-path references) if element.value.startswith('//') or ':' in element.value.split('/')[0]: - logger.info("Skipping supplementary file %s, since it seems to be an absolute URI or network-path " "URI reference", element.value) + logger.info(f"Skipping supplementary file {element.value}, since it seems to be an absolute URI or " + f"network-path URI reference") continue absolute_name = pyecma376_2.package_model.part_realpath(element.value, part_name) - logger.debug("Reading supplementary file {} from AASX package ...".format(absolute_name)) + logger.debug(f"Reading supplementary file {absolute_name} from AASX package ...") with self.reader.open_part(absolute_name) as p: final_name = file_store.add_file(absolute_name, p, self.reader.get_content_type(absolute_name)) element.value = final_name @@ -295,7 +320,7 @@ class AASXWriter: """ AASX_ORIGIN_PART_NAME = "/aasx/aasx-origin" - def __init__(self, file: Union[os.PathLike, str, IO]): + def __init__(self, file: Union[os.PathLike, str, IO], failsafe: bool = True): """ Create a new AASX package in the given file and open the AASXWriter to add contents to the package. AAS parts to the file and close the underlying ZIP file writer. You may also use the AASXWriter as a context manager to ensure closing under any circumstances. + :param failsafe: If ``True``, the package is written in a failsafe way: Objects that cannot be found or + resolved are logged and skipped instead of causing exceptions. :param file: filename, path, or binary file handle opened for writing """ + self.failsafe: bool = failsafe # names of aas-spec parts, used by `_write_aasx_origin_relationships()` self._aas_part_names: List[str] = [] # name of the thumbnail part (if any) @@ -364,7 +392,7 @@ def write_aas(self, :param write_json: If ``True``, JSON parts are created for the AAS and each :class:`~basyx.aas.model.submodel.Submodel` in the AASX package file instead of XML parts. Defaults to ``False``.
- :raises KeyError: If one of the AAS could not be retrieved from the object store (unresolvable + :raises KeyError: If one of the AAS could not be retrieved from the ObjectStore (unresolvable :class:`Submodels ` and :class:`ConceptDescriptions ` are skipped, logging a warning/info message) @@ -378,12 +406,15 @@ def write_aas(self, for aas_id in aas_ids: try: aas = object_store.get_identifiable(aas_id) - # TODO add failsafe mode - except KeyError: - raise - if not isinstance(aas, model.AssetAdministrationShell): - raise TypeError(f"Identifier {aas_id} does not belong to an AssetAdministrationShell object but to " - f"{aas!r}") + if not isinstance(aas, model.AssetAdministrationShell): + raise TypeError(f"Identifier {aas_id} does not belong to an AssetAdministrationShell object but to " + f"{aas!r}") + except (KeyError, TypeError) as e: + if self.failsafe: + logger.error(f"Skipping AAS {aas_id}: {e}") + continue + else: + raise # Add the AssetAdministrationShell object to the data part objects_to_be_written.add(aas) @@ -393,8 +424,11 @@ def write_aas(self, try: submodel = submodel_ref.resolve(object_store) except KeyError: - logger.warning("Could not find submodel %s. Skipping it.", str(submodel_ref)) - continue + if self.failsafe: + logger.warning(f"Could not find Submodel {submodel_ref}. Skipping it.") + continue + else: + raise KeyError(f"Could not find Submodel {submodel_ref!r}") objects_to_be_written.add(submodel) # Traverse object tree and check if semanticIds are referencing to existing ConceptDescriptions in the @@ -410,13 +444,20 @@ def write_aas(self, try: cd = semantic_id.resolve(object_store) except KeyError: - logger.warning("ConceptDescription for semanticId %s not found in object store. Skipping it.", - str(semantic_id)) - continue + if self.failsafe: + logger.warning(f"ConceptDescription for semanticId {semantic_id} not found in ObjectStore. " + f"Skipping it.") + continue + else: + raise KeyError(f"ConceptDescription for semanticId {semantic_id!r} not found in ObjectStore.") except model.UnexpectedTypeError as e: - logger.error("semanticId %s resolves to %s, which is not a ConceptDescription. Skipping it.", - str(semantic_id), e.value) - continue + if self.failsafe: + logger.error(f"semanticId {semantic_id} resolves to {e.value}, " + f"which is not a ConceptDescription. Skipping it.") + continue + else: + raise TypeError(f"semanticId {semantic_id!r} resolves to {e.value!r}, which is not a" + f" ConceptDescription.") from e concept_descriptions.append(cd) objects_to_be_written.update(concept_descriptions) @@ -440,7 +481,7 @@ def write_aas_objects(self, This method takes the AAS's :class:`~basyx.aas.model.base.Identifier` (as ``aas_id``) to retrieve it from the given object_store. If the list of written objects includes :class:`~basyx.aas.model.submodel.Submodel` objects, Supplementary files which are referenced by :class:`~basyx.aas.model.submodel.File` objects within - those submodels, are also added to the AASX package. + those Submodels, are also added to the AASX package. .. attention:: @@ -465,7 +506,7 @@ def write_aas_objects(self, :param additional_relationships: Optional OPC/ECMA376 relationships which should originate at the AAS object part to be written, in addition to the aas-suppl relationships which are created automatically. 
""" - logger.debug("Writing AASX part {} with AAS objects ...".format(part_name)) + logger.debug(f"Writing AASX part {part_name} with AAS objects ...") objects: model.DictObjectStore[model.Identifiable] = model.DictObjectStore() @@ -474,8 +515,11 @@ def write_aas_objects(self, try: the_object = object_store.get_identifiable(identifier) except KeyError: - logger.error("Could not find object {} in ObjectStore".format(identifier)) - continue + if self.failsafe: + logger.error(f"Could not find object {identifier} in ObjectStore") + continue + else: + raise KeyError(f"Could not find object {identifier!r} in ObjectStore") objects.add(the_object) self.write_all_aas_objects(part_name, objects, file_store, write_json, split_part, additional_relationships) @@ -516,7 +560,7 @@ def write_all_aas_objects(self, :param additional_relationships: Optional OPC/ECMA376 relationships which should originate at the AAS object part to be written, in addition to the aas-suppl relationships which are created automatically. """ - logger.debug("Writing AASX part {} with AAS objects ...".format(part_name)) + logger.debug(f"Writing AASX part {part_name} with AAS objects ...") supplementary_files: List[str] = [] # Retrieve objects and scan for referenced supplementary files @@ -543,29 +587,36 @@ def write_all_aas_objects(self, else: write_aas_xml_file(p, objects) - # Write submodel's supplementary files to AASX file + # Write Submodel's supplementary files to AASX file supplementary_file_names = [] for file_name in supplementary_files: try: content_type = file_store.get_content_type(file_name) hash = file_store.get_sha256(file_name) except KeyError: - logger.warning("Could not find file {} in file store.".format(file_name)) - continue + if self.failsafe: + logger.warning(f"Could not find file {file_name} in FileStore.") + continue + else: + raise KeyError(f"Could not find file {file_name} in FileStore.") # Check if this supplementary file has already been written to the AASX package or has a name conflict if self._supplementary_part_names.get(file_name) == hash: continue elif file_name in self._supplementary_part_names: - logger.error("Trying to write supplementary file {} to AASX twice with different contents" - .format(file_name)) - logger.debug("Writing supplementary file {} to AASX package ...".format(file_name)) + if self.failsafe: + logger.error(f"Trying to write supplementary file {file_name} to AASX " + f"twice with different contents") + else: + raise ValueError(f"Trying to write supplementary file {file_name} to AASX twice with" + f" different contents") + logger.debug(f"Writing supplementary file {file_name} to AASX package ...") with self.writer.open_part(file_name, content_type) as p: file_store.write_file(file_name, p) supplementary_file_names.append(pyecma376_2.package_model.normalize_part_name(file_name)) self._supplementary_part_names[file_name] = hash - # Add relationships from submodel to supplementary parts - logger.debug("Writing aas-suppl relationships for AAS object part {} to AASX package ...".format(part_name)) + # Add relationships from Submodel to supplementary parts + logger.debug(f"Writing aas-suppl relationships for AAS object part {part_name} to AASX package ...") self.writer.write_relationships( itertools.chain( (pyecma376_2.OPCRelationship("r{}".format(i), @@ -604,7 +655,7 @@ def write_thumbnail(self, name: str, data: bytearray, content_type: str): :param content_type: OPC content type (MIME type) of the image file """ if self._thumbnail_part is not None: - raise RuntimeError("package 
thumbnail has already been written to {}.".format(self._thumbnail_part)) + raise RuntimeError(f"package thumbnail has already been written to {self._thumbnail_part}.") with self.writer.open_part(name, content_type) as p: p.write(data) self._thumbnail_part = name @@ -820,3 +871,6 @@ def __contains__(self, item: object) -> bool: def __iter__(self) -> Iterator[str]: return iter(self._name_map) + + def __len__(self) -> int: + return len(self._name_map) diff --git a/sdk/basyx/aas/adapter/json/json_deserialization.py b/sdk/basyx/aas/adapter/json/json_deserialization.py index 78e3713f5..84635703d 100644 --- a/sdk/basyx/aas/adapter/json/json_deserialization.py +++ b/sdk/basyx/aas/adapter/json/json_deserialization.py @@ -34,12 +34,13 @@ import json import logging import pprint -from typing import Dict, Callable, ContextManager, TypeVar, Type, List, IO, Optional, Set, get_args +from typing import (Dict, Callable, ContextManager, TypeVar, Type, + List, IO, Optional, Set, get_args, Tuple, Iterable, Any) from basyx.aas import model from .._generic import MODELLING_KIND_INVERSE, ASSET_KIND_INVERSE, KEY_TYPES_INVERSE, ENTITY_TYPES_INVERSE, \ IEC61360_DATA_TYPES_INVERSE, IEC61360_LEVEL_TYPES_INVERSE, KEY_TYPES_CLASSES_INVERSE, REFERENCE_TYPES_INVERSE, \ - DIRECTION_INVERSE, STATE_OF_EVENT_INVERSE, QUALIFIER_KIND_INVERSE, PathOrIO, Path + DIRECTION_INVERSE, STATE_OF_EVENT_INVERSE, QUALIFIER_KIND_INVERSE, PathOrIO, Path, JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES logger = logging.getLogger(__name__) @@ -154,19 +155,20 @@ def __init__(self, *args, **kwargs): json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs) @classmethod - def object_hook(cls, dct: Dict[str, object]) -> object: - # Check if JSON object seems to be a deserializable AAS object (i.e. it has a modelType). Otherwise, the JSON - # object is returned as is, so it's possible to mix AAS objects with other data within a JSON structure. - if 'modelType' not in dct: - return dct + def _get_aas_class_parsers(cls) -> Dict[str, Callable[[Dict[str, object]], object]]: + """ + Returns the dictionary of AAS class parsers. - # The following dict specifies a constructor method for all AAS classes that may be identified using the - # ``modelType`` attribute in their JSON representation. Each of those constructor functions takes the JSON - # representation of an object and tries to construct a Python object from it. Embedded objects that have a - # modelType themselves are expected to be converted to the correct PythonType already. Additionally, each - # function takes a bool parameter ``failsafe``, which indicates weather to log errors and skip defective objects - # instead of raising an Exception. - AAS_CLASS_PARSERS: Dict[str, Callable[[Dict[str, object]], object]] = { + + The following dict specifies a constructor method for all AAS classes that may be identified using the + ``modelType`` attribute in their JSON representation. Each of those constructor functions takes the JSON + representation of an object and tries to construct a Python object from it. Embedded objects that have a + modelType themselves are expected to be converted to the correct PythonType already. Additionally, each + function takes a bool parameter ``failsafe``, which indicates whether to log errors and skip defective objects + instead of raising an Exception.
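The writer side gets the mirrored behavior: with ``failsafe=False``, unresolvable AAS, Submodels, ConceptDescriptions or supplementary files now raise instead of being skipped. A rough sketch, assuming ``store`` already contains an AAS under the hypothetical identifier below:

```python
from basyx.aas import model
from basyx.aas.adapter import aasx

AAS_ID = "https://example.com/aas/1"  # hypothetical identifier
store: model.DictObjectStore = model.DictObjectStore()  # assumed to hold the AAS
files = aasx.DictSupplementaryFileContainer()

# With failsafe=False, an unresolvable Submodel reference raises KeyError
# instead of merely logging a warning and skipping the Submodel.
with aasx.AASXWriter("out.aasx", failsafe=False) as writer:
    writer.write_aas([AAS_ID], store, files)
```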
+ + :return: The dictionary of AAS class parsers + """ + aas_class_parsers: Dict[str, Callable[[Dict[str, object]], object]] = { 'AssetAdministrationShell': cls._construct_asset_administration_shell, 'AssetInformation': cls._construct_asset_information, 'SpecificAssetId': cls._construct_specific_asset_id, @@ -189,6 +191,16 @@ 'ReferenceElement': cls._construct_reference_element, 'DataSpecificationIec61360': cls._construct_data_specification_iec61360, } + return aas_class_parsers + + @classmethod + def object_hook(cls, dct: Dict[str, object]) -> object: + # Check if JSON object seems to be a deserializable AAS object (i.e. it has a modelType). Otherwise, the JSON + # object is returned as is, so it's possible to mix AAS objects with other data within a JSON structure. + if 'modelType' not in dct: + return dct + + AAS_CLASS_PARSERS = cls._get_aas_class_parsers() # Get modelType and constructor function if not isinstance(dct['modelType'], str): @@ -799,7 +811,9 @@ def _select_decoder(failsafe: bool, stripped: bool, decoder: Optional[Type[AASFr def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, replace_existing: bool = False, ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, - decoder: Optional[Type[AASFromJsonDecoder]] = None) -> Set[model.Identifier]: + decoder: Optional[Type[AASFromJsonDecoder]] = None, + keys_to_types: Iterable[Tuple[str, Any]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) \ + -> Set[model.Identifier]: """ Read an Asset Administration Shell JSON file according to 'Details of the Asset Administration Shell', chapter 5.5 into a given object store. See https://git.rwth-aachen.de/acplt/pyi40aas/-/issues/91 This parameter is ignored if a decoder class is specified. :param decoder: The decoder class used to decode the JSON objects + :param keys_to_types: An iterable of tuples mapping JSON top-level keys to the expected types of the objects in + the corresponding list. This is used to check the types of the parsed objects :raises KeyError: **Non-failsafe**: Encountered a duplicate identifier :raises KeyError: Encountered an identifier that already exists in the given ``object_store`` with both ``replace_existing`` and ``ignore_existing`` set to ``False`` with cm as fp: data = json.load(fp, cls=decoder_) - for name, expected_type in (('assetAdministrationShells', model.AssetAdministrationShell), - ('submodels', model.Submodel), - ('conceptDescriptions', model.ConceptDescription)): + for name, expected_type in keys_to_types: try: lst = _get_ts(data, name, list) except (KeyError, TypeError): continue for item in lst: - error_message = "Expected a {} in list '{}', but found {}".format( expected_type.__name__, name, repr(item)) + error_msg = f"Expected a {expected_type.__name__} in list '{name}', but found {repr(item)}." if isinstance(item, model.Identifiable): if not isinstance(item, expected_type): - if decoder_.failsafe: - logger.warning("{} was in wrong list '{}'; nevertheless, we'll use it".format(item, name)) - else: - raise TypeError(error_message) + if not decoder_.failsafe: + raise TypeError(f"{item} was in the wrong list '{name}'") + logger.warning(f"{item} was in the wrong list '{name}'; nevertheless, we'll use it") + if item.id in ret: - error_message = f"{item} has a duplicate identifier already parsed in the document!"
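Since ``keys_to_types`` defaults to ``JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES`` from ``_generic``, callers can narrow parsing to selected top-level lists. A sketch under the assumption that only Submodels are of interest (the file name is hypothetical):

```python
from pathlib import Path
from basyx.aas import model
from basyx.aas.adapter.json import read_aas_json_file_into

store: model.DictObjectStore = model.DictObjectStore()
# Only the 'submodels' top-level list is read; other keys are never inspected.
read_aas_json_file_into(store, Path("submodels.json"),
                        keys_to_types=(("submodels", model.Submodel),))
```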
+ error_msg = f"{item} has a duplicate identifier already parsed in the document!" if not decoder_.failsafe: - raise KeyError(error_message) - logger.error(error_message + " skipping it...") + raise KeyError(error_msg) + logger.error(f"{error_msg} Skipping it...") continue + existing_element = object_store.get(item.id) if existing_element is not None: if not replace_existing: - error_message = f"object with identifier {item.id} already exists " \ - f"in the object store: {existing_element}!" + error_msg = f"Object with id '{item.id}' already exists in store: {existing_element}!" if not ignore_existing: - raise KeyError(error_message + f" failed to insert {item}!") - logger.info(error_message + f" skipping insertion of {item}...") + raise KeyError(f"{error_msg} Failed to insert {item}!") + logger.info(f"{error_msg} Skipping {item}...") continue object_store.discard(existing_element) + object_store.add(item) ret.add(item.id) elif decoder_.failsafe: - logger.error(error_message) + logger.error(f"{error_msg} Skipping it...") else: - raise TypeError(error_message) + raise TypeError(error_msg) return ret -def read_aas_json_file(file: PathOrIO, **kwargs) -> model.DictObjectStore[model.Identifiable]: +def read_aas_json_file(file: PathOrIO, failsafe: bool = True, **kwargs) -> model.DictObjectStore[model.Identifiable]: """ A wrapper of :meth:`~basyx.aas.adapter.json.json_deserialization.read_aas_json_file_into`, that reads all objects in an empty :class:`~basyx.aas.model.provider.DictObjectStore`. This function supports the same keyword arguments as :meth:`~basyx.aas.adapter.json.json_deserialization.read_aas_json_file_into`. :param file: A filename or file-like object to read the JSON-serialized data from + :param failsafe: If ``True``, the document is parsed in a failsafe way: Missing attributes and elements are logged + instead of causing exceptions. Defective objects are skipped. :param kwargs: Keyword arguments passed to :meth:`read_aas_json_file_into` :raises KeyError: **Non-failsafe**: Encountered a duplicate identifier :raises (~basyx.aas.model.base.AASConstraintViolation, KeyError, ValueError, TypeError): **Non-failsafe**: @@ -901,5 +916,5 @@ def read_aas_json_file(file: PathOrIO, **kwargs) -> model.DictObjectStore[model. :return: A :class:`~basyx.aas.model.provider.DictObjectStore` containing all AAS objects from the JSON file """ object_store: model.DictObjectStore[model.Identifiable] = model.DictObjectStore() - read_aas_json_file_into(object_store, file, **kwargs) + read_aas_json_file_into(object_store, file, failsafe=failsafe, **kwargs) return object_store diff --git a/sdk/basyx/aas/adapter/json/json_serialization.py b/sdk/basyx/aas/adapter/json/json_serialization.py index f7d6626eb..0b0df0164 100644 --- a/sdk/basyx/aas/adapter/json/json_serialization.py +++ b/sdk/basyx/aas/adapter/json/json_serialization.py @@ -30,11 +30,12 @@ import contextlib import inspect import io -from typing import ContextManager, List, Dict, Optional, TextIO, Type, Callable, get_args +from typing import ContextManager, List, Dict, Optional, TextIO, Type, Callable, get_args, Iterable, Tuple import json from basyx.aas import model from ..
import _generic +from .._generic import JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES class AASToJsonEncoder(json.JSONEncoder): @@ -57,6 +58,40 @@ class AASToJsonEncoder(json.JSONEncoder): """ stripped = False + @classmethod + def _get_aas_class_serializers(cls) -> Dict[Type, Callable]: + mapping: Dict[Type, Callable] = { + model.AdministrativeInformation: cls._administrative_information_to_json, + model.AnnotatedRelationshipElement: cls._annotated_relationship_element_to_json, + model.AssetAdministrationShell: cls._asset_administration_shell_to_json, + model.AssetInformation: cls._asset_information_to_json, + model.BasicEventElement: cls._basic_event_element_to_json, + model.Blob: cls._blob_to_json, + model.Capability: cls._capability_to_json, + model.ConceptDescription: cls._concept_description_to_json, + model.DataSpecificationIEC61360: cls._data_specification_iec61360_to_json, + model.Entity: cls._entity_to_json, + model.Extension: cls._extension_to_json, + model.File: cls._file_to_json, + model.Key: cls._key_to_json, + model.LangStringSet: cls._lang_string_set_to_json, + model.MultiLanguageProperty: cls._multi_language_property_to_json, + model.Operation: cls._operation_to_json, + model.Property: cls._property_to_json, + model.Qualifier: cls._qualifier_to_json, + model.Range: cls._range_to_json, + model.Reference: cls._reference_to_json, + model.ReferenceElement: cls._reference_element_to_json, + model.RelationshipElement: cls._relationship_element_to_json, + model.Resource: cls._resource_to_json, + model.SpecificAssetId: cls._specific_asset_id_to_json, + model.Submodel: cls._submodel_to_json, + model.SubmodelElementCollection: cls._submodel_element_collection_to_json, + model.SubmodelElementList: cls._submodel_element_list_to_json, + model.ValueReferencePair: cls._value_reference_pair_to_json, + } + return mapping + def default(self, obj: object) -> object: """ The overwritten ``default`` method for :class:`json.JSONEncoder` @@ -64,36 +99,7 @@ def default(self, obj: object) -> object: :param obj: The object to serialize to json :return: The serialized object """ - mapping: Dict[Type, Callable] = { - model.AdministrativeInformation: self._administrative_information_to_json, - model.AnnotatedRelationshipElement: self._annotated_relationship_element_to_json, - model.AssetAdministrationShell: self._asset_administration_shell_to_json, - model.AssetInformation: self._asset_information_to_json, - model.BasicEventElement: self._basic_event_element_to_json, - model.Blob: self._blob_to_json, - model.Capability: self._capability_to_json, - model.ConceptDescription: self._concept_description_to_json, - model.DataSpecificationIEC61360: self._data_specification_iec61360_to_json, - model.Entity: self._entity_to_json, - model.Extension: self._extension_to_json, - model.File: self._file_to_json, - model.Key: self._key_to_json, - model.LangStringSet: self._lang_string_set_to_json, - model.MultiLanguageProperty: self._multi_language_property_to_json, - model.Operation: self._operation_to_json, - model.Property: self._property_to_json, - model.Qualifier: self._qualifier_to_json, - model.Range: self._range_to_json, - model.Reference: self._reference_to_json, - model.ReferenceElement: self._reference_element_to_json, - model.RelationshipElement: self._relationship_element_to_json, - model.Resource: self._resource_to_json, - model.SpecificAssetId: self._specific_asset_id_to_json, - model.Submodel: self._submodel_to_json, - model.SubmodelElementCollection: self._submodel_element_collection_to_json, - 
model.SubmodelElementList: self._submodel_element_list_to_json, - model.ValueReferencePair: self._value_reference_pair_to_json, - } + mapping = self._get_aas_class_serializers() for typ in mapping: if isinstance(obj, typ): mapping_method = mapping[typ] @@ -130,7 +136,7 @@ def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: if obj.description: data['description'] = obj.description try: - ref_type = next(iter(t for t in inspect.getmro(type(obj)) if t in model.KEY_TYPES_CLASSES)) + ref_type = model.resolve_referable_class_in_key_types(obj) except StopIteration as e: raise TypeError("Object of type {} is Referable but does not inherit from a known AAS type" .format(obj.__class__.__name__)) from e @@ -693,26 +699,34 @@ def _select_encoder(stripped: bool, encoder: Optional[Type[AASToJsonEncoder]] = return AASToJsonEncoder if not stripped else StrippedAASToJsonEncoder -def _create_dict(data: model.AbstractObjectStore) -> dict: - # separate different kind of objects - asset_administration_shells: List[model.AssetAdministrationShell] = [] - submodels: List[model.Submodel] = [] - concept_descriptions: List[model.ConceptDescription] = [] +def _create_dict(data: model.AbstractObjectStore, + keys_to_types: Iterable[Tuple[str, Type]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) \ + -> Dict[str, List[model.Identifiable]]: + """ + Categorizes objects from an AbstractObjectStore into a dictionary based on their types. + + This function iterates over the objects in the provided AbstractObjectStore and groups them into lists + based on their types, as defined in the `keys_to_types` mapping. The resulting dictionary contains + keys corresponding to the names in `keys_to_types` and values as lists of objects of the respective types. + + :param data: An AbstractObjectStore containing objects to be categorized. + :param keys_to_types: An iterable of tuples where each tuple contains: + - A string key representing the category name. + - A type to match objects against. + :return: A dictionary where keys are category names and values are lists of objects of the corresponding types. 
+ """ + objects: Dict[str, List[model.Identifiable]] = {} + for obj in data: - if isinstance(obj, model.AssetAdministrationShell): - asset_administration_shells.append(obj) - elif isinstance(obj, model.Submodel): - submodels.append(obj) - elif isinstance(obj, model.ConceptDescription): - concept_descriptions.append(obj) - dict_: Dict[str, List] = {} - if asset_administration_shells: - dict_['assetAdministrationShells'] = asset_administration_shells - if submodels: - dict_['submodels'] = submodels - if concept_descriptions: - dict_['conceptDescriptions'] = concept_descriptions - return dict_ + # Iterate through the mapping of category names to expected types + for name, expected_type in keys_to_types: + # Check if the object matches the expected type + if isinstance(obj, expected_type): + # Add the object to the appropriate category in the dictionary + objects.setdefault(name, []) + objects[name].append(obj) + break # Exit the inner loop once a match is found + return objects def object_store_to_json(data: model.AbstractObjectStore, stripped: bool = False, diff --git a/sdk/basyx/aas/adapter/xml/xml_deserialization.py b/sdk/basyx/aas/adapter/xml/xml_deserialization.py index 4063b8df7..b263820d1 100644 --- a/sdk/basyx/aas/adapter/xml/xml_deserialization.py +++ b/sdk/basyx/aas/adapter/xml/xml_deserialization.py @@ -1502,13 +1502,16 @@ def read_aas_xml_file_into(object_store: model.AbstractObjectStore[model.Identif return ret -def read_aas_xml_file(file: PathOrIO, **kwargs: Any) -> model.DictObjectStore[model.Identifiable]: +def read_aas_xml_file(file: PathOrIO, failsafe: bool = True, **kwargs: Any)\ + -> model.DictObjectStore[model.Identifiable]: """ A wrapper of :meth:`~basyx.aas.adapter.xml.xml_deserialization.read_aas_xml_file_into`, that reads all objects in an empty :class:`~basyx.aas.model.provider.DictObjectStore`. This function supports the same keyword arguments as :meth:`~basyx.aas.adapter.xml.xml_deserialization.read_aas_xml_file_into`. :param file: A filename or file-like object to read the XML-serialized data from + :param failsafe: If ``True``, the document is parsed in a failsafe way: Missing attributes and elements are logged + instead of causing exceptions. Defect objects are skipped. 
:param kwargs: Keyword arguments passed to :meth:`~basyx.aas.adapter.xml.xml_deserialization.read_aas_xml_file_into` :raises ~lxml.etree.XMLSyntaxError: **Non-failsafe**: If the given file(-handle) has invalid XML :raises KeyError: **Non-failsafe**: If a required namespace has not been declared on the XML document @@ -1519,5 +1522,5 @@ def read_aas_xml_file(file: PathOrIO, **kwargs: Any) -> model.DictObjectStore[mo :return: A :class:`~basyx.aas.model.provider.DictObjectStore` containing all AAS objects from the XML file """ object_store: model.DictObjectStore[model.Identifiable] = model.DictObjectStore() - read_aas_xml_file_into(object_store, file, **kwargs) + read_aas_xml_file_into(object_store, file, failsafe=failsafe, **kwargs) return object_store diff --git a/sdk/basyx/aas/adapter/xml/xml_serialization.py b/sdk/basyx/aas/adapter/xml/xml_serialization.py index 7e6585cea..2dc578ca0 100644 --- a/sdk/basyx/aas/adapter/xml/xml_serialization.py +++ b/sdk/basyx/aas/adapter/xml/xml_serialization.py @@ -875,83 +875,49 @@ def object_to_xml_element(obj: object) -> etree._Element: :param obj: The object to serialize """ - serialization_func: Callable[..., etree._Element] - if isinstance(obj, model.Key): - serialization_func = key_to_xml - elif isinstance(obj, model.Reference): - serialization_func = reference_to_xml + return key_to_xml(obj) elif isinstance(obj, model.Reference): - serialization_func = reference_to_xml + return reference_to_xml(obj) elif isinstance(obj, model.AdministrativeInformation): - serialization_func = administrative_information_to_xml + return administrative_information_to_xml(obj) elif isinstance(obj, model.Qualifier): - serialization_func = qualifier_to_xml - elif isinstance(obj, model.AnnotatedRelationshipElement): - serialization_func = annotated_relationship_element_to_xml - elif isinstance(obj, model.BasicEventElement): - serialization_func = basic_event_element_to_xml - elif isinstance(obj, model.Blob): - serialization_func = blob_to_xml - elif isinstance(obj, model.Capability): - serialization_func = capability_to_xml - elif isinstance(obj, model.Entity): - serialization_func = entity_to_xml + return qualifier_to_xml(obj) elif isinstance(obj, model.Extension): - serialization_func = extension_to_xml - elif isinstance(obj, model.File): - serialization_func = file_to_xml + return extension_to_xml(obj) elif isinstance(obj, model.Resource): - serialization_func = resource_to_xml - elif isinstance(obj, model.MultiLanguageProperty): - serialization_func = multi_language_property_to_xml - elif isinstance(obj, model.Operation): - serialization_func = operation_to_xml - elif isinstance(obj, model.Property): - serialization_func = property_to_xml - elif isinstance(obj, model.Range): - serialization_func = range_to_xml - elif isinstance(obj, model.ReferenceElement): - serialization_func = reference_element_to_xml - elif isinstance(obj, model.RelationshipElement): - serialization_func = relationship_element_to_xml - elif isinstance(obj, model.SubmodelElementCollection): - serialization_func = submodel_element_collection_to_xml - elif isinstance(obj, model.SubmodelElementList): - serialization_func = submodel_element_list_to_xml + return resource_to_xml(obj) elif isinstance(obj, model.AssetAdministrationShell): - serialization_func = asset_administration_shell_to_xml + return asset_administration_shell_to_xml(obj) elif isinstance(obj, model.AssetInformation): - serialization_func = asset_information_to_xml + return asset_information_to_xml(obj) elif isinstance(obj, 
model.SpecificAssetId): - serialization_func = specific_asset_id_to_xml + return specific_asset_id_to_xml(obj) elif isinstance(obj, model.Submodel): - serialization_func = submodel_to_xml + return submodel_to_xml(obj) elif isinstance(obj, model.ValueReferencePair): - serialization_func = value_reference_pair_to_xml + return value_reference_pair_to_xml(obj) elif isinstance(obj, model.ConceptDescription): - serialization_func = concept_description_to_xml + return concept_description_to_xml(obj) elif isinstance(obj, model.LangStringSet): - serialization_func = lang_string_set_to_xml + # FIXME: `lang_string_set_to_xml` expects `tag` parameter, `tag` doesn't have default value + # Issue: https://github.com/eclipse-basyx/basyx-python-sdk/issues/397 + return lang_string_set_to_xml(obj) # type: ignore[call-arg] elif isinstance(obj, model.EmbeddedDataSpecification): - serialization_func = embedded_data_specification_to_xml + return embedded_data_specification_to_xml(obj) elif isinstance(obj, model.DataSpecificationIEC61360): - serialization_func = data_specification_iec61360_to_xml + return data_specification_iec61360_to_xml(obj) # generic serialization using the functions for abstract classes - elif isinstance(obj, model.DataElement): - serialization_func = data_element_to_xml elif isinstance(obj, model.SubmodelElement): - serialization_func = submodel_to_xml + return submodel_element_to_xml(obj) elif isinstance(obj, model.DataSpecificationContent): - serialization_func = data_specification_content_to_xml + return data_specification_content_to_xml(obj) # type aliases elif isinstance(obj, model.ValueList): - serialization_func = value_list_to_xml + return value_list_to_xml(obj) else: raise ValueError(f"{obj!r} cannot be serialized!") - return serialization_func(obj) - def write_aas_xml_element(file: _generic.PathOrBinaryIO, obj: object, **kwargs) -> None: """ diff --git a/sdk/basyx/aas/backend/__init__.py b/sdk/basyx/aas/backend/__init__.py index f58060995..c1da90b1e 100644 --- a/sdk/basyx/aas/backend/__init__.py +++ b/sdk/basyx/aas/backend/__init__.py @@ -1,7 +1,3 @@ """ -This module implements a standardized way of integrating data from existing systems into AAS objects. To achieve this, -the abstract :class:`~basyx.aas.backend.backends.Backend` class implements the class methods -:meth:`~basyx.aas.backend.backends.Backend.update_object` and :meth:`~basyx.aas.backend.backends.Backend.commit_object`, -which every implementation of a backend needs to overwrite. For a tutorial on how to implement a backend, see -:ref:`this tutorial ` +This module implements a standardized way of persisting AAS objects using various backends. """ diff --git a/sdk/basyx/aas/backend/backends.py b/sdk/basyx/aas/backend/backends.py deleted file mode 100644 index 31be12628..000000000 --- a/sdk/basyx/aas/backend/backends.py +++ /dev/null @@ -1,175 +0,0 @@ -# Copyright (c) 2025 the Eclipse BaSyx Authors -# -# This program and the accompanying materials are made available under the terms of the MIT License, available in -# the LICENSE file of this project. -# -# SPDX-License-Identifier: MIT -""" -This module provides a registry and abstract base class for Backends. A :class:`~.Backend` is a class that allows to -synchronize Referable AAS objects or their included data with external data sources such as a remote API or a local -source for real time data. Each backend provides access to one kind of data source. - -The data source of an individual object is specified as a URI in its ``source`` attribute. 
The schema part of that URI -defines the type of data source and, in consequence, the backend class to use for synchronizing this object. - -Custom backends for additional types of data sources can be implemented by subclassing :class:`Backend` and -implementing the :meth:`~.Backend.commit_object` and :meth:`~.Backend.update_object` class methods. These are used -internally by the objects' :meth:`~basyx.aas.model.base.Referable.update` and -:meth:`~basyx.aas.model.base.Referable.commit` methods when the backend is applicable for the relevant source URI. -Then, the Backend class needs to be registered to handle update/commit requests for a specific URI schema, using -:meth:`~basyx.aas.backend.backends.register_backend`. -""" -import abc -import re -from typing import List, Dict, Type, TYPE_CHECKING - -if TYPE_CHECKING: - from ..model import Referable - - -class Backend(metaclass=abc.ABCMeta): - """ - Abstract base class for all Backend classes. - - Each Backend class is typically capable of synchronizing (updating/committing) objects with a type of external data - source, identified by one or more source URI schemas. Custom backends for custom source URI schemas should inherit - from this class and be registered via :meth:`~basyx.aas.backend.backends.register_backend`. to be used by Referable - object's :meth:`~basyx.aas.model.base.Referable.update` and :meth:`~basyx.aas.model.base.Referable.commit` methods - when required. - """ - - @classmethod - @abc.abstractmethod - def commit_object(cls, - committed_object: "Referable", - store_object: "Referable", - relative_path: List[str]) -> None: - """ - Function (class method) to be called when an object shall be committed (local changes pushed to the external - data source) via this backend implementation. - - It is automatically called by the :meth:`~basyx.aas.model.base.Referable.commit` implementation, when the source - URI of the object or the source URI one of its ancestors in the AAS object containment hierarchy include a - URI schema for which this - backend has been registered. Both of the objects are passed to this function: the one which shall be committed - (``committed_object``) and its ancestor with the relevant source URI (``store_object``). They may be the same, - the committed object has a source with the relevant schema itself. Additionally, the ``relative_path`` from the - ``store_object`` down to the ``committed_object`` is provided. - - The backend MUST ensure to commit all local changes of at least the ``committed_object`` and all objects - contained within it (if any) to the data source. It MAY additionally commit changes to other objects (i.e. the - ``store_object`` and any additional contained object). - - For this purpose a concrete implementation of this method would typically use the ``source`` attribute of the - ``store_object`` to identify the data source. If the data source supports fine-grained access to contained - objects, the ``relative_path`` may become handy to compose the committed object's address within the data - source's interface. - - :param committed_object: The object which shall be synced to the external data source - :param store_object: The object which originates from the relevant data source (i.e. has the relevant source - attribute). It may be the ``committed_object`` or one of its ancestors in the AAS object hierarchy. 
- :param relative_path: List of idShort strings to resolve the ``committed_object`` starting at the - ``store_object``, such that `obj = store_object; for i in relative_path: obj = obj.get_referable(i)` - resolves to the ``committed_object``. In case that ``store_object is committed_object``, it is an empty - list. - :raises BackendNotAvailableException: when the external data source cannot be reached - """ - pass - - @classmethod - @abc.abstractmethod - def update_object(cls, - updated_object: "Referable", - store_object: "Referable", - relative_path: List[str]) -> None: - """ - Function (class method) to be called when an object shall be updated (local object updated with changes from the - external data source) via this backend implementation. - - It is automatically called by the :meth:`~basyx.aas.model.base.Referable.update` implementation, - when the source URI of the object or the source URI one of its ancestors in the AAS object containment hierarchy - include a URI schema for which this backend has been registered. Both of the objects are passed - to this function: the one which shall be update (``updated_object``) and its ancestor with - the relevant source URI (``store_object``). They may be the same, the updated object has a source with - the relevant schema itself. Additionally, the ``relative_path`` from the ``store_object`` down to - the ``updated_object`` is provided. - - The backend MUST ensure to update at least the ``updated_object`` and all objects contained within it (if any) - with any changes from the data source. It MAY additionally update other objects (i.e. the ``store_object`` and - any additional contained object). - - For this purpose a concrete implementation of this method would typically use the ``source`` attribute of the - ``store_object`` to identify the data source. If the data source supports fine-grained access to contained - objects, the ``relative_path`` may become handy to compose the updated object's address within the data source's - interface. - - :param updated_object: The object which shall be synced from the external data source - :param store_object: The object which originates from the relevant data source (i.e. has the relevant source - attribute). It may be the ``committed_object`` or one of its ancestors in the AAS object hierarchy. - :param relative_path: List of idShort strings to resolve the ``updated_object`` starting at the - ``store_object``, such that `obj = store_object; for i in relative_path: obj = obj.get_referable(i)` - resolves to the ``updated_object``. In case that ``store_object is updated_object``, it is an empty list. - :raises BackendNotAvailableException: when the external data source cannot be reached - """ - pass - - -# Global registry for backends by URI scheme -# TODO allow multiple backends per scheme with priority -_backends_map: Dict[str, Type[Backend]] = {} - - -def register_backend(scheme: str, backend_class: Type[Backend]) -> None: - """ - Register a Backend implementation to handle update/commit operations for a specific type of external data sources, - identified by a source URI schema. - - This method may be called multiple times for a single Backend class, to register that class as a backend - implementation for different source URI schemas (e.g. use the same backend for 'http://' and 'https://' sources). - - :param scheme: The URI schema of source URIs to be handled with Backend class, without trailing colon and slashes. - E.g. 'http', 'https', 'couchdb', etc. 
- :param backend_class: The Backend implementation class. Should inherit from :class:`Backend`. - """ - # TODO handle multiple backends per scheme - _backends_map[scheme] = backend_class - - -RE_URI_SCHEME = re.compile(r"^([a-zA-Z][a-zA-Z+\-\.]*):") - - -def get_backend(url: str) -> Type[Backend]: - """ - Internal function to retrieve the Backend implementation for the external data source identified by the given - ``url`` via the url's schema. - - :param url: External data source URI to find an appropriate Backend implementation for - :return: A Backend class, capable of updating/committing from/to the external data source - :raises UnknownBackendException: When no backend is available for that url - """ - # TODO handle multiple backends per scheme - scheme_match = RE_URI_SCHEME.match(url) - if not scheme_match: - raise ValueError("{} is not a valid URL with URI scheme.".format(url)) - scheme = scheme_match[1] - try: - return _backends_map[scheme] - except KeyError as e: - raise UnknownBackendException("Could not find Backend for source '{}'".format(url)) from e - - -# ################################################################################################# -# Custom Exception classes for reporting errors during interaction with Backends -class BackendError(Exception): - """Base class of all exceptions raised by the backends module""" - pass - - -class UnknownBackendException(BackendError): - """Raised, if the backend is not found in the registry""" - pass - - -class BackendNotAvailableException(BackendError): - """Raised, if the backend does exist in the registry, but is not available for some reason""" - pass diff --git a/sdk/basyx/aas/backend/couchdb.py b/sdk/basyx/aas/backend/couchdb.py index 4b6f43611..6f2b3a0fc 100644 --- a/sdk/basyx/aas/backend/couchdb.py +++ b/sdk/basyx/aas/backend/couchdb.py @@ -8,8 +8,7 @@ This module adds the functionality of storing and retrieving :class:`~basyx.aas.model.base.Identifiable` objects in a CouchDB. -The :class:`~.CouchDBBackend` takes care of updating and committing objects from and to the CouchDB, while the -:class:`~CouchDBObjectStore` handles adding, deleting and otherwise managing the AAS objects in a specific CouchDB. +The :class:`~CouchDBObjectStore` handles adding, deleting and otherwise managing the AAS objects in a specific CouchDB. """ import threading import weakref @@ -21,7 +20,6 @@ import json import urllib3 # type: ignore -from . import backends from ..adapter.json import json_serialization, json_deserialization from basyx.aas import model @@ -30,146 +28,6 @@ _http_pool_manager = urllib3.PoolManager() -class CouchDBBackend(backends.Backend): - """ - This Backend stores each Identifiable object as a single JSON document in the configured CouchDB database. Each - document's id is build from the object's identifier. The document's contents comprise a single property ``data``, - containing the JSON serialization of the BaSyx Python SDK object. The :ref:`adapter.json ` - package is used for serialization and deserialization of objects. 
- """ - @classmethod - def update_object(cls, - updated_object: model.Referable, - store_object: model.Referable, - relative_path: List[str]) -> None: - - if not isinstance(store_object, model.Identifiable): - raise CouchDBSourceError("The given store_object is not Identifiable, therefore cannot be found " - "in the CouchDB") - url = CouchDBBackend._parse_source(store_object.source) - - try: - data = CouchDBBackend.do_request(url) - except CouchDBServerError as e: - if e.code == 404: - raise KeyError("No Identifiable found in CouchDB at {}".format(url)) from e - raise - - updated_store_object = data['data'] - set_couchdb_revision(url, data["_rev"]) - store_object.update_from(updated_store_object) - - @classmethod - def commit_object(cls, - committed_object: model.Referable, - store_object: model.Referable, - relative_path: List[str]) -> None: - if not isinstance(store_object, model.Identifiable): - raise CouchDBSourceError("The given store_object is not Identifiable, therefore cannot be found " - "in the CouchDB") - url = CouchDBBackend._parse_source(store_object.source) - # We need to get the revision of the object, if it already exists, otherwise we cannot write to the Couchdb - if get_couchdb_revision(url) is None: - raise CouchDBConflictError("No revision found for the given object. Try calling `update` on it.") - - data = json.dumps({'data': store_object, "_rev": get_couchdb_revision(url)}, - cls=json_serialization.AASToJsonEncoder) - - try: - response = CouchDBBackend.do_request( - url, method='PUT', additional_headers={'Content-type': 'application/json'}, body=data.encode('utf-8')) - set_couchdb_revision(url, response["rev"]) - except CouchDBServerError as e: - if e.code == 409: - raise CouchDBConflictError("Could not commit changes to id {} due to a concurrent modification in the " - "database.".format(store_object.id)) from e - elif e.code == 404: - raise KeyError("Object with id {} was not found in the CouchDB at {}" - .format(store_object.id, url)) from e - raise - - @classmethod - def _parse_source(cls, source: str) -> str: - """ - Parses the source parameter of a model.Referable object - - :param source: Source string of the model.Referable object - :return: URL to the document - :raises CouchDBBackendSourceError, if the source has the wrong format - """ - if source.startswith("couchdbs://"): - url = source.replace("couchdbs://", "https://", 1) - elif source.startswith("couchdb://"): - url = source.replace("couchdb://", "http://", 1) - else: - raise CouchDBSourceError("Source has wrong format. " - "Expected to start with {couchdb://, couchdbs://}, got {" + source + "}") - return url - - @classmethod - def do_request(cls, url: str, method: str = "GET", additional_headers: Optional[Dict[str, str]] = None, - body: Optional[bytes] = None) -> MutableMapping[str, Any]: - """ - Perform an HTTP(S) request to the CouchDBServer, parse the result and handle errors - - :param url: The HTTP or HTTPS URL to request - :param method: The HTTP method for the request - :param additional_headers: Additional headers to insert into the request. The default headers include - 'connection: keep-alive', 'accept-encoding: ...', 'authorization: basic ...', 'Accept: ...'. 
- :param body: Request body for POST, PUT, and PATCH requests - :return: The parsed JSON data if the request ``method`` is other than 'HEAD' or the response headers for 'HEAD' - requests - """ - url_parts = urllib.parse.urlparse(url) - host = url_parts.scheme + url_parts.netloc - auth = _credentials_store.get(host) - headers = urllib3.make_headers(keep_alive=True, accept_encoding=True, - basic_auth="{}:{}".format(*auth) if auth else None) - headers['Accept'] = 'application/json' - headers.update(additional_headers if additional_headers is not None else {}) - try: - response = _http_pool_manager.request(method, url, headers=headers, body=body) - except (urllib3.exceptions.TimeoutError, urllib3.exceptions.SSLError, urllib3.exceptions.ProtocolError) as e: - raise CouchDBConnectionError("Error while connecting to the CouchDB server: {}".format(e)) from e - except urllib3.exceptions.HTTPError as e: - raise CouchDBResponseError("Error while connecting to the CouchDB server: {}".format(e)) from e - - if not (200 <= response.status < 300): - logger.debug("Request %s %s finished with HTTP status code %s.", - method, url, response.status) - if response.headers.get('Content-type', None) != 'application/json': - raise CouchDBResponseError("Unexpected Content-type header {} of response from CouchDB server" - .format(response.headers.get('Content-type', None))) - - if method == 'HEAD': - raise CouchDBServerError(response.status, "", "", "HTTP {}".format(response.status)) - - try: - data = json.loads(response.data.decode('utf-8')) - except json.JSONDecodeError: - raise CouchDBResponseError("Could not parse error message of HTTP {}" - .format(response.status)) - raise CouchDBServerError(response.status, data['error'], data['reason'], - "HTTP {}: {} (reason: {})".format(response.status, data['error'], data['reason'])) - - # Check response & parse data - logger.debug("Request %s %s finished successfully.", method, url) - if method == 'HEAD': - return response.headers - - if response.headers.get('Content-type') != 'application/json': - raise CouchDBResponseError("Unexpected Content-type header") - try: - data = json.loads(response.data.decode('utf-8'), cls=json_deserialization.AASFromJsonDecoder) - except json.JSONDecodeError as e: - raise CouchDBResponseError("Could not parse CouchDB server response as JSON data.") from e - return data - - -backends.register_backend("couchdb", CouchDBBackend) -backends.register_backend("couchdbs", CouchDBBackend) - - # Global registry for credentials for CouchDB Servers _credentials_store: Dict[str, Tuple[str, str]] = {} # Note: The HTTPPasswordMgr is not thread safe during writing, should be thread safe for reading only. @@ -182,8 +40,7 @@ def register_credentials(url: str, username: str, password: str): .. Warning:: Do not use this function, while other threads may be accessing the credentials via the - :class:`~.CouchDBObjectStore` or update or commit functions of :class:`~.basyx.aas.model.base.Referable` - objects! + :class:`~.CouchDBObjectStore`! 
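
For reference, the intended usage of the credential registry together with the object store looks roughly like this (a minimal sketch: the server URL, database name, credentials and the Submodel id are placeholder values, and the exact `CouchDBObjectStore` constructor arguments should be double-checked against the class below):

```python
import basyx.aas.backend.couchdb as couchdb
from basyx.aas import model

# Credentials must be registered for the server before any request is made,
# since _do_request() looks them up per host. All names here are placeholders.
couchdb.register_credentials("http://localhost:5984", "admin", "secret")

object_store = couchdb.CouchDBObjectStore("http://localhost:5984", "aas-test-db")
object_store.check_database(create=True)  # create the database if it does not exist yet

submodel = model.Submodel(id_="https://example.com/sm/demo")
object_store.add(submodel)  # immediately stored as a JSON document in CouchDB
retrieved = object_store.get_identifiable("https://example.com/sm/demo")
```
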
:param url: Toplevel URL :param username: Username to that CouchDB instance @@ -268,7 +125,7 @@ def check_database(self, create=False): """ try: - CouchDBBackend.do_request("{}/{}".format(self.url, self.database_name), 'HEAD') + self._do_request("{}/{}".format(self.url, self.database_name), 'HEAD') except CouchDBServerError as e: # If an HTTPError is raised, re-raise it, unless it is a 404 error and we are requested to create the # database @@ -280,7 +137,7 @@ def check_database(self, create=False): # Create database logger.info("Creating CouchDB database %s/%s ...", self.url, self.database_name) - CouchDBBackend.do_request("{}/{}".format(self.url, self.database_name), 'PUT') + self._do_request("{}/{}".format(self.url, self.database_name), 'PUT') def get_identifiable_by_couchdb_id(self, couchdb_id: str) -> model.Identifiable: """ @@ -293,7 +150,7 @@ def get_identifiable_by_couchdb_id(self, couchdb_id: str) -> model.Identifiable: # Create and issue HTTP request (raises HTTPError on status != 200) try: - data = CouchDBBackend.do_request( + data = self._do_request( "{}/{}/{}".format(self.url, self.database_name, urllib.parse.quote(couchdb_id, safe=''))) except CouchDBServerError as e: if e.code == 404: @@ -305,7 +162,6 @@ def get_identifiable_by_couchdb_id(self, couchdb_id: str) -> model.Identifiable: if not isinstance(obj, model.Identifiable): raise CouchDBResponseError("The CouchDB document with id {} does not contain an identifiable AAS object." .format(couchdb_id)) - self.generate_source(obj) # Generate the source parameter of this object set_couchdb_revision("{}/{}/{}".format(self.url, self.database_name, urllib.parse.quote(couchdb_id, safe='')), data["_rev"]) @@ -314,12 +170,8 @@ def get_identifiable_by_couchdb_id(self, couchdb_id: str) -> model.Identifiable: with self._object_cache_lock: if obj.id in self._object_cache: old_obj = self._object_cache[obj.id] - # If the source does not match the correct source for this CouchDB backend, the object seems to belong - # to another backend now, so we return a fresh copy - if old_obj.source == obj.source: - old_obj.update_from(obj) - return old_obj - + old_obj.update_from(obj) + return old_obj self._object_cache[obj.id] = obj return obj @@ -351,7 +203,7 @@ def add(self, x: model.Identifiable) -> None: # Create and issue HTTP request (raises HTTPError on status != 200) try: - response = CouchDBBackend.do_request( + response = self._do_request( "{}/{}/{}".format(self.url, self.database_name, self._transform_id(x.id)), 'PUT', {'Content-type': 'application/json'}, @@ -364,7 +216,6 @@ def add(self, x: model.Identifiable) -> None: raise with self._object_cache_lock: self._object_cache[x.id] = x - self.generate_source(x) # Set the source of the object def discard(self, x: model.Identifiable, safe_delete=False) -> None: """ @@ -394,7 +245,7 @@ def discard(self, x: model.Identifiable, safe_delete=False) -> None: # ETag response header try: logger.debug("fetching the current object revision for deletion ...") - headers = CouchDBBackend.do_request( + headers = self._do_request( "{}/{}/{}".format(self.url, self.database_name, self._transform_id(x.id)), 'HEAD') rev = headers['ETag'][1:-1] except CouchDBServerError as e: @@ -403,7 +254,7 @@ def discard(self, x: model.Identifiable, safe_delete=False) -> None: from e raise try: - CouchDBBackend.do_request( + self._do_request( "{}/{}/{}?rev={}".format(self.url, self.database_name, self._transform_id(x.id), rev), 'DELETE') except CouchDBServerError as e: @@ -419,7 +270,65 @@ def discard(self, x: 
model.Identifiable, safe_delete=False) -> None:
                                     self._transform_id(x.id)))
         with self._object_cache_lock:
             del self._object_cache[x.id]
-        x.source = ""
+
+    @classmethod
+    def _do_request(cls, url: str, method: str = "GET", additional_headers: Optional[Dict[str, str]] = None,
+                    body: Optional[bytes] = None) -> MutableMapping[str, Any]:
+        """
+        Perform an HTTP(S) request to the CouchDBServer, parse the result and handle errors
+
+        :param url: The HTTP or HTTPS URL to request
+        :param method: The HTTP method for the request
+        :param additional_headers: Additional headers to insert into the request. The default headers include
+            'connection: keep-alive', 'accept-encoding: ...', 'authorization: basic ...', 'Accept: ...'.
+        :param body: Request body for POST, PUT, and PATCH requests
+        :return: The parsed JSON data if the request ``method`` is other than 'HEAD' or the response headers for 'HEAD'
+            requests
+        """
+        url_parts = urllib.parse.urlparse(url)
+        host = url_parts.scheme + url_parts.netloc
+        auth = _credentials_store.get(host)
+        headers = urllib3.make_headers(keep_alive=True, accept_encoding=True,
+                                       basic_auth="{}:{}".format(*auth) if auth else None)
+        headers['Accept'] = 'application/json'
+        headers.update(additional_headers if additional_headers is not None else {})
+        try:
+            response = _http_pool_manager.request(method, url, headers=headers, body=body)
+        except (urllib3.exceptions.TimeoutError, urllib3.exceptions.SSLError, urllib3.exceptions.ProtocolError) as e:
+            raise CouchDBConnectionError("Error while connecting to the CouchDB server: {}".format(e)) from e
+        except urllib3.exceptions.HTTPError as e:
+            raise CouchDBResponseError("Error while connecting to the CouchDB server: {}".format(e)) from e
+
+        if not (200 <= response.status < 300):
+            logger.debug("Request %s %s finished with HTTP status code %s.",
+                         method, url, response.status)
+            if response.headers.get('Content-type', None) != 'application/json':
+                raise CouchDBResponseError("Unexpected Content-type header {} of response from CouchDB server"
+                                           .format(response.headers.get('Content-type', None)))
+
+            if method == 'HEAD':
+                raise CouchDBServerError(response.status, "", "", "HTTP {}".format(response.status))
+
+            try:
+                data = json.loads(response.data.decode('utf-8'))
+            except json.JSONDecodeError:
+                raise CouchDBResponseError("Could not parse error message of HTTP {}"
+                                           .format(response.status))
+            raise CouchDBServerError(response.status, data['error'], data['reason'],
+                                     "HTTP {}: {} (reason: {})".format(response.status, data['error'], data['reason']))
+
+        # Check response & parse data
+        logger.debug("Request %s %s finished successfully.", method, url)
+        if method == 'HEAD':
+            return response.headers
+
+        if response.headers.get('Content-type') != 'application/json':
+            raise CouchDBResponseError("Unexpected Content-type header")
+        try:
+            data = json.loads(response.data.decode('utf-8'), cls=json_deserialization.AASFromJsonDecoder)
+        except json.JSONDecodeError as e:
+            raise CouchDBResponseError("Could not parse CouchDB server response as JSON data.") from e
+        return data

     def __contains__(self, x: object) -> bool:
         """
@@ -441,7 +350,7 @@ def __contains__(self, x: object) -> bool:

         logger.debug("Checking existence of object with id %s in database ...", repr(x))
         try:
-            CouchDBBackend.do_request(
+            self._do_request(
                 "{}/{}/{}".format(self.url, self.database_name, self._transform_id(identifier)), 'HEAD')
         except CouchDBServerError as e:
             if e.code == 404:
@@ -458,7 +367,7 @@ def __len__(self) -> int:
            (see ``_do_request()`` for details)
         """
         logger.debug("Fetching number of documents from 
database ...") - data = CouchDBBackend.do_request("{}/{}".format(self.url, self.database_name)) + data = self._do_request("{}/{}".format(self.url, self.database_name)) return data['doc_count'] def __iter__(self) -> Iterator[model.Identifiable]: @@ -483,7 +392,7 @@ def __next__(self): # Fetch a list of all ids and construct Iterator object logger.debug("Creating iterator over objects in database ...") - data = CouchDBBackend.do_request("{}/{}/_all_docs".format(self.url, self.database_name)) + data = self._do_request("{}/{}/_all_docs".format(self.url, self.database_name)) return CouchDBIdentifiableIterator(self, (row['id'] for row in data['rows'])) @staticmethod @@ -497,17 +406,6 @@ def _transform_id(identifier: model.Identifier, url_quote=True) -> str: identifier = urllib.parse.quote(identifier, safe='') return identifier - def generate_source(self, identifiable: model.Identifiable): - """ - Generates the source string for an :class:`~basyx.aas.model.base.Identifiable` object that is backed - by the Couchdb - - :param identifiable: Identifiable object - """ - source: str = self.url.replace("https://", "couchdbs://").replace("http://", "couchdb://") - source += "/" + self.database_name + "/" + self._transform_id(identifiable.id) - identifiable.source = source - # ################################################################################################# # Custom Exception classes for reporting errors during interaction with the CouchDB server @@ -516,11 +414,6 @@ class CouchDBError(Exception): pass -class CouchDBSourceError(CouchDBError): - """Exception raised when the source has the wrong format""" - pass - - class CouchDBConnectionError(CouchDBError): """Exception raised when the CouchDB server could not be reached""" pass diff --git a/sdk/basyx/aas/backend/local_file.py b/sdk/basyx/aas/backend/local_file.py index ec0757375..39ff91415 100644 --- a/sdk/basyx/aas/backend/local_file.py +++ b/sdk/basyx/aas/backend/local_file.py @@ -8,8 +8,8 @@ This module adds the functionality of storing and retrieving :class:`~basyx.aas.model.base.Identifiable` objects in local files. -The :class:`~.LocalFileBackend` takes care of updating and committing objects from and to the files, while the -:class:`~LocalFileObjectStore` handles adding, deleting and otherwise managing the AAS objects in a specific Directory. +The :class:`~LocalFileObjectStore` handles adding, deleting and otherwise managing +the AAS objects in a specific Directory. """ from typing import List, Iterator, Iterable, Union import logging @@ -19,7 +19,6 @@ import threading import weakref -from . import backends from ..adapter.json import json_serialization, json_deserialization from basyx.aas import model @@ -27,45 +26,6 @@ logger = logging.getLogger(__name__) -class LocalFileBackend(backends.Backend): - """ - This Backend stores each Identifiable object as a single JSON document as a local file in a directory. - Each document's id is build from the object's identifier using a SHA256 sum of its identifiable; the document's - contents comprise a single property ``data``, containing the JSON serialization of the BaSyx Python SDK object. The - :ref:`adapter.json ` package is used for serialization and deserialization of objects. 
- """ - - @classmethod - def update_object(cls, - updated_object: model.Referable, - store_object: model.Referable, - relative_path: List[str]) -> None: - - if not isinstance(store_object, model.Identifiable): - raise FileBackendSourceError("The given store_object is not Identifiable, therefore cannot be found " - "in the FileBackend") - file_name: str = store_object.source.replace("file://localhost/", "") - with open(file_name, "r") as file: - data = json.load(file, cls=json_deserialization.AASFromJsonDecoder) - updated_store_object = data["data"] - store_object.update_from(updated_store_object) - - @classmethod - def commit_object(cls, - committed_object: model.Referable, - store_object: model.Referable, - relative_path: List[str]) -> None: - if not isinstance(store_object, model.Identifiable): - raise FileBackendSourceError("The given store_object is not Identifiable, therefore cannot be found " - "in the FileBackend") - file_name: str = store_object.source.replace("file://localhost/", "") - with open(file_name, "w") as file: - json.dump({'data': store_object}, file, cls=json_serialization.AASToJsonEncoder, indent=4) - - -backends.register_backend("file", LocalFileBackend) - - class LocalFileObjectStore(model.AbstractObjectStore): """ An ObjectStore implementation for :class:`~basyx.aas.model.base.Identifiable` BaSyx Python SDK objects backed @@ -112,7 +72,6 @@ def get_identifiable_by_hash(self, hash_: str) -> model.Identifiable: with open("{}/{}.json".format(self.directory_path, hash_), "r") as file: data = json.load(file, cls=json_deserialization.AASFromJsonDecoder) obj = data["data"] - self.generate_source(obj) except FileNotFoundError as e: raise KeyError("No Identifiable with hash {} found in local file database".format(hash_)) from e # If we still have a local replication of that object (since it is referenced from anywhere else), update that @@ -120,11 +79,8 @@ def get_identifiable_by_hash(self, hash_: str) -> model.Identifiable: with self._object_cache_lock: if obj.id in self._object_cache: old_obj = self._object_cache[obj.id] - # If the source does not match the correct source for this CouchDB backend, the object seems to belong - # to another backend now, so we return a fresh copy - if old_obj.source == obj.source: - old_obj.update_from(obj) - return old_obj + old_obj.update_from(obj) + return old_obj self._object_cache[obj.id] = obj return obj @@ -152,7 +108,6 @@ def add(self, x: model.Identifiable) -> None: json.dump({"data": x}, file, cls=json_serialization.AASToJsonEncoder, indent=4) with self._object_cache_lock: self._object_cache[x.id] = x - self.generate_source(x) # Set the source of the object def discard(self, x: model.Identifiable) -> None: """ @@ -168,7 +123,6 @@ def discard(self, x: model.Identifiable) -> None: raise KeyError("No AAS object with id {} exists in local file database".format(x.id)) from e with self._object_cache_lock: del self._object_cache[x.id] - x.source = "" def __contains__(self, x: object) -> bool: """ @@ -214,23 +168,3 @@ def _transform_id(identifier: model.Identifier) -> str: Helper method to represent an ASS Identifier as a string to be used as Local file document id """ return hashlib.sha256(identifier.encode("utf-8")).hexdigest() - - def generate_source(self, identifiable: model.Identifiable) -> str: - """ - Generates the source string for an :class:`~basyx.aas.model.base.Identifiable` object that is backed by the File - - :param identifiable: Identifiable object - """ - source: str = "file://localhost/{}/{}.json".format( - 
self.directory_path, - self._transform_id(identifiable.id) - ) - identifiable.source = source - return source - - -class FileBackendSourceError(Exception): - """ - Raised, if the given object's source is not resolvable as a local file - """ - pass diff --git a/sdk/basyx/aas/examples/tutorial_backend_couchdb.py b/sdk/basyx/aas/examples/tutorial_backend_couchdb.py index 5b476a80a..dc86bac85 100755 --- a/sdk/basyx/aas/examples/tutorial_backend_couchdb.py +++ b/sdk/basyx/aas/examples/tutorial_backend_couchdb.py @@ -4,9 +4,6 @@ """ Tutorial for storing Asset Administration Shells, Submodels and Assets in a CouchDB database server, using the CouchDBObjectStore and CouchDB Backend. - -This tutorial also shows the usage of the commit()/update() mechanism for synchronizing objects with an external data -source. """ from configparser import ConfigParser @@ -33,7 +30,6 @@ # Step-by-Step Guide: # step 1: connecting to a CouchDB server # step 2: storing objects in CouchDBObjectStore -# step 3: updating objects from the CouchDB and committing changes ########################################## @@ -58,8 +54,7 @@ # Provide the login credentials to the CouchDB backend. -# These credentials are used whenever communication with this CouchDB server is required either via the -# CouchDBObjectStore or via the update()/commit() backend. +# These credentials are used whenever communication with this CouchDB server is required via the CouchDBObjectStore. basyx.aas.backend.couchdb.register_credentials(couchdb_url, couchdb_user, couchdb_password) # Now, we create a CouchDBObjectStore as an interface for managing the objects in the CouchDB server. @@ -75,37 +70,11 @@ example_submodel2 = basyx.aas.examples.data.example_aas.create_example_bill_of_material_submodel() # The CouchDBObjectStore behaves just like other ObjectStore implementations (see `tutorial_storage.py`). The objects -# are transferred to the CouchDB immediately. Additionally, the `source` attribute is set automatically, so update() and -# commit() will work automatically (see below). +# are transferred to the CouchDB immediately. object_store.add(example_submodel1) object_store.add(example_submodel2) - -#################################################################### -# Step 3: Updating Objects from the CouchDB and Committing Changes # -#################################################################### - -# Since the CouchDBObjectStore has set the `source` attribute of our Submodel objects, we can now use update() and -# commit() to synchronize changes to these objects with the database. The `source` indicates (via its URI scheme) that -# the CouchDB backend is used for the synchronization and references the correct CouchDB server url and database. For -# this to work, we must make sure to `import aas.backend.couchdb` at least once in this Python application, so the -# CouchDB backend is loaded. - -# Fetch recent updates from the server -example_submodel1.update() - -# Make some changes to a Property within the submodel -prop = example_submodel1.get_referable('ManufacturerName') -assert isinstance(prop, basyx.aas.model.Property) - -prop.value = "RWTH Aachen" - -# Commit (upload) these changes to the CouchDB server -# We can simply call commit() on the Property object. It will check the `source` attribute of the object itself as well -# as the source attribute of all ancestors in the object hierarchy (including the Submodel) and commit the changes to -# all of these external data sources. 
-prop.commit()
-
+# For more information on how to use `ObjectStore`s in general, please refer to `tutorial_storage.py`.

############
# Clean up #
diff --git a/sdk/basyx/aas/examples/tutorial_serialization_deserialization.py b/sdk/basyx/aas/examples/tutorial_serialization_deserialization.py
index 6c99409a7..ec281818b 100755
--- a/sdk/basyx/aas/examples/tutorial_serialization_deserialization.py
+++ b/sdk/basyx/aas/examples/tutorial_serialization_deserialization.py
@@ -55,11 +55,6 @@
 ##############################################
 # Step 2: Serializing Single Objects to JSON #
 ##############################################
-
-# Before serializing the data, we should make sure, it's up-to-date. This is irrelevant for the static AAS objects in
-# this tutorial, but may be important when dealing with dynamic data.
-aashell.update()
-
 # `AASToJsonEncoder` from the `aas.adapter.json` module is a custom JSONEncoder class for serializing
 # Asset Administration Shell data into the official JSON format according to
 # 'Details of the Asset Administration Shell', chapter 5.5, using Python's built-in JSON library. When provided to the
@@ -102,17 +97,13 @@
 obj_store.add(submodel)
 obj_store.add(aashell)

-# step 4.2: Again, make sure that the data is up-to-date
-submodel.update()
-aashell.update()
-
-# step 4.3: writing the contents of the ObjectStore to a JSON file
+# step 4.2: writing the contents of the ObjectStore to a JSON file
 basyx.aas.adapter.json.write_aas_json_file('data.json', obj_store)

 # We can pass the additional keyword argument `indent=4` to `write_aas_json_file()` to format the JSON file in a more
 # human-readable (but much more space-consuming) manner.

-# step 4.4: writing the contents of the ObjectStore to an XML file
+# step 4.3: writing the contents of the ObjectStore to an XML file
 basyx.aas.adapter.xml.write_aas_xml_file('data.xml', obj_store)

diff --git a/sdk/basyx/aas/examples/tutorial_storage.py b/sdk/basyx/aas/examples/tutorial_storage.py
index 82f5bc1e5..fe978b11b 100755
--- a/sdk/basyx/aas/examples/tutorial_storage.py
+++ b/sdk/basyx/aas/examples/tutorial_storage.py
@@ -68,8 +68,7 @@
 # persistent memory (i.e. on hard disk). In this case, you may choose the `CouchDBObjectStore` from
 # `aas.backends.couchdb` to use a CouchDB database server as persistent storage. Both ObjectStore implementations
 # provide the same interface. In addition, the CouchDBObjectStores allows synchronizing the local object with the
-# database via a Backend and the update()/commit() mechanism. See the `tutorial_backend_couchdb.py` for more
-# information.
+# database. See the `tutorial_backend_couchdb.py` for more information.
 obj_store: model.DictObjectStore[model.Identifiable] = model.DictObjectStore()

 # step 2.2: add submodel and asset administration shell to store
diff --git a/sdk/basyx/aas/model/__init__.py b/sdk/basyx/aas/model/__init__.py
index e541968b5..2ddd93b51 100644
--- a/sdk/basyx/aas/model/__init__.py
+++ b/sdk/basyx/aas/model/__init__.py
@@ -39,3 +39,17 @@
     RelationshipElement: KeyTypes.RELATIONSHIP_ELEMENT,
     SubmodelElement: KeyTypes.SUBMODEL_ELEMENT,  # type: ignore
 }
+
+
+def resolve_referable_class_in_key_types(referable: Referable) -> type:
+    """
+    Return the type of the referable if that type is given in `KEY_TYPES_CLASSES`; otherwise, return the first parent
+    class in the inheritance chain (MRO) of the referable that is given in `KEY_TYPES_CLASSES`.
+
+    :raises TypeError: If neither the referable's type nor any of its parent classes is given in `KEY_TYPES_CLASSES`.
+ """ + try: + ref_type = next(iter(t for t in inspect.getmro(type(referable)) if t in KEY_TYPES_CLASSES)) + except StopIteration: + raise TypeError(f"Could not find a matching class in KEY_TYPES_CLASSES for {type(referable)}") + return ref_type diff --git a/sdk/basyx/aas/model/base.py b/sdk/basyx/aas/model/base.py index a93e3cb59..35ccad5a1 100644 --- a/sdk/basyx/aas/model/base.py +++ b/sdk/basyx/aas/model/base.py @@ -18,7 +18,6 @@ import re from . import datatypes, _string_constraints -from ..backend import backends if TYPE_CHECKING: from . import provider @@ -42,6 +41,8 @@ VersionType = str ValueTypeIEC61360 = str +MAX_RECURSION_DEPTH = 32*2 # see https://github.com/admin-shell-io/aas-specs-metamodel/issues/333 + @unique class KeyTypes(Enum): @@ -454,25 +455,31 @@ def from_referable(referable: "Referable") -> "Key": """ # Get the `type` by finding the first class from the base classes list (via inspect.getmro), that is contained # in KEY_ELEMENTS_CLASSES - from . import KEY_TYPES_CLASSES, SubmodelElementList - try: - key_type = next(iter(KEY_TYPES_CLASSES[t] - for t in inspect.getmro(type(referable)) - if t in KEY_TYPES_CLASSES)) - except StopIteration: - key_type = KeyTypes.PROPERTY + key_type = Key._get_key_type_for_referable(referable) + key_value = Key._get_key_value_for_referable(referable) + return Key(key_type, key_value) + + @staticmethod + def _get_key_type_for_referable(referable: "Referable") -> KeyTypes: + from . import KEY_TYPES_CLASSES, resolve_referable_class_in_key_types + ref_type = resolve_referable_class_in_key_types(referable) + key_type = KEY_TYPES_CLASSES[ref_type] + return key_type + @staticmethod + def _get_key_value_for_referable(referable: "Referable") -> str: + from . import SubmodelElementList if isinstance(referable, Identifiable): - return Key(key_type, referable.id) + return referable.id elif isinstance(referable.parent, SubmodelElementList): try: - return Key(key_type, str(referable.parent.value.index(referable))) # type: ignore + return str(referable.parent.value.index(referable)) # type: ignore except ValueError as e: raise ValueError(f"Object {referable!r} is not contained within its parent {referable.parent!r}") from e else: if referable.id_short is None: - raise ValueError(f"Can't create Key for {referable!r} without an id_short!") - return Key(key_type, referable.id_short) + raise ValueError(f"Can't create Key value for {referable!r} without an id_short!") + return referable.id_short _NSO = TypeVar('_NSO', bound=Union["Referable", "Qualifier", "HasSemantics", "Extension"]) @@ -602,10 +609,6 @@ class Referable(HasExtension, metaclass=abc.ABCMeta): :ivar description: Description or comments on the element. :ivar parent: Reference (in form of a :class:`~.UniqueIdShortNamespace`) to the next referable parent element of the element. - - :ivar source: Source of the object, a URI, that defines where this object's data originates from. - This is used to specify where the Referable should be updated from and committed to. - Default is an empty string, making it use the source of its ancestor, if possible. """ @abc.abstractmethod def __init__(self): @@ -617,29 +620,77 @@ def __init__(self): # We use a Python reference to the parent Namespace instead of a Reference Object, as specified. This allows # simpler and faster navigation/checks and it has no effect in the serialized data formats anyway. 
self.parent: Optional[UniqueIdShortNamespace] = None
-        self.source: str = ""

     def __repr__(self) -> str:
-        reversed_path = []
+        root = self.get_identifiable_root()
+        try:
+            id_short_path = self.get_id_short_path()
+        except (ValueError, AttributeError):
+            id_short_path = self.id_short if self.id_short is not None else ""
+        item_cls_name = self.__class__.__name__
+
+        if root is None:
+            item_path = f"[{id_short_path}]" if id_short_path else ""
+        else:
+            item_path = f"[{root.id} / {id_short_path}]" if id_short_path else f"[{root.id}]"
+
+        return f"{item_cls_name}{item_path}"
+
+    def get_identifiable_root(self) -> Optional["Identifiable"]:
+        """
+        Get the root :class:`~.Identifiable` of this referable, if it exists.
+
+        :return: The root :class:`~.Identifiable` or None if no such root exists
+        """
         item = self  # type: Any
-        if item.id_short is not None:
-            from .submodel import SubmodelElementList
-            while item is not None:
-                if isinstance(item, Identifiable):
-                    reversed_path.append(item.id)
-                    break
-                elif isinstance(item, Referable):
-                    if isinstance(item.parent, SubmodelElementList):
-                        reversed_path.append(f"{item.parent.id_short}[{item.parent.value.index(item)}]")
-                        item = item.parent
-                    else:
-                        reversed_path.append(item.id_short)
-                    item = item.parent
-                else:
-                    raise AttributeError('Referable must have an identifiable as root object and only parents that are '
-                                         'referable')
+        while item is not None:
+            if isinstance(item, Identifiable):
+                return item
+            elif isinstance(item, Referable):
+                item = item.parent
+            else:
+                raise AttributeError('Referable must have an identifiable as root object and only parents that are '
+                                     'referable')
+        return None
+
+    def get_id_short_path(self) -> str:
+        """
+        Get the id_short path of this referable, i.e. the id_shorts of this referable and all its parents.
+
+        :return: The id_short path as a string, e.g. "MySECollection.MySEList[2].MySubProperty1"
+        """
+        path_list = self.get_id_short_path_as_a_list()
+        return self.build_id_short_path(path_list)
+
+    def get_id_short_path_as_a_list(self) -> List[str]:
+        """
+        Get the id_short path of this referable as a list of id_shorts and indexes.

-        return self.__class__.__name__ + ("[{}]".format(" / ".join(reversed(reversed_path))) if reversed_path else "")
+        :return: The id_short path as a list, e.g. '["MySECollection", "MySEList", "2", "MySubProperty1"]'
+        :raises ValueError: If this referable has no id_short and
+            its parent is not a :class:`~basyx.aas.model.submodel.SubmodelElementList`
+        :raises AttributeError: If the parent chain is broken, i.e. 
if a parent is neither a :class:`~.Referable` nor an
+            :class:`~.Identifiable`
+        """
+        from .submodel import SubmodelElementList
+        if self.id_short is None and not isinstance(self.parent, SubmodelElementList):
+            raise ValueError(f"Can't create id_short_path for {self.__class__.__name__} without an id_short, "
+                             f"unless its parent is a SubmodelElementList!")
+
+        item = self  # type: Any
+        path: List[str] = []
+        while item is not None:
+            if not isinstance(item, Referable):
+                raise AttributeError('Referable must have an identifiable as root object and only parents that are '
+                                     'referable')
+            if isinstance(item, Identifiable):
+                break
+            elif isinstance(item.parent, SubmodelElementList):
+                path.insert(0, str(item.parent.value.index(item)))
+            else:
+                path.insert(0, item.id_short)
+            item = item.parent
+        return path

     def _get_id_short(self) -> Optional[NameType]:
         return self._id_short
@@ -659,6 +710,49 @@ def _set_category(self, category: Optional[NameType]):
     def _get_category(self) -> Optional[NameType]:
         return self._category

+    @classmethod
+    def parse_id_short_path(cls, id_short_path: str) -> List[str]:
+        """
+        Parse an id_short_path string into a list of id_shorts and indexes.
+
+        :param id_short_path: The id_short_path string, e.g. "MySECollection.MySEList[2].MySubProperty1"
+        :return: The id_short path as a list, e.g. '["MySECollection", "MySEList", "2", "MySubProperty1"]'
+        """
+        id_shorts_and_indexes = []
+        for part in id_short_path.split("."):
+            id_short = part[0:part.find('[')] if '[' in part else part
+            id_shorts_and_indexes.append(id_short)
+
+            indexes_part = part.removeprefix(id_short)
+            if indexes_part:
+                if not re.fullmatch(r'(?:\[\d+\])+', indexes_part):
+                    raise ValueError(f"Invalid index format in id_short_path: '{id_short_path}', part: '{part}'")
+                indexes = indexes_part.strip("[]").split("][")
+                id_shorts_and_indexes.extend(indexes)
+        cls.validate_id_short_path(id_shorts_and_indexes)
+        return id_shorts_and_indexes
+
+    @classmethod
+    def build_id_short_path(cls, id_short_path: Iterable[str]) -> str:
+        """
+        Build an id_short_path string from a list of id_shorts and indexes.
+        """
+        if isinstance(id_short_path, str):
+            raise ValueError("id_short_path must be an Iterable of strings, not a single string")
+        path_list_with_dots_and_brackets = [f"[{part}]" if part.isdigit() else f".{part}" for part in id_short_path]
+        id_short_path = "".join(path_list_with_dots_and_brackets).removeprefix(".")
+        return id_short_path
+
+    @classmethod
+    def validate_id_short_path(cls, id_short_path: Union[str, NameType, Iterable[NameType]]):
+        if isinstance(id_short_path, str):
+            id_short_path = cls.parse_id_short_path(id_short_path)
+        for id_short in id_short_path:
+            if id_short.isdigit():
+                # This is an index, skip validation
+                continue
+            cls.validate_id_short(id_short)
+
     @classmethod
     def validate_id_short(cls, id_short: NameType) -> None:
         """
@@ -733,130 +827,40 @@ def _set_id_short(self, id_short: Optional[NameType]):
         # Redundant to the line above. However, this way, we make sure that we really update the _id_short
         self._id_short = id_short

-    def update(self,
-               max_age: float = 0,
-               recursive: bool = True,
-               _indirect_source: bool = True) -> None:
-        """
-        Update the local Referable object from any underlying external data source, using an appropriate backend
-
-        If there is no source given, it will find its next ancestor with a source and update from this source.
-        If there is no source in any ancestor, this function will do nothing
-
-        :param max_age: Maximum age of the local data in seconds. 
This method may return early, if the previous update
-            of the object has been performed less than ``max_age`` seconds ago.
-        :param recursive: Also call update on all children of this object. Default is True
-        :param _indirect_source: Internal parameter to avoid duplicate updating.
-        :raises backends.BackendError: If no appropriate backend or the data source is not available
-        """
-        # TODO consider max_age
-        if not _indirect_source:
-            # Update was already called on an ancestor of this Referable. Only update it, if it has its own source
-            if self.source != "":
-                backends.get_backend(self.source).update_object(updated_object=self,
-                                                                store_object=self,
-                                                                relative_path=[])
-
-        else:
-            # Try to find a valid source for this Referable
-            if self.source != "":
-                backends.get_backend(self.source).update_object(updated_object=self,
-                                                                store_object=self,
-                                                                relative_path=[])
-            else:
-                store_object, relative_path = self.find_source()
-                if store_object and relative_path is not None:
-                    backends.get_backend(store_object.source).update_object(updated_object=self,
-                                                                            store_object=store_object,
-                                                                            relative_path=list(relative_path))
-
-        if recursive:
-            # update all the children who have their own source
-            if isinstance(self, UniqueIdShortNamespace):
-                for namespace_set in self.namespace_element_sets:
-                    if "id_short" not in namespace_set.get_attribute_name_list():
-                        continue
-                    for referable in namespace_set:
-                        referable.update(max_age, recursive=True, _indirect_source=False)
-
-    def find_source(self) -> Tuple[Optional["Referable"], Optional[List[str]]]:  # type: ignore
-        """
-        Finds the closest source in these objects ancestors. If there is no source, returns None
-
-        :return: Tuple with the closest ancestor with a defined source and the relative path of id_shorts to that
-            ancestor
-        """
-        referable: Referable = self
-        relative_path: List[NameType] = [self.id_short]
-        while referable is not None:
-            if referable.source != "":
-                relative_path.reverse()
-                return referable, relative_path
-            if referable.parent:
-                assert isinstance(referable.parent, Referable)
-                referable = referable.parent
-                relative_path.append(referable.id_short)
-                continue
-            break
-        return None, None
-
-    def update_from(self, other: "Referable", update_source: bool = False):
+    def update_from(self, other: "Referable"):
         """
-        Internal function to updates the object's attributes from another object of a similar type.
+        Internal function to update the object's attributes from a different version of the exact same object.

         This function should not be used directly. It is typically used by backend implementations (database adapters,
-        protocol clients, etc.) to update the object's data, after ``update()`` has been called.
+        protocol clients, etc.) and by ``NamespaceSet.update_nss_from()`` to update the object's data.

         :param other: The object to update from
-        :param update_source: Update the source attribute with the other's source attribute. 
This is not propagated - recursively """ - for name, var in vars(other).items(): - # do not update the parent, namespace_element_sets or source (depending on update_source parameter) - if name in ("parent", "namespace_element_sets") or name == "source" and not update_source: + for name in dir(other): + # Skip private and protected attributes + if name.startswith('_'): continue - if isinstance(var, NamespaceSet): - # update the elements of the NameSpaceSet - vars(self)[name].update_nss_from(var) - else: - vars(self)[name] = var # that variable is not a NameSpaceSet, so it isn't Referable - - def commit(self) -> None: - """ - Transfer local changes on this object to all underlying external data sources. - This function commits the current state of this object to its own and each external data source of its - ancestors. If there is no source, this function will do nothing. - """ - current_ancestor = self.parent - relative_path: List[NameType] = [self.id_short] - # Commit to all ancestors with sources - while current_ancestor: - assert isinstance(current_ancestor, Referable) - if current_ancestor.source != "": - backends.get_backend(current_ancestor.source).commit_object(committed_object=self, - store_object=current_ancestor, - relative_path=list(relative_path)) - relative_path.insert(0, current_ancestor.id_short) - current_ancestor = current_ancestor.parent - # Commit to own source and check if there are children with sources to commit to - self._direct_source_commit() + # Do not update 'parent', 'namespace_element_sets' + if name in ("parent", "namespace_element_sets"): + continue - def _direct_source_commit(self): - """ - Commits children of an ancestor recursively, if they have a specific source given - """ - if self.source != "": - backends.get_backend(self.source).commit_object(committed_object=self, - store_object=self, - relative_path=[]) + # Skip methods + attr = getattr(other, name) + if callable(attr): + continue - if isinstance(self, UniqueIdShortNamespace): - for namespace_set in self.namespace_element_sets: - if "id_short" not in namespace_set.get_attribute_name_list(): - continue - for referable in namespace_set: - referable._direct_source_commit() + if isinstance(attr, NamespaceSet): + # update the elements of the NameSpaceSet + getattr(self, name).update_nss_from(attr) + else: + # Check if this is a property and if it has no setter + prop = getattr(type(self), name, None) + if isinstance(prop, property) and prop.fset is None: + if getattr(self, name) != attr: + raise ValueError(f"property {name} is immutable but has changed between versions of the object") + else: + setattr(self, name, attr) id_short = property(_get_id_short, _set_id_short) @@ -1097,22 +1101,24 @@ def from_referable(referable: Referable) -> "ModelReference": object's ancestors """ # Get the first class from the base classes list (via inspect.getmro), that is contained in KEY_ELEMENTS_CLASSES - from . import KEY_TYPES_CLASSES + from . 
import resolve_referable_class_in_key_types
         try:
-            ref_type = next(iter(t for t in inspect.getmro(type(referable)) if t in KEY_TYPES_CLASSES))
-        except StopIteration:
+            ref_type = resolve_referable_class_in_key_types(referable)
+        except TypeError:
             ref_type = Referable

         ref: Referable = referable
         keys: List[Key] = []
         while True:
-            keys.append(Key.from_referable(ref))
+            keys.insert(0, Key.from_referable(ref))
             if isinstance(ref, Identifiable):
-                keys.reverse()
                 return ModelReference(tuple(keys), ref_type)
             if ref.parent is None or not isinstance(ref.parent, Referable):
-                raise ValueError("The given Referable object is not embedded within an Identifiable object")
+                raise ValueError(f"The given Referable object is not embedded within an Identifiable object: {ref}")
             ref = ref.parent
+            if len(keys) > MAX_RECURSION_DEPTH:
+                raise ValueError(f"The given Referable object is embedded in more than {MAX_RECURSION_DEPTH} layers "
+                                 f"of Referables, or there is a loop in the parent chain: {ref}")


 @_string_constraints.constrain_content_type("content_type")
@@ -1488,7 +1494,7 @@ def __init__(self,
         self.value = value
         self.refers_to: Set[ModelReference] = set(refers_to)
         self.semantic_id: Optional[Reference] = semantic_id
-        self.supplemental_semantic_id: ConstrainedList[Reference] = ConstrainedList(supplemental_semantic_id)
+        self.supplemental_semantic_id = ConstrainedList(supplemental_semantic_id)

     def __repr__(self) -> str:
         return "Extension(name={})".format(self.name)
@@ -1637,7 +1643,7 @@ def __init__(self,
         self.value_id: Optional[Reference] = value_id
         self.kind: QualifierKind = kind
         self.semantic_id: Optional[Reference] = semantic_id
-        self.supplemental_semantic_id: ConstrainedList[Reference] = ConstrainedList(supplemental_semantic_id)
+        self.supplemental_semantic_id = ConstrainedList(supplemental_semantic_id)

     def __repr__(self) -> str:
         return "Qualifier(type={})".format(self.type)
@@ -1720,12 +1726,12 @@ def __init__(self) -> None:
         super().__init__()
         self.namespace_element_sets: List[NamespaceSet] = []

-    def get_referable(self, id_short: Union[NameType, Iterable[NameType]]) -> Referable:
+    def get_referable(self, id_short_path: Union[str, NameType, Iterable[NameType]]) -> Referable:
         """
         Find a :class:`~.Referable` in this Namespace by its id_short or by its id_short path. The id_short path may
         contain :class:`~basyx.aas.model.submodel.SubmodelElementList` indices.

-        :param id_short: id_short or id_short path as any :class:`Iterable`
+        :param id_short_path: id_short or id_short path as a str or any :class:`Iterable`
         :returns: :class:`~.Referable`
         :raises TypeError: If one of the intermediate objects on the path is not a :class:`~.UniqueIdShortNamespace`
         :raises KeyError: If no such :class:`~.Referable` can be found
         """
         from .submodel import SubmodelElementList
-        if isinstance(id_short, NameType):
-            id_short = [id_short]
+        if isinstance(id_short_path, (str, NameType)):
+            id_short_path = Referable.parse_id_short_path(id_short_path)
         item: Union[UniqueIdShortNamespace, Referable] = self
-        for id_ in id_short:
+        for id_ in id_short_path:
             # This is redundant on first iteration, but it's a negligible overhead.
             # Also, ModelReference.resolve() relies on this check.
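
The id_short-path handling introduced above can be exercised like this (a sketch; all id_shorts and the Submodel id are hypothetical, and it assumes the usual `Submodel`/`SubmodelElementCollection` namespace-set API used elsewhere in the SDK):

```python
from basyx.aas import model

# parse/build round-trip on the dotted-path syntax, including a list index:
parts = model.Referable.parse_id_short_path("Sensors.Values[2].Raw")
assert parts == ["Sensors", "Values", "2", "Raw"]
assert model.Referable.build_id_short_path(parts) == "Sensors.Values[2].Raw"

# get_referable() now also accepts the dotted string form, which it parses
# via parse_id_short_path() before walking the namespace hierarchy:
submodel = model.Submodel(id_="https://example.com/sm/sensors")
collection = model.SubmodelElementCollection(id_short="Sensors")
prop = model.Property(id_short="Serial", value_type=model.datatypes.String)
collection.value.add(prop)
submodel.submodel_element.add(collection)

assert submodel.get_referable("Sensors.Serial") is prop
```
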
if not isinstance(item, UniqueIdShortNamespace):
@@ -2064,15 +2070,15 @@ def update_nss_from(self, other: "NamespaceSet"):
                 if isinstance(other_object, Referable):
                     backend, case_sensitive = self._backend["id_short"]
                     referable = backend[other_object.id_short if case_sensitive else other_object.id_short.upper()]
-                    referable.update_from(other_object, update_source=True)  # type: ignore
+                    referable.update_from(other_object)  # type: ignore
                 elif isinstance(other_object, Qualifier):
                     backend, case_sensitive = self._backend["type"]
                     qualifier = backend[other_object.type if case_sensitive else other_object.type.upper()]
-                    # qualifier.update_from(other_object, update_source=True)  # TODO: What should happen here?
+                    # qualifier.update_from(other_object)  # TODO: What should happen here?
                 elif isinstance(other_object, Extension):
                     backend, case_sensitive = self._backend["name"]
                     extension = backend[other_object.name if case_sensitive else other_object.name.upper()]
-                    # extension.update_from(other_object, update_source=True)  # TODO: What should happen here?
+                    # extension.update_from(other_object)  # TODO: What should happen here?
                 else:
                     raise TypeError("Type not implemented")
             except KeyError:
diff --git a/sdk/basyx/aas/model/provider.py b/sdk/basyx/aas/model/provider.py
index ac50d33da..d13758308 100644
--- a/sdk/basyx/aas/model/provider.py
+++ b/sdk/basyx/aas/model/provider.py
@@ -11,7 +11,7 @@
 """
 import abc
-from typing import MutableSet, Iterator, Generic, TypeVar, Dict, List, Optional, Iterable, Set
+from typing import MutableSet, Iterator, Generic, TypeVar, Dict, List, Optional, Iterable, Set, Tuple, cast

 from .base import Identifier, Identifiable

@@ -65,8 +65,9 @@ class AbstractObjectStore(AbstractObjectProvider, MutableSet[_IT], Generic[_IT],
     ObjectStores are special ObjectProvides that – in addition to retrieving objects by
     :class:`~basyx.aas.model.base.Identifier` – allow to add and delete objects (i.e. behave like a Python set).

-    This includes local object stores (like :class:`~.DictObjectStore`) and database
-    :class:`Backends <basyx.aas.backend.backends.Backend>`.
+    This includes local object stores (like :class:`~.DictObjectStore`) and specific object stores
+    (like :class:`~basyx.aas.backend.couchdb.CouchDBObjectStore` and
+    :class:`~basyx.aas.backend.local_file.LocalFileObjectStore`).

     The AbstractObjectStore inherits from the :class:`~collections.abc.MutableSet` abstract collections class and
     therefore implements all the functions of this class.
@@ -79,6 +80,36 @@ def update(self, other: Iterable[_IT]) -> None:
         for x in other:
             self.add(x)

+    def sync(self, other: Iterable[_IT], overwrite: bool) -> Tuple[int, int, int]:
+        """
+        Merge :class:`Identifiables <basyx.aas.model.base.Identifiable>` from an
+        :class:`~collections.abc.Iterable` into this :class:`~basyx.aas.model.provider.AbstractObjectStore`. 
+
+        :param other: :class:`~collections.abc.Iterable` to sync with
+        :param overwrite: Flag to overwrite existing :class:`Identifiables <basyx.aas.model.base.Identifiable>` in this
+            :class:`~basyx.aas.model.provider.AbstractObjectStore` with updated versions from ``other``;
+            :class:`Identifiables <basyx.aas.model.base.Identifiable>` unique to this
+            :class:`~basyx.aas.model.provider.AbstractObjectStore` are always preserved
+        :return: Counts of processed :class:`Identifiables <basyx.aas.model.base.Identifiable>` as
+            ``(added, overwritten, skipped)``
+        """
+
+        added, overwritten, skipped = 0, 0, 0
+        for identifiable in other:
+            identifiable_id = identifiable.id
+            if identifiable_id in self:
+                if overwrite:
+                    existing = self.get_identifiable(identifiable_id)
+                    self.discard(cast(_IT, existing))
+                    self.add(identifiable)
+                    overwritten += 1
+                else:
+                    skipped += 1
+            else:
+                self.add(identifiable)
+                added += 1
+        return added, overwritten, skipped
+

 class DictObjectStore(AbstractObjectStore[_IT], Generic[_IT]):
     """
diff --git a/sdk/basyx/aas/model/submodel.py b/sdk/basyx/aas/model/submodel.py
index 75403f359..9e7321c41 100644
--- a/sdk/basyx/aas/model/submodel.py
+++ b/sdk/basyx/aas/model/submodel.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2024 the Eclipse BaSyx Authors
+# Copyright (c) 2025 the Eclipse BaSyx Authors
 #
 # This program and the accompanying materials are made available under the terms of the MIT License, available in
 # the LICENSE file of this project.
@@ -76,8 +76,7 @@ def __init__(self,
         self.semantic_id: Optional[base.Reference] = semantic_id
         self.qualifier = base.NamespaceSet(self, [("type", True)], qualifier)
         self.extension = base.NamespaceSet(self, [("name", True)], extension)
-        self.supplemental_semantic_id: base.ConstrainedList[base.Reference] = \
-            base.ConstrainedList(supplemental_semantic_id)
+        self.supplemental_semantic_id = base.ConstrainedList(supplemental_semantic_id)
         self.embedded_data_specifications: List[base.EmbeddedDataSpecification] = list(embedded_data_specifications)

@@ -147,8 +146,7 @@ def __init__(self,
         self.qualifier = base.NamespaceSet(self, [("type", True)], qualifier)
         self._kind: base.ModellingKind = kind
         self.extension = base.NamespaceSet(self, [("name", True)], extension)
-        self.supplemental_semantic_id: base.ConstrainedList[base.Reference] = \
-            base.ConstrainedList(supplemental_semantic_id)
+        self.supplemental_semantic_id = base.ConstrainedList(supplemental_semantic_id)
         self.embedded_data_specifications: List[base.EmbeddedDataSpecification] = list(embedded_data_specifications)

diff --git a/sdk/docs/add-requirements.txt b/sdk/docs/add-requirements.txt
deleted file mode 100644
index d80dd1a4d..000000000
--- a/sdk/docs/add-requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-# Additional requirements for building the docs
-sphinx~=7.2
-sphinx-rtd-theme~=2.0
-sphinx-argparse~=0.4.0
diff --git a/sdk/docs/source/backend/backends.rst b/sdk/docs/source/backend/backends.rst
deleted file mode 100644
index d8c603849..000000000
--- a/sdk/docs/source/backend/backends.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-backends - Base class and functionality for Backends
-====================================================
-
-.. automodule:: basyx.aas.backend.backends
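
A usage sketch for the new `AbstractObjectStore.sync()` added in `provider.py` above (the identifiers are placeholders; `sync()` relies on `id in store` membership tests, which `DictObjectStore` supports for plain identifier strings):

```python
from basyx.aas import model

store: model.DictObjectStore[model.Identifiable] = model.DictObjectStore()
store.add(model.Submodel(id_="https://example.com/sm/1"))

incoming = [
    model.Submodel(id_="https://example.com/sm/1"),  # already present in store
    model.Submodel(id_="https://example.com/sm/2"),  # new
]

# With overwrite=True, the existing version of sm/1 is replaced by the one
# from `incoming`; sm/2 is simply added. With overwrite=False, an
# already-present Identifiable is left untouched and counted as skipped.
added, overwritten, skipped = store.sync(incoming, overwrite=True)
assert (added, overwritten, skipped) == (1, 1, 0)
```
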
diff --git a/sdk/docs/source/backend/index.rst b/sdk/docs/source/backend/index.rst
index 232867818..97554b3f5 100644
--- a/sdk/docs/source/backend/index.rst
+++ b/sdk/docs/source/backend/index.rst
@@ -7,6 +7,5 @@ basyx.aas.backend - Storing and Retrieving of AAS-objects in Backends
    :maxdepth: 2
    :caption: Contents:

-   backends
    couchdb
    local_file
diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml
index 0fe88d1bd..b2a7a6b7e 100644
--- a/sdk/pyproject.toml
+++ b/sdk/pyproject.toml
@@ -34,13 +34,11 @@ classifiers = [
     "Operating System :: OS Independent",
     "Development Status :: 5 - Production/Stable"
 ]
-requires-python = ">=3.9"
+requires-python = ">=3.10"
 dependencies = [
-    "lxml>=5.3",
+    "lxml>=6.0.2",
     "python-dateutil>=2.8,<3",
-    "pyecma376-2>=1.0.1",
-    "urllib3>=1.26,<3",
-    "Werkzeug>=3.0.3,<4",
+    "pyecma376-2>=1.0.2"
 ]

 [project.optional-dependencies]
@@ -55,6 +53,11 @@ dev = [
     "types-python-dateutil",
     "lxml-stubs~=0.5.1",
 ]
+docs = [
+    "sphinx~=8.2",
+    "sphinx-rtd-theme~=3.0",
+    "sphinx-argparse~=0.5.0"
+]

 [project.urls]
 "Homepage" = "https://github.com/eclipse-basyx/basyx-python-sdk"
diff --git a/sdk/test/adapter/json/test_json_deserialization.py b/sdk/test/adapter/json/test_json_deserialization.py
index 9272bdf98..0dba6dbdb 100644
--- a/sdk/test/adapter/json/test_json_deserialization.py
+++ b/sdk/test/adapter/json/test_json_deserialization.py
@@ -37,7 +37,8 @@ def test_file_format_wrong_list(self) -> None:
                 }
             ]
         }"""
-        with self.assertRaisesRegex(TypeError, r"submodels.*AssetAdministrationShell"):
+        with self.assertRaisesRegex(TypeError, r"AssetAdministrationShell.* was "
+                                               r"in the wrong list 'submodels'"):
             read_aas_json_file(io.StringIO(data), failsafe=False)
         with self.assertLogs(logging.getLogger(), level=logging.WARNING) as cm:
             read_aas_json_file(io.StringIO(data), failsafe=True)
@@ -196,7 +197,7 @@ def get_clean_store() -> model.DictObjectStore:
         with self.assertLogs(logging.getLogger(), level=logging.INFO) as log_ctx:
             identifiers = read_aas_json_file_into(object_store, string_io, replace_existing=False, ignore_existing=True)
         self.assertEqual(len(identifiers), 0)
-        self.assertIn("already exists in the object store", log_ctx.output[0])  # type: ignore
+        self.assertIn("already exists in store", log_ctx.output[0])  # type: ignore
         submodel = object_store.pop()
         self.assertIsInstance(submodel, model.Submodel)
         self.assertEqual(submodel.id_short, "test123")
@@ -204,7 +205,7 @@ def get_clean_store() -> model.DictObjectStore:
         string_io.seek(0)
         object_store = get_clean_store()

-        with self.assertRaisesRegex(KeyError, r"already exists in the object store"):
+        with self.assertRaisesRegex(KeyError, r"already exists in store"):
             identifiers = read_aas_json_file_into(object_store, string_io, replace_existing=False, ignore_existing=False)
         self.assertEqual(len(identifiers), 0)
diff --git a/sdk/test/backend/test_backends.py b/sdk/test/backend/test_backends.py
deleted file mode 100644
index e0beee8f8..000000000
--- a/sdk/test/backend/test_backends.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2025 the Eclipse BaSyx Authors
-#
-# This program and the accompanying materials are made available under the terms of the MIT License, available in
-# the LICENSE file of this project.
-# -# SPDX-License-Identifier: MIT - -from typing import List -import unittest - -from basyx.aas.backend import backends -from basyx.aas.model import Referable - - -class ExampleBackend(backends.Backend): - @classmethod - def commit_object(cls, committed_object: Referable, store_object: Referable, relative_path: List[str]) -> None: - raise NotImplementedError("This is a mock") - - @classmethod - def update_object(cls, updated_object: Referable, store_object: Referable, relative_path: List[str]) -> None: - raise NotImplementedError("This is a mock") - - -class BackendsTest(unittest.TestCase): - def test_backend_store(self): - backends.register_backend("mockScheme", ExampleBackend) - self.assertIs(backends.get_backend("mockScheme:x-test:test_backend"), ExampleBackend) - - backends.register_backend("", ExampleBackend) - with self.assertRaises(ValueError) as cm: - backends.get_backend("") - self.assertEqual(" is not a valid URL with URI scheme.", str(cm.exception)) - - with self.assertRaises(backends.UnknownBackendException): - backends.get_backend("some-unkown-scheme://example.com") diff --git a/sdk/test/backend/test_couchdb.py b/sdk/test/backend/test_couchdb.py index 89fe992de..36e5ef039 100644 --- a/sdk/test/backend/test_couchdb.py +++ b/sdk/test/backend/test_couchdb.py @@ -18,32 +18,6 @@ TEST_CONFIG["couchdb"]["database"] + "/" -class CouchDBBackendOfflineMethodsTest(unittest.TestCase): - def test_parse_source(self): - couchdb.register_credentials(url="couchdb.plt.rwth-aachen.de:5984", - username="test_user", - password="test_password") - - url = couchdb.CouchDBBackend._parse_source( - "couchdbs://couchdb.plt.rwth-aachen.de:5984/path_to_db/path_to_doc" - ) - expected_url = "https://couchdb.plt.rwth-aachen.de:5984/path_to_db/path_to_doc" - self.assertEqual(expected_url, url) - - url = couchdb.CouchDBBackend._parse_source( - "couchdb://couchdb.plt.rwth-aachen.de:5984/path_to_db/path_to_doc" - ) - expected_url = "http://couchdb.plt.rwth-aachen.de:5984/path_to_db/path_to_doc" - self.assertEqual(expected_url, url) - - with self.assertRaises(couchdb.CouchDBSourceError) as cm: - couchdb.CouchDBBackend._parse_source("wrong_scheme:plt.rwth-aachen.couchdb:5984/path_to_db/path_to_doc") - self.assertEqual("Source has wrong format. " - "Expected to start with {couchdb://, couchdbs://}, got " - "{wrong_scheme:plt.rwth-aachen.couchdb:5984/path_to_db/path_to_doc}", - str(cm.exception)) - - @unittest.skipUnless(COUCHDB_OKAY, "No CouchDB is reachable at {}/{}: {}".format(TEST_CONFIG['couchdb']['url'], TEST_CONFIG['couchdb']['database'], COUCHDB_ERROR)) @@ -62,7 +36,8 @@ def tearDown(self) -> None: def test_object_store_add(self): test_object = create_example_submodel() self.object_store.add(test_object) - self.assertEqual(test_object.source, source_core+"https%3A%2F%2Facplt.org%2FTest_Submodel") + # Note that this test is only checking that there are no errors during adding. + # The actual logic is tested together with retrieval in `test_retrieval`. 
def test_retrieval(self): test_object = create_example_submodel() @@ -77,11 +52,6 @@ def test_retrieval(self): test_object_retrieved_again = self.object_store.get_identifiable('https://acplt.org/Test_Submodel') self.assertIs(test_object_retrieved, test_object_retrieved_again) - # However, a changed source should invalidate the cached object, so we should get a new copy - test_object_retrieved.source = "couchdb://example.com/example/https%3A%2F%2Facplt.org%2FTest_Submodel" - test_object_retrieved_third = self.object_store.get_identifiable('https://acplt.org/Test_Submodel') - self.assertIsNot(test_object_retrieved, test_object_retrieved_third) - def test_example_submodel_storing(self) -> None: example_submodel = create_example_submodel() @@ -138,46 +108,3 @@ def test_key_errors(self) -> None: self.object_store.discard(retrieved_submodel) self.assertEqual("'No AAS object with id https://acplt.org/Test_Submodel exists in " "CouchDB database'", str(cm.exception)) - - def test_conflict_errors(self): - # Preperation: add object and retrieve it from the database - example_submodel = create_example_submodel() - self.object_store.add(example_submodel) - retrieved_submodel = self.object_store.get_identifiable('https://acplt.org/Test_Submodel') - - # Simulate a concurrent modification (Commit submodel, while preventing that the couchdb revision store is - # updated) - with unittest.mock.patch("basyx.aas.backend.couchdb.set_couchdb_revision"): - retrieved_submodel.commit() - - # Committing changes to the retrieved object should now raise a conflict error - retrieved_submodel.id_short = "myOtherNewIdShort" - with self.assertRaises(couchdb.CouchDBConflictError) as cm: - retrieved_submodel.commit() - self.assertEqual("Could not commit changes to id https://acplt.org/Test_Submodel due to a " - "concurrent modification in the database.", str(cm.exception)) - - # Deleting the submodel with safe_delete should also raise a conflict error. Deletion without safe_delete should - # work - with self.assertRaises(couchdb.CouchDBConflictError) as cm: - self.object_store.discard(retrieved_submodel, True) - self.assertEqual("Object with id https://acplt.org/Test_Submodel has been modified in the " - "database since the version requested to be deleted.", str(cm.exception)) - self.object_store.discard(retrieved_submodel, False) - self.assertEqual(0, len(self.object_store)) - - # Committing after deletion should not raise a conflict error due to removal of the source attribute - retrieved_submodel.commit() - - def test_editing(self): - test_object = create_example_submodel() - self.object_store.add(test_object) - - # Test if commit uploads changes - test_object.id_short = "SomeNewIdShort" - test_object.commit() - - # Test if update restores changes - test_object.id_short = "AnotherIdShort" - test_object.update() - self.assertEqual("SomeNewIdShort", test_object.id_short) diff --git a/sdk/test/backend/test_local_file.py b/sdk/test/backend/test_local_file.py index 22aaa3155..7d96d8713 100644 --- a/sdk/test/backend/test_local_file.py +++ b/sdk/test/backend/test_local_file.py @@ -31,10 +31,8 @@ def tearDown(self) -> None: def test_object_store_add(self): test_object = create_example_submodel() self.object_store.add(test_object) - self.assertEqual( - test_object.source, - source_core+"fd787262b2743360f7ad03a3b4e9187e4c088aa37303448c9c43fe4c973dac53.json" - ) + # Note that this test is only checking that there are no errors during adding. + # The actual logic is tested together with retrieval in `test_retrieval`. 
def test_retrieval(self): test_object = create_example_submodel() @@ -49,11 +47,6 @@ def test_retrieval(self): test_object_retrieved_again = self.object_store.get_identifiable('https://acplt.org/Test_Submodel') self.assertIs(test_object_retrieved, test_object_retrieved_again) - # However, a changed source should invalidate the cached object, so we should get a new copy - test_object_retrieved.source = "couchdb://example.com/example/IRI-https%3A%2F%2Facplt.org%2FTest_Submodel" - test_object_retrieved_third = self.object_store.get_identifiable('https://acplt.org/Test_Submodel') - self.assertIsNot(test_object_retrieved, test_object_retrieved_third) - def test_example_submodel_storing(self) -> None: example_submodel = create_example_submodel() @@ -111,16 +104,3 @@ def test_key_errors(self) -> None: self.object_store.discard(retrieved_submodel) self.assertEqual("'No AAS object with id https://acplt.org/Test_Submodel exists in " "local file database'", str(cm.exception)) - - def test_editing(self): - test_object = create_example_submodel() - self.object_store.add(test_object) - - # Test if commit uploads changes - test_object.id_short = "SomeNewIdShort" - test_object.commit() - - # Test if update restores changes - test_object.id_short = "AnotherIdShort" - test_object.update() - self.assertEqual("SomeNewIdShort", test_object.id_short) diff --git a/sdk/test/examples/test_helpers.py b/sdk/test/examples/test_helpers.py index faca8602b..0257b8bca 100644 --- a/sdk/test/examples/test_helpers.py +++ b/sdk/test/examples/test_helpers.py @@ -227,7 +227,7 @@ def test_submodel_element_collection_checker(self): self.assertEqual("FAIL: Attribute value of SubmodelElementCollection[Collection] must contain 2 " "SubmodelElements (count=1)", repr(next(checker_iterator))) - self.assertEqual("FAIL: Submodel Element Property[Collection / Prop1] must exist ()", + self.assertEqual("FAIL: Submodel Element Property[Collection.Prop1] must exist ()", repr(next(checker_iterator))) collection.add_referable(property) @@ -291,7 +291,7 @@ def test_annotated_relationship_element(self): self.assertEqual("FAIL: Attribute annotation of AnnotatedRelationshipElement[test] must contain 1 DataElements " "(count=0)", repr(next(checker_iterator))) - self.assertEqual("FAIL: Annotation Property[test / ExampleAnnotatedProperty] must exist ()", + self.assertEqual("FAIL: Annotation Property[test.ExampleAnnotatedProperty] must exist ()", repr(next(checker_iterator))) def test_submodel_checker(self): diff --git a/sdk/test/model/test_base.py b/sdk/test/model/test_base.py index 1e0432a58..460bce563 100644 --- a/sdk/test/model/test_base.py +++ b/sdk/test/model/test_base.py @@ -11,7 +11,6 @@ from collections import OrderedDict from basyx.aas import model -from basyx.aas.backend import backends from basyx.aas.model import Identifier, Identifiable from basyx.aas.examples.data import example_aas @@ -42,7 +41,7 @@ def test_from_referable(self): self.assertEqual(model.Key(model.KeyTypes.MULTI_LANGUAGE_PROPERTY, "0"), model.Key.from_referable(mlp2)) with self.assertRaises(ValueError) as cm: model.Key.from_referable(mlp1) - self.assertEqual("Can't create Key for MultiLanguageProperty without an id_short!", str(cm.exception)) + self.assertEqual("Can't create Key value for MultiLanguageProperty without an id_short!", str(cm.exception)) mlp1.id_short = "mlp1" self.assertEqual(model.Key(model.KeyTypes.MULTI_LANGUAGE_PROPERTY, "mlp1"), model.Key.from_referable(mlp1)) @@ -52,28 +51,11 @@ def __init__(self): super().__init__() -class 
ExampleRefereableWithNamespace(model.Referable, model.UniqueIdShortNamespace): +class ExampleReferableWithNamespace(model.Referable, model.UniqueIdShortNamespace): def __init__(self): super().__init__() -class MockBackend(backends.Backend): - @classmethod - def update_object(cls, - updated_object: "Referable", # type: ignore - store_object: "Referable", # type: ignore - relative_path: List[str]) -> None: ... - - @classmethod - def commit_object(cls, - committed_object: "Referable", # type: ignore - store_object: "Referable", # type: ignore - relative_path: List[str]) -> None: ... - - update_object = mock.Mock() - commit_object = mock.Mock() - - class ExampleIdentifiable(model.Identifiable): def __init__(self): super().__init__() @@ -84,7 +66,6 @@ def generate_example_referable_tree() -> model.Referable: Generates an example referable tree, built like this: example_grandparent -> example_parent -> example_referable -> example_child -> example_grandchild - example_grandparent and example_grandchild both have an nonempty source, pointing to the mock-backend :return: example_referable """ @@ -98,7 +79,7 @@ def generate_example_referable_with_namespace(id_short: model.NameType, :param child: Child to be added to the namespace sets of the Referable :return: The generated Referable """ - referable = ExampleRefereableWithNamespace() + referable = ExampleReferableWithNamespace() referable.id_short = id_short if child: namespace_set = model.NamespaceSet(parent=referable, attribute_names=[("id_short", True)], @@ -111,9 +92,6 @@ def generate_example_referable_with_namespace(id_short: model.NameType, example_parent = generate_example_referable_with_namespace("exampleParent", example_referable) example_grandparent = generate_example_referable_with_namespace("exampleGrandparent", example_parent) - example_grandchild.source = "mockScheme:exampleGrandchild" - example_grandparent.source = "mockScheme:exampleGrandparent" - return example_referable @@ -157,97 +135,95 @@ def __init__(self, value: model.Referable): self.assertEqual('Referable must have an identifiable as root object and only parents that are referable', str(cm.exception)) - def test_update(self): - backends.register_backend("mockScheme", MockBackend) - example_referable = generate_example_referable_tree() - example_grandparent = example_referable.parent.parent - example_grandchild = example_referable.get_referable("exampleChild").get_referable("exampleGrandchild") - - # Test update with parameter "recursive=False" - example_referable.update(recursive=False) - MockBackend.update_object.assert_called_once_with( - updated_object=example_referable, - store_object=example_grandparent, - relative_path=["exampleGrandparent", "exampleParent", "exampleReferable"] - ) - MockBackend.update_object.reset_mock() - - # Test update with parameter "recursive=True" - example_referable.update() - self.assertEqual(MockBackend.update_object.call_count, 2) - MockBackend.update_object.assert_has_calls([ - mock.call(updated_object=example_referable, - store_object=example_grandparent, - relative_path=["exampleGrandparent", "exampleParent", "exampleReferable"]), - mock.call(updated_object=example_grandchild, - store_object=example_grandchild, - relative_path=[]) - ]) - MockBackend.update_object.reset_mock() - - # Test update with source != "" in example_referable - example_referable.source = "mockScheme:exampleReferable" - example_referable.update(recursive=False) - MockBackend.update_object.assert_called_once_with( - updated_object=example_referable, - 
store_object=example_referable, - relative_path=[] - ) - MockBackend.update_object.reset_mock() - - # Test update with no source available - example_grandparent.source = "" - example_referable.source = "" - example_referable.update(recursive=False) - MockBackend.update_object.assert_not_called() - - def test_commit(self): - backends.register_backend("mockScheme", MockBackend) - example_referable = generate_example_referable_tree() - example_grandparent = example_referable.parent.parent - example_grandchild = example_referable.get_referable("exampleChild").get_referable("exampleGrandchild") - - # Test commit starting from example_referable - example_referable.commit() - self.assertEqual(MockBackend.commit_object.call_count, 2) - MockBackend.commit_object.assert_has_calls([ - mock.call(committed_object=example_referable, - store_object=example_grandparent, - relative_path=["exampleParent", "exampleReferable"]), - mock.call(committed_object=example_grandchild, - store_object=example_grandchild, - relative_path=[]) - ]) - MockBackend.commit_object.reset_mock() - - # Test commit starting from example_grandchild - example_grandchild.commit() - self.assertEqual(MockBackend.commit_object.call_count, 2) - MockBackend.commit_object.assert_has_calls([ - mock.call(committed_object=example_grandchild, - store_object=example_grandparent, - relative_path=["exampleParent", "exampleReferable", "exampleChild", "exampleGrandchild"]), - mock.call(committed_object=example_grandchild, - store_object=example_grandchild, - relative_path=[]) - ]) - MockBackend.commit_object.reset_mock() - - # Test commit starting from example_grandchild after adding a source to example_referable - example_referable.source = "mockScheme:exampleReferable" - example_grandchild.commit() - self.assertEqual(MockBackend.commit_object.call_count, 3) - MockBackend.commit_object.assert_has_calls([ - mock.call(committed_object=example_grandchild, - store_object=example_referable, - relative_path=["exampleChild", "exampleGrandchild"]), - mock.call(committed_object=example_grandchild, - store_object=example_grandparent, - relative_path=["exampleParent", "exampleReferable", "exampleChild", "exampleGrandchild"]), - mock.call(committed_object=example_grandchild, - store_object=example_grandchild, - relative_path=[]) - ]) + def test_get_identifiable_root(self): + ref_with_no_parent = ExampleReferableWithNamespace() + ref_with_no_parent.id_short = "NotNone" + + identifiable = ExampleIdentifiable() + + ref_child = ExampleReferable() + ref_child.id_short = "Child" + ref_child.parent = identifiable + + list1 = model.SubmodelElementList("List1", model.SubmodelElementList) + list2 = model.SubmodelElementList(None, model.Property, value_type_list_element=model.datatypes.Int) + prop1 = model.Property(None, model.datatypes.Int) + + list1.parent = ref_child + list1.add_referable(list2) + list2.add_referable(prop1) + + self.assertIs(ref_with_no_parent.get_identifiable_root(), None) + self.assertIs(identifiable.get_identifiable_root(), identifiable) + self.assertIs(ref_child.get_identifiable_root(), identifiable) + self.assertIs(list1.get_identifiable_root(), identifiable) + self.assertIs(list2.get_identifiable_root(), identifiable) + self.assertIs(prop1.get_identifiable_root(), identifiable) + + def test_get_id_short_path(self): + """ + Tests the get_id_short_path() method of Referable objects. 
+ + Example structure: + - SMC: MySubmodelElementCollection + - Property: MySubProperty1 + - Property: MySubProperty2 + - SMC: MySubSubmodelElementCollection + - Property: MySubSubProperty1 + - Property: MySubSubProperty2 + - SML: MySubSubmodelElementList1 + - Property: "MySubTestValue1" + - Property: "MySubTestValue2" + - SML: MySubSubmodelElementList2 + - SML: MySubSubmodelElementList3 + - SMC: MySubmodelElementCollectionInSML3 + - Property: "MySubTestValue3" + """ + MySubmodelElementCollection = model.SubmodelElementCollection("MySubmodelElementCollection") + MySubProperty1 = model.Property("MySubProperty1", model.datatypes.String) + MySubProperty2 = model.Property("MySubProperty2", model.datatypes.String) + MySubSubmodelElementCollection = model.SubmodelElementCollection("MySubSubmodelElementCollection") + MySubSubProperty1 = model.Property("MySubSubProperty1", model.datatypes.String) + MySubSubProperty2 = model.Property("MySubSubProperty2", model.datatypes.String) + MySubSubmodelElementList1 = model.SubmodelElementList("MySubSubmodelElementList1", model.Property, + value_type_list_element=model.datatypes.String) + MySubTestValue1 = model.Property(None, model.datatypes.String) + MySubTestValue2 = model.Property(None, model.datatypes.String) + MySubSubmodelElementList2 = model.SubmodelElementList("MySubSubmodelElementList2", model.SubmodelElementList) + MySubSubmodelElementList3 = model.SubmodelElementList(None, model.SubmodelElementCollection) + MySubmodelElementCollectionInSML3 = model.SubmodelElementCollection(None) + MySubTestValue3 = model.Property("MySubTestValue3", model.datatypes.String) + + MySubmodelElementCollection.add_referable(MySubProperty1) + MySubmodelElementCollection.add_referable(MySubProperty2) + MySubmodelElementCollection.add_referable(MySubSubmodelElementCollection) + MySubSubmodelElementCollection.add_referable(MySubSubProperty1) + MySubSubmodelElementCollection.add_referable(MySubSubProperty2) + MySubmodelElementCollection.add_referable(MySubSubmodelElementList1) + MySubSubmodelElementList1.add_referable(MySubTestValue1) + MySubSubmodelElementList1.add_referable(MySubTestValue2) + MySubmodelElementCollection.add_referable(MySubSubmodelElementList2) + MySubSubmodelElementList2.add_referable(MySubSubmodelElementList3) + MySubSubmodelElementList3.add_referable(MySubmodelElementCollectionInSML3) + MySubmodelElementCollectionInSML3.add_referable(MySubTestValue3) + + expected_id_short_paths = { + MySubmodelElementCollection: "MySubmodelElementCollection", + MySubProperty1: "MySubmodelElementCollection.MySubProperty1", + MySubProperty2: "MySubmodelElementCollection.MySubProperty2", + MySubSubmodelElementCollection: "MySubmodelElementCollection.MySubSubmodelElementCollection", + MySubSubProperty1: "MySubmodelElementCollection.MySubSubmodelElementCollection.MySubSubProperty1", + MySubSubProperty2: "MySubmodelElementCollection.MySubSubmodelElementCollection.MySubSubProperty2", + MySubSubmodelElementList1: "MySubmodelElementCollection.MySubSubmodelElementList1", + MySubTestValue1: "MySubmodelElementCollection.MySubSubmodelElementList1[0]", + MySubTestValue2: "MySubmodelElementCollection.MySubSubmodelElementList1[1]", + MySubSubmodelElementList2: "MySubmodelElementCollection.MySubSubmodelElementList2", + MySubSubmodelElementList3: "MySubmodelElementCollection.MySubSubmodelElementList2[0]", + MySubmodelElementCollectionInSML3: "MySubmodelElementCollection.MySubSubmodelElementList2[0][0]", + MySubTestValue3: 
"MySubmodelElementCollection.MySubSubmodelElementList2[0][0].MySubTestValue3", + } + for referable, expected_path in expected_id_short_paths.items(): + self.assertEqual(referable.get_id_short_path(), expected_path) def test_update_from(self): example_submodel = example_aas.create_example_submodel() @@ -270,20 +246,8 @@ def test_update_from(self): self.assertIs(example_submodel.namespace_element_sets[0], example_submodel.submodel_element) self.assertIs(example_relel.parent, example_submodel) - # Test source update - example_relel.source = "scheme:OldRelElSource" - other_submodel.source = "scheme:NewSource" - other_relel.source = "scheme:NewRelElSource" - - example_submodel.update_from(other_submodel) - # Sources of the object itself should not be updated by default - self.assertEqual("", example_submodel.source) - # Sources of embedded objects should always be updated - self.assertEqual("scheme:NewRelElSource", example_relel.source) - def test_update_commit_qualifier_extension_semantic_id(self): submodel = model.Submodel("https://acplt.org/Test_Submodel") - submodel.update() qualifier = model.Qualifier("test", model.datatypes.String) extension = model.Extension("test") collection = model.SubmodelElementCollection("test") @@ -293,7 +257,6 @@ def test_update_commit_qualifier_extension_semantic_id(self): submodel.add_qualifier(qualifier) submodel.add_extension(extension) submodel.add_referable(collection) - submodel.commit() self.assertEqual(next(iter(submodel.qualifier)), qualifier) self.assertEqual(next(iter(submodel.extension)), extension) @@ -320,7 +283,6 @@ def test_update_commit_qualifier_extension_semantic_id(self): next(iter(submodel.submodel_element)) with self.assertRaises(StopIteration): next(iter(collection.value)) - submodel.commit() class ExampleNamespaceReferable(model.UniqueIdShortNamespace, model.UniqueSemanticIdNamespace, model.Identifiable): @@ -609,7 +571,7 @@ def test_id_short_path_resolution(self) -> None: with self.assertRaises(TypeError) as cm_3: self.namespace.get_referable(["List1", "0", "Prop1", "Test"]) self.assertEqual("Cannot resolve id_short or index 'Test' at " - f"Property[{self.namespace.id} / List1[0] / Prop1], " + f"Property[{self.namespace.id} / List1[0].Prop1], " "because it is not a UniqueIdShortNamespace!", str(cm_3.exception)) self.namespace.get_referable(["List1", "0", "Prop1"]) @@ -696,7 +658,7 @@ def test_aasd_117(self) -> None: se_collection.add_referable(property) with self.assertRaises(model.AASConstraintViolation) as cm: property.id_short = None - self.assertEqual("id_short of Property[foo / property] cannot be unset, since it is already contained in " + self.assertEqual("id_short of Property[foo.property] cannot be unset, since it is already contained in " "SubmodelElementCollection[foo] (Constraint AASd-117)", str(cm.exception)) property.id_short = "bar" @@ -947,7 +909,7 @@ def get_identifiable(self, identifier: Identifier) -> Identifiable: model.Property) with self.assertRaises(TypeError) as cm_3: ref4.resolve(DummyObjectProvider()) - self.assertEqual("Cannot resolve id_short or index 'prop' at Property[urn:x-test:submodel / list[0] / prop], " + self.assertEqual("Cannot resolve id_short or index 'prop' at Property[urn:x-test:submodel / list[0].prop], " "because it is not a UniqueIdShortNamespace!", str(cm_3.exception)) with self.assertRaises(AttributeError) as cm_4: @@ -1024,13 +986,13 @@ def test_from_referable(self) -> None: submodel.submodel_element.remove(collection) with self.assertRaises(ValueError) as cm: ref3 = 
model.ModelReference.from_referable(prop)
-        self.assertEqual("The given Referable object is not embedded within an Identifiable object", str(cm.exception))
+        self.assertEqual("The given Referable object is not embedded within an Identifiable object",
+                         str(cm.exception).split(":")[0])

-        # Test creating a reference to a custom Referable class
-        class DummyThing(model.Referable):
+        # Test creating a reference to a custom SubmodelElement class
+        class DummyThing(model.SubmodelElement):
             def __init__(self, id_short: model.NameType):
-                super().__init__()
-                self.id_short = id_short
+                super().__init__(id_short)

         class DummyIdentifyableNamespace(model.Submodel, model.UniqueIdShortNamespace):
             def __init__(self, id_: model.Identifier):
@@ -1041,7 +1003,7 @@ def __init__(self, id_: model.Identifier):
         identifable_thing = DummyIdentifyableNamespace("urn:x-test:thing")
         identifable_thing.things.add(thing)
         ref4 = model.ModelReference.from_referable(thing)
-        self.assertIs(ref4.type, model.Referable)
+        self.assertIs(ref4.type, model.SubmodelElement)


 class AdministrativeInformationTest(unittest.TestCase):
diff --git a/server/Dockerfile b/server/Dockerfile
index 4df672c41..7ad70bc66 100644
--- a/server/Dockerfile
+++ b/server/Dockerfile
@@ -23,7 +23,7 @@ RUN chmod +x /etc/supervisor/stop-supervisor.sh
 # Makes it possible to use a different configuration
 ENV UWSGI_INI=/etc/uwsgi/uwsgi.ini

-# object stores aren't thread-safe yet
+# Object stores aren't thread-safe yet
 # https://github.com/eclipse-basyx/basyx-python-sdk/issues/205
 ENV UWSGI_CHEAPER=0
 ENV UWSGI_PROCESSES=1
@@ -31,6 +31,14 @@ ENV NGINX_MAX_UPLOAD=1M
 ENV NGINX_WORKER_PROCESSES=1
 ENV LISTEN_PORT=80
 ENV CLIENT_BODY_BUFFER_SIZE=1M
+ENV API_BASE_PATH=/api/v3.0/
+
+# Default values for the storage envs
+ENV INPUT=/input
+ENV STORAGE=/storage
+ENV STORAGE_PERSISTENCY=False
+ENV STORAGE_OVERWRITE=False
+VOLUME ["/input", "/storage"]

 # Copy the entrypoint that will generate Nginx additional configs
 COPY server/entrypoint.sh /entrypoint.sh
@@ -44,5 +52,6 @@ COPY ./sdk /sdk
 COPY ./server/app /app
 WORKDIR /app
 RUN pip install ../sdk
+RUN pip install .

 CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"]
diff --git a/server/README.md b/server/README.md
index 37b649d79..d368d4ae5 100644
--- a/server/README.md
+++ b/server/README.md
@@ -6,68 +6,87 @@ The server currently implements the following interfaces:
 - [Asset Administration Shell Repository Service][4]
 - [Submodel Repository Service][5]

-It uses the [HTTP API][1] and the [AASX][7], [JSON][8], and [XML][9] Adapters of the [BaSyx Python SDK][3], to serve regarding files from a given directory.
+It uses the [HTTP API][1] and the [*AASX*][7], [*JSON*][8], and [*XML*][9] Adapters of the [BaSyx Python SDK][3] to serve the respective files from a given directory.
 The files are only read, changes won't persist.
-Alternatively, the container can also be told to use the [Local-File Backend][2] instead, which stores AAS and Submodels as individual JSON files and allows for persistent changes (except supplementary files, i.e. files referenced by `File` submodel elements).
+Alternatively, the container can also be told to use the [Local-File Backend][2] instead, which stores Asset Administration Shells (AAS) and Submodels as individual *JSON* files and allows for persistent changes (except supplementary files, i.e. files referenced by `File` SubmodelElements).

 See [below](#options) on how to configure this.
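+
+For illustration, a start-up file that the server can serve may be produced with the SDK's *JSON* adapter. The following is a minimal sketch, not part of the server itself; the identifier and file name are placeholders:
+
+```python
+from basyx.aas import model
+from basyx.aas.adapter.json import write_aas_json_file
+
+# Create an empty example Submodel and wrap it in an object store
+submodel = model.Submodel("https://example.com/submodels/MySubmodel")
+store = model.DictObjectStore([submodel])
+
+# Write an AAS JSON file that can be placed in the server's start-up directory
+with open("MySubmodel.json", "w", encoding="utf-8") as f:
+    write_aas_json_file(f, store)
+```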
## Building + The container image can be built via: ``` $ docker build -t basyx-python-server -f Dockerfile .. ``` +Note that when cloning this repository on Windows, Git may convert the line separators to CRLF. This breaks [`entrypoint.sh`](entrypoint.sh) and [`stop-supervisor.sh`](stop-supervisor.sh). Ensure both files use LF line separators (`\n`) before building. + ## Running ### Storage -The container needs to be provided with the directory `/storage` to store AAS and Submodel files: AASX, JSON, XML or JSON files of Local-File Backend. -This directory can be mapped via the `-v` option from another image or a local directory. -To map the directory `storage` inside the container, `-v ./storage:/storage` can be used. -The directory `storage` will be created in the current working directory, if it doesn't already exist. +The server makes use of two directories: + +- **`/input`** - *start-up data*: Directory from which the server loads AAS and Submodel files in *AASX*, *JSON* or *XML* format during start-up. The server will not modify these files. +- **`/storage`** - *persistent store*: Directory where all AAS and Submodels are stored as individual *JSON* files if the server is [configured](#options) for persistence. The server will modify these files. + +The directories can be mapped via the `-v` option from another image or a local directory. +To mount the host directories into the container, `-v ./input:/input -v ./storage:/storage` can be used. +Both local directories `./input` and `./storage` will be created in the current working directory, if they don't already exist. ### Port + The HTTP server inside the container listens on port 80 by default. To expose it on the host on port 8080, use the option `-p 8080:80` when running it. ### Options -The container can be configured via environment variables: -- `API_BASE_PATH` determines the base path under which all other API paths are made available. - Default: `/api/v3.0` -- `STORAGE_TYPE` can be one of `LOCAL_FILE_READ_ONLY` or `LOCAL_FILE_BACKEND`: - - When set to `LOCAL_FILE_READ_ONLY` (the default), the server will read and serve AASX, JSON, XML files from the storage directory. - The files are not modified, all changes done via the API are only stored in memory. - - When instead set to `LOCAL_FILE`, the server makes use of the [LocalFileBackend][2], where AAS and Submodels are persistently stored as JSON files. - Supplementary files, i.e. files referenced by `File` submodel elements, are not stored in this case. -- `STORAGE_PATH` sets the directory to read the files from *within the container*. If you bind your files to a directory different from the default `/storage`, you can use this variable to adjust the server accordingly. + +The container can be configured via environment variables. The most important ones are summarised below: + +| Variable | Description | Default | +|-----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------| +| `API_BASE_PATH` | Base path under which the API is served. | `/api/v3.0/` | +| `INPUT` | Path inside the container pointing to the directory from which the server takes its start-up data (*AASX*, *JSON*, *XML*). 
| `/input` |
+| `STORAGE` | Path inside the container pointing to the directory used by the server to persistently store data (*JSON*). | `/storage` |
+| `STORAGE_PERSISTENCY` | Flag to enable data persistence via the [LocalFileBackend][2]. AAS/Submodels are stored as *JSON* files in the directory specified by `STORAGE`. Supplementary files, i.e. files referenced by `File` SubmodelElements, are not stored. If disabled, any changes made via the API are only stored in memory. | `False` |
+| `STORAGE_OVERWRITE` | Flag to enable storage overwrite if `STORAGE_PERSISTENCY` is enabled. Any AAS/Submodel from the `INPUT` directory already present in the LocalFileBackend replaces its existing version. If disabled, the existing version is kept. | `False` |
+
+This implies the following start-up behaviour:
+
+- Any AAS/Submodel found in `INPUT` is loaded during start-up.
+- If `STORAGE_PERSISTENCY = True`:
+  - Any AAS/Submodel *not* present in the LocalFileBackend is added to it.
+  - Any AAS/Submodel *already present* is skipped, unless `STORAGE_OVERWRITE = True`, in which case it is replaced.
+- Supplementary files (e.g., `File` SubmodelElements) are never persisted by the LocalFileBackend.

 ### Running Examples

 Putting it all together, the container can be started via the following command:
 ```
-$ docker run -p 8080:80 -v ./storage:/storage basyx-python-server
+$ docker run -p 8080:80 -v ./input:/input -v ./storage:/storage basyx-python-server
 ```

-Since Windows uses backslashes instead of forward slashes in paths, you'll have to adjust the path to the storage directory there:
+Since Windows uses backslashes instead of forward slashes in paths, you'll have to adjust the paths to the mounted directories there:
 ```
-> docker run -p 8080:80 -v .\storage:/storage basyx-python-server
+> docker run -p 8080:80 -v .\input:/input -v .\storage:/storage basyx-python-server
 ```

-Per default, the server will use the `LOCAL_FILE_READ_ONLY` storage type and serve the API under `/api/v3.0` and read files from `/storage`. If you want to change this, you can do so like this:
+By default, the server will use the standard settings described [above](#options). Those settings can be adapted in the following way:
 ```
-$ docker run -p 8080:80 -v ./storage2:/storage2 -e API_BASE_PATH=/api/v3.1 -e STORAGE_TYPE=LOCAL_FILE_BACKEND -e STORAGE_PATH=/storage2 basyx-python-server
+$ docker run -p 8080:80 -v ./input:/input2 -v ./storage:/storage2 -e API_BASE_PATH=/api/v3.1/ -e INPUT=/input2 -e STORAGE=/storage2 -e STORAGE_PERSISTENCY=True -e STORAGE_OVERWRITE=True basyx-python-server
 ```

-## Building and running the image with docker-compose
+## Building and Running the Image with Docker Compose

 The container image can also be built and run via:
 ```
 $ docker compose up
 ```
-This is the exemplary `docker-compose` file for the server:
-````yaml
+An exemplary [`compose.yml`](compose.yml) file for the server:
+```yaml
+name: basyx-python-server
 services:
   app:
     build:
@@ -76,12 +95,64 @@ services:
     ports:
       - "8080:80"
     volumes:
+      - ./input:/input
       - ./storage:/storage
+    environment:
+      STORAGE_PERSISTENCY: "True"
+```
+
+Input files are read from `./input` and stored persistently under `./storage` on your host system. The server can be accessed at http://localhost:8080/api/v3.0/ from your host system.
+To get a different setup, the [`compose.yml`](compose.yml) file can be adapted using the options described [above](#options), similar to the third [running example](#running-examples).
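+
+Once the container is running, the repository endpoints can be queried under the configured base path. A minimal sketch using only the Python standard library; the port and base path are assumed to match the compose example above:
+
+```python
+import json
+import urllib.request
+
+# List all Asset Administration Shells in the repository
+with urllib.request.urlopen("http://localhost:8080/api/v3.0/shells") as resp:
+    body = json.load(resp)
+
+# Paginated list responses wrap the objects in a "result" field
+# next to "paging_metadata" (see JsonResponse in app/interfaces/base.py)
+print(body["result"])
+```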
+
+Note that the `Dockerfile` has to be specified explicitly via `dockerfile: server/Dockerfile`, as the build context must be set to the parent directory of `/server` to allow access to the local `/sdk`.
+
+## Running without Docker (Debugging Only)
+
+The server can also be run directly on the host system without Docker, NGINX and supervisord. Although this is not suitable for production, it may be useful for debugging.
+
+> [!WARNING]
+> Not supported for production systems!
+
+1. Install the local SDK and the local server package.
+   ```bash
+   $ pip install ../sdk
+   $ pip install ./app
+   ```
+
+2. Run the server by executing the main function in [`./app/interfaces/repository.py`](./app/interfaces/repository.py).
+   ```bash
+   $ python -m app.interfaces.repository
+   ```
+
+The server can be accessed at http://localhost:8080/api/v3.0/ from your host system.
+
+## Currently Unimplemented
+
+Several features and routes are currently not supported:
+
+1. Correlation ID: Not implemented because it was deemed unnecessary for this server.
+
+2. Extent Parameter (`withBlobValue/withoutBlobValue`):
+   Not implemented due to the lack of support in JSON/XML serialization.
+
+3. Route `/shells/{aasIdentifier}/asset-information/thumbnail`: Not implemented because the specification lacks clarity.
+
+4. Serialization and Description Routes:
+   - `/serialization`
+   - `/description`
+   These routes are not implemented at this time.
-
-````
+5. Value, Path, and PATCH Routes:
+   - All `/…/value$`, `/…/path$`, and `PATCH` routes are currently not implemented.
-Here files are read from `/storage` and the server can be accessed at http://localhost:8080/api/v3.0/ from your host system.
-To get a different setup this compose.yaml file can be adapted and expanded.
+6. Operation Invocation Routes: The following routes are not implemented because operation invocation
+   is not yet supported by the `basyx-python-sdk`:
+   - `POST /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/invoke`
+   - `POST /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/invoke/$value`
+   - `POST /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/invoke-async`
+   - `POST /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/invoke-async/$value`
+   - `GET /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/operation-status/{handleId}`
+   - `GET /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/operation-results/{handleId}`
+   - `GET /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/operation-results/{handleId}/$value`

 ## Acknowledgments

@@ -92,7 +163,7 @@ This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker][10] repository.
[3]: https://github.com/eclipse-basyx/basyx-python-sdk [4]: https://app.swaggerhub.com/apis/Plattform_i40/AssetAdministrationShellRepositoryServiceSpecification/V3.0.1_SSP-001 [5]: https://app.swaggerhub.com/apis/Plattform_i40/SubmodelRepositoryServiceSpecification/V3.0.1_SSP-001 -[6]: https://industrialdigitaltwin.org/content-hub/aasspecifications/idta_01002-3-0_application_programming_interfaces +[6]: https://industrialdigitaltwin.io/aas-specifications/IDTA-01002/v3.0/index.html [7]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/aasx.html#adapter-aasx [8]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/json.html [9]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/xml.html diff --git a/server/app/interfaces/__init__.py b/server/app/interfaces/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py new file mode 100644 index 000000000..2f23b5622 --- /dev/null +++ b/server/app/interfaces/base.py @@ -0,0 +1,441 @@ +# Copyright (c) 2025 the Eclipse BaSyx Authors +# +# This program and the accompanying materials are made available under the terms of the MIT License, available in +# the LICENSE file of this project. +# +# SPDX-License-Identifier: MIT +import abc +import datetime +import enum +import io +import itertools +import json +from typing import Iterable, Type, Iterator, Tuple, Optional, List, Union, Dict, Callable, TypeVar, Any + +import werkzeug.exceptions +import werkzeug.routing +import werkzeug.utils +from lxml import etree +from werkzeug import Response, Request +from werkzeug.exceptions import NotFound, BadRequest +from werkzeug.routing import MapAdapter + +from basyx.aas import model +from basyx.aas.adapter._generic import XML_NS_MAP +from basyx.aas.adapter.json import StrictStrippedAASFromJsonDecoder, StrictAASFromJsonDecoder, AASToJsonEncoder +from basyx.aas.adapter.xml import xml_serialization, XMLConstructables, read_aas_xml_element +from basyx.aas.model import AbstractObjectStore +from util.converters import base64url_decode + + +T = TypeVar("T") + + +@enum.unique +class MessageType(enum.Enum): + UNDEFINED = enum.auto() + INFO = enum.auto() + WARNING = enum.auto() + ERROR = enum.auto() + EXCEPTION = enum.auto() + + def __str__(self): + return self.name.capitalize() + + +class Message: + def __init__(self, code: str, text: str, message_type: MessageType = MessageType.UNDEFINED, + timestamp: Optional[datetime.datetime] = None): + self.code: str = code + self.text: str = text + self.message_type: MessageType = message_type + self.timestamp: datetime.datetime = timestamp if timestamp is not None \ + else datetime.datetime.now(datetime.timezone.utc) + + +class Result: + def __init__(self, success: bool, messages: Optional[List[Message]] = None): + if messages is None: + messages = [] + self.success: bool = success + self.messages: List[Message] = messages + + +ResponseData = Union[Result, object, List[object]] + + +class APIResponse(abc.ABC, Response): + @abc.abstractmethod + def __init__(self, obj: Optional[ResponseData] = None, cursor: Optional[int] = None, + stripped: bool = False, *args, **kwargs): + super().__init__(*args, **kwargs) + if obj is None: + self.status_code = 204 + else: + self.data = self.serialize(obj, cursor, stripped) + + @abc.abstractmethod + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + pass + + +class JsonResponse(APIResponse): + def __init__(self, *args, content_type="application/json", **kwargs): + 
super().__init__(*args, **kwargs, content_type=content_type) + + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + if cursor is None: + data = obj + else: + data = { + "paging_metadata": {"cursor": str(cursor)}, + "result": obj + } + return json.dumps( + data, + cls=StrippedResultToJsonEncoder if stripped else ResultToJsonEncoder, + separators=(",", ":") + ) + + +class XmlResponse(APIResponse): + def __init__(self, *args, content_type="application/xml", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + root_elem = etree.Element("response", nsmap=XML_NS_MAP) + if cursor is not None: + root_elem.set("cursor", str(cursor)) + if isinstance(obj, Result): + result_elem = self.result_to_xml(obj, **XML_NS_MAP) + for child in result_elem: + root_elem.append(child) + elif isinstance(obj, list): + for item in obj: + item_elem = xml_serialization.object_to_xml_element(item) + root_elem.append(item_elem) + else: + obj_elem = xml_serialization.object_to_xml_element(obj) + for child in obj_elem: + root_elem.append(child) + etree.cleanup_namespaces(root_elem) + xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") + return xml_str # type: ignore[return-value] + + @classmethod + def result_to_xml(cls, result: Result, **kwargs) -> etree._Element: + result_elem = etree.Element("result", **kwargs) + success_elem = etree.Element("success") + success_elem.text = xml_serialization.boolean_to_xml(result.success) + messages_elem = etree.Element("messages") + for message in result.messages: + messages_elem.append(cls.message_to_xml(message)) + + result_elem.append(success_elem) + result_elem.append(messages_elem) + return result_elem + + @classmethod + def message_to_xml(cls, message: Message) -> etree._Element: + message_elem = etree.Element("message") + message_type_elem = etree.Element("messageType") + message_type_elem.text = str(message.message_type) + text_elem = etree.Element("text") + text_elem.text = message.text + code_elem = etree.Element("code") + code_elem.text = message.code + timestamp_elem = etree.Element("timestamp") + timestamp_elem.text = message.timestamp.isoformat() + + message_elem.append(message_type_elem) + message_elem.append(text_elem) + message_elem.append(code_elem) + message_elem.append(timestamp_elem) + return message_elem + + +class XmlResponseAlt(XmlResponse): + def __init__(self, *args, content_type="text/xml", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + +class ResultToJsonEncoder(AASToJsonEncoder): + @classmethod + def _result_to_json(cls, result: Result) -> Dict[str, object]: + return { + "success": result.success, + "messages": result.messages + } + + @classmethod + def _message_to_json(cls, message: Message) -> Dict[str, object]: + return { + "messageType": message.message_type, + "text": message.text, + "code": message.code, + "timestamp": message.timestamp.isoformat() + } + + def default(self, obj: object) -> object: + if isinstance(obj, Result): + return self._result_to_json(obj) + if isinstance(obj, Message): + return self._message_to_json(obj) + if isinstance(obj, MessageType): + return str(obj) + return super().default(obj) + + +class StrippedResultToJsonEncoder(ResultToJsonEncoder): + stripped = True + + +class BaseWSGIApp: + url_map: werkzeug.routing.Map + + # TODO: the parameters can be typed via builtin wsgiref with Python 3.11+ + def __call__(self, environ, 
start_response) -> Iterable[bytes]: + response: Response = self.handle_request(Request(environ)) + return response(environ, start_response) + + @classmethod + def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: + limit_str = request.args.get('limit', default="10") + cursor_str = request.args.get('cursor', default="1") + try: + limit, cursor = int(limit_str), int(cursor_str) - 1 # cursor is 1-indexed + if limit < 0 or cursor < 0: + raise ValueError + except ValueError: + raise BadRequest("Limit can not be negative, cursor must be positive!") + start_index = cursor + end_index = cursor + limit + paginated_slice = itertools.islice(iterator, start_index, end_index) + return paginated_slice, end_index + + def handle_request(self, request: Request): + map_adapter: MapAdapter = self.url_map.bind_to_environ(request.environ) + try: + response_t = self.get_response_type(request) + except werkzeug.exceptions.NotAcceptable as e: + return e + + try: + endpoint, values = map_adapter.match() + return endpoint(request, values, response_t=response_t, map_adapter=map_adapter) + + # any raised error that leaves this function will cause a 500 internal server error + # so catch raised http exceptions and return them + except werkzeug.exceptions.HTTPException as e: + return self.http_exception_to_response(e, response_t) + + @staticmethod + def get_response_type(request: Request) -> Type[APIResponse]: + response_types: Dict[str, Type[APIResponse]] = { + "application/json": JsonResponse, + "application/xml": XmlResponse, + "text/xml": XmlResponseAlt + } + if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): + return JsonResponse + mime_type = request.accept_mimetypes.best_match(response_types) + if mime_type is None: + raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " + + ", ".join(response_types.keys())) + return response_types[mime_type] + + @staticmethod + def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ + -> APIResponse: + headers = exception.get_headers() + location = exception.get_response().location + if location is not None: + headers.append(("Location", location)) + if exception.code and exception.code >= 400: + message = Message(type(exception).__name__, + exception.description if exception.description is not None else "", + MessageType.ERROR) + result = Result(False, [message]) + else: + result = Result(False) + return response_type(result, status=exception.code, headers=headers) + + +class ObjectStoreWSGIApp(BaseWSGIApp): + object_store: AbstractObjectStore + + def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> Iterator[model.provider._IT]: + for obj in self.object_store: + if isinstance(obj, type_): + yield obj + + def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._IT]) -> model.provider._IT: + identifiable = self.object_store.get(identifier) + if not isinstance(identifiable, type_): + raise NotFound(f"No {type_.__name__} with {identifier} found!") + return identifiable + + +class HTTPApiDecoder: + # these are the types we can construct (well, only the ones we need) + type_constructables_map = { + model.AssetAdministrationShell: XMLConstructables.ASSET_ADMINISTRATION_SHELL, + model.AssetInformation: XMLConstructables.ASSET_INFORMATION, + model.ModelReference: XMLConstructables.MODEL_REFERENCE, + model.SpecificAssetId: XMLConstructables.SPECIFIC_ASSET_ID, + model.Qualifier: 
XMLConstructables.QUALIFIER, + model.Submodel: XMLConstructables.SUBMODEL, + model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT, + model.Reference: XMLConstructables.REFERENCE, + model.ConceptDescription: XMLConstructables.CONCEPT_DESCRIPTION, + } + + @classmethod + def check_type_support(cls, type_: type): + if type_ not in cls.type_constructables_map: + raise TypeError(f"Parsing {type_} is not supported!") + + @classmethod + def assert_type(cls, obj: object, type_: Type[T]) -> T: + if not isinstance(obj, type_): + raise BadRequest(f"Object {obj!r} is not of type {type_.__name__}!") + return obj + + @classmethod + def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: + cls.check_type_support(expect_type) + decoder: Type[StrictAASFromJsonDecoder] = StrictStrippedAASFromJsonDecoder if stripped \ + else StrictAASFromJsonDecoder + try: + parsed = json.loads(data, cls=decoder) + if isinstance(parsed, list) and expect_single: + raise BadRequest(f"Expected a single object of type {expect_type.__name__}, got {parsed!r}!") + if not isinstance(parsed, list) and not expect_single: + raise BadRequest(f"Expected List[{expect_type.__name__}], got {parsed!r}!") + parsed = [parsed] if not isinstance(parsed, list) else parsed + + # TODO: the following is ugly, but necessary because references aren't self-identified objects + # in the json schema + # TODO: json deserialization will always create an ModelReference[Submodel], xml deserialization determines + # that automatically + mapping = { + model.ModelReference: decoder._construct_model_reference, + model.AssetInformation: decoder._construct_asset_information, + model.SpecificAssetId: decoder._construct_specific_asset_id, + model.Reference: decoder._construct_reference, + model.Qualifier: decoder._construct_qualifier, + } + + constructor: Optional[Callable[..., T]] = mapping.get(expect_type) # type: ignore[assignment] + args = [] + if expect_type is model.ModelReference: + args.append(model.Submodel) + + if constructor is not None: + # construct elements that aren't self-identified + return [constructor(obj, *args) for obj in parsed] + + except (KeyError, ValueError, TypeError, json.JSONDecodeError, model.AASConstraintViolation) as e: + raise BadRequest(str(e)) from e + + return [cls.assert_type(obj, expect_type) for obj in parsed] + + @classmethod + def base64url_json_list(cls, data: str, expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: + data = base64url_decode(data) + return cls.json_list(data, expect_type, stripped, expect_single) + + @classmethod + def json(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool) -> T: + return cls.json_list(data, expect_type, stripped, True)[0] + + @classmethod + def base64url_json(cls, data: str, expect_type: Type[T], stripped: bool) -> T: + data = base64url_decode(data) + return cls.json_list(data, expect_type, stripped, True)[0] + + @classmethod + def xml(cls, data: bytes, expect_type: Type[T], stripped: bool) -> T: + cls.check_type_support(expect_type) + try: + xml_data = io.BytesIO(data) + rv = read_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], + stripped=stripped, failsafe=False) + except (KeyError, ValueError) as e: + # xml deserialization creates an error chain. 
since we only return one error, return the root cause
+            f: BaseException = e
+            while f.__cause__ is not None:
+                f = f.__cause__
+            raise BadRequest(str(f)) from e
+        except (etree.XMLSyntaxError, model.AASConstraintViolation) as e:
+            raise BadRequest(str(e)) from e
+        return cls.assert_type(rv, expect_type)
+
+    @classmethod
+    def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> T:
+        """
+        TODO: werkzeug documentation recommends checking the content length before retrieving the body to prevent
+              running out of memory, but it doesn't state how to check the content length.
+              Also: what would be a reasonable maximum content length? The request body isn't limited by the XML/JSON
+              schema.
+              In the meeting (25.11.2020) we discussed that this may refer to a reverse proxy in front of this
+              WSGI app, which should limit the maximum content length.
+        """
+        valid_content_types = ("application/json", "application/xml", "text/xml")

+        if request.mimetype not in valid_content_types:
+            raise werkzeug.exceptions.UnsupportedMediaType(
+                f"Invalid content-type: {request.mimetype}! Supported types: "
+                + ", ".join(valid_content_types))
+
+        if request.mimetype == "application/json":
+            return cls.json(request.get_data(), expect_type, stripped)
+        return cls.xml(request.get_data(), expect_type, stripped)
+
+    @classmethod
+    def request_body_list(cls, request: Request, expect_type: Type[T], stripped: bool) -> List[T]:
+        """
+        Deserializes the request body to an instance (or list of instances)
+        of the expected type.
+        """
+        # TODO: Refactor this method and request_body to avoid code duplication
+        valid_content_types = ("application/json", "application/xml", "text/xml")
+
+        if request.mimetype not in valid_content_types:
+            raise werkzeug.exceptions.UnsupportedMediaType(
+                f"Invalid content-type: {request.mimetype}! Supported types: " + ", ".join(valid_content_types)
+            )
+
+        if request.mimetype == "application/json":
+            raw_data = request.get_data()
+            try:
+                parsed = json.loads(raw_data)
+            except Exception as e:
+                raise werkzeug.exceptions.BadRequest(f"Invalid JSON: {e}") from e
+            # Check whether the parsed body is a JSON array:
+            if isinstance(parsed, list):
+                # Apply the conversion to every element.
+                return [cls._convert_single_json_item(item, expect_type, stripped) for item in parsed]  # type: ignore
+            else:
+                return [cls._convert_single_json_item(parsed, expect_type, stripped)]
+        else:
+            return [cls.xml(request.get_data(), expect_type, stripped)]
+
+    @classmethod
+    def _convert_single_json_item(cls, data: Any, expect_type: Type[T], stripped: bool) -> T:
+        """
+        Converts a single JSON object (as a Python dict) to an object of type ``expect_type``.
+        The dictionary is first serialized back to a JSON string and passed on as bytes.
+ """ + json_bytes = json.dumps(data).encode("utf-8") + return cls.json(json_bytes, expect_type, stripped) + + +def is_stripped_request(request: Request) -> bool: + level = request.args.get("level") + if level not in {"deep", "core", None}: + raise BadRequest(f"Level {level} is not a valid level!") + extent = request.args.get("extent") + if extent is not None: + raise werkzeug.exceptions.NotImplemented(f"The parameter extent is not yet implemented for this server!") + return level == "core" diff --git a/sdk/basyx/aas/adapter/http.py b/server/app/interfaces/repository.py similarity index 68% rename from sdk/basyx/aas/adapter/http.py rename to server/app/interfaces/repository.py index 12bd533f3..713023d0c 100644 --- a/sdk/basyx/aas/adapter/http.py +++ b/server/app/interfaces/repository.py @@ -6,431 +6,27 @@ # SPDX-License-Identifier: MIT """ This module implements the "Specification of the Asset Administration Shell Part 2 Application Programming Interfaces". -However, several features and routes are currently not supported: - -1. Correlation ID: Not implemented because it was deemed unnecessary for this server. - -2. Extent Parameter (`withBlobValue/withoutBlobValue`): - Not implemented due to the lack of support in JSON/XML serialization. - -3. Route `/shells/{aasIdentifier}/asset-information/thumbnail`: Not implemented because the specification lacks clarity. - -4. Serialization and Description Routes: - - `/serialization` - - `/description` - These routes are not implemented at this time. - -5. Value, Path, and PATCH Routes: - - All `/…/value$`, `/…/path$`, and `PATCH` routes are currently not implemented. - -6. Operation Invocation Routes: The following routes are not implemented because operation invocation - is not yet supported by the `basyx-python-sdk`: - - `POST /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/invoke` - - `POST /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/invoke/$value` - - `POST /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/invoke-async` - - `POST /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/invoke-async/$value` - - `GET /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/operation-status/{handleId}` - - `GET /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/operation-results/{handleId}` - - `GET /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/operation-results/{handleId}/$value` """ -import abc -import base64 -import binascii -import datetime -import enum import io import json -import itertools -import urllib +from typing import Type, Iterator, List, Dict, Union, Callable, Tuple, Optional, Iterable -from lxml import etree import werkzeug.exceptions import werkzeug.routing -import werkzeug.urls import werkzeug.utils -from werkzeug.exceptions import BadRequest, Conflict, NotFound -from werkzeug.routing import MapAdapter, Rule, Submount -from werkzeug.wrappers import Request, Response +from werkzeug import Response, Request from werkzeug.datastructures import FileStorage +from werkzeug.exceptions import NotFound, BadRequest, Conflict +from werkzeug.routing import Submount, Rule, MapAdapter from basyx.aas import model -from ._generic import XML_NS_MAP -from .xml import XMLConstructables, read_aas_xml_element, xml_serialization, object_to_xml_element -from .json import AASToJsonEncoder, StrictAASFromJsonDecoder, StrictStrippedAASFromJsonDecoder -from . 
import aasx - -from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple - - -@enum.unique -class MessageType(enum.Enum): - UNDEFINED = enum.auto() - INFO = enum.auto() - WARNING = enum.auto() - ERROR = enum.auto() - EXCEPTION = enum.auto() - - def __str__(self): - return self.name.capitalize() - - -class Message: - def __init__(self, code: str, text: str, message_type: MessageType = MessageType.UNDEFINED, - timestamp: Optional[datetime.datetime] = None): - self.code: str = code - self.text: str = text - self.message_type: MessageType = message_type - self.timestamp: datetime.datetime = timestamp if timestamp is not None \ - else datetime.datetime.now(datetime.timezone.utc) - - -class Result: - def __init__(self, success: bool, messages: Optional[List[Message]] = None): - if messages is None: - messages = [] - self.success: bool = success - self.messages: List[Message] = messages - - -class ResultToJsonEncoder(AASToJsonEncoder): - @classmethod - def _result_to_json(cls, result: Result) -> Dict[str, object]: - return { - "success": result.success, - "messages": result.messages - } +from basyx.aas.adapter import aasx +from util.converters import IdentifierToBase64URLConverter, IdShortPathConverter, base64url_decode +from .base import ObjectStoreWSGIApp, APIResponse, is_stripped_request, HTTPApiDecoder, T - @classmethod - def _message_to_json(cls, message: Message) -> Dict[str, object]: - return { - "messageType": message.message_type, - "text": message.text, - "code": message.code, - "timestamp": message.timestamp.isoformat() - } - - def default(self, obj: object) -> object: - if isinstance(obj, Result): - return self._result_to_json(obj) - if isinstance(obj, Message): - return self._message_to_json(obj) - if isinstance(obj, MessageType): - return str(obj) - return super().default(obj) - - -class StrippedResultToJsonEncoder(ResultToJsonEncoder): - stripped = True - - -ResponseData = Union[Result, object, List[object]] - - -class APIResponse(abc.ABC, Response): - @abc.abstractmethod - def __init__(self, obj: Optional[ResponseData] = None, cursor: Optional[int] = None, - stripped: bool = False, *args, **kwargs): - super().__init__(*args, **kwargs) - if obj is None: - self.status_code = 204 - else: - self.data = self.serialize(obj, cursor, stripped) - - @abc.abstractmethod - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - pass - - -class JsonResponse(APIResponse): - def __init__(self, *args, content_type="application/json", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - if cursor is None: - data = obj - else: - data = { - "paging_metadata": {"cursor": str(cursor)}, - "result": obj - } - return json.dumps( - data, - cls=StrippedResultToJsonEncoder if stripped else ResultToJsonEncoder, - separators=(",", ":") - ) - - -class XmlResponse(APIResponse): - def __init__(self, *args, content_type="application/xml", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - root_elem = etree.Element("response", nsmap=XML_NS_MAP) - if cursor is not None: - root_elem.set("cursor", str(cursor)) - if isinstance(obj, Result): - result_elem = result_to_xml(obj, **XML_NS_MAP) - for child in result_elem: - root_elem.append(child) - elif isinstance(obj, list): - for item in obj: - item_elem = 
object_to_xml_element(item) - root_elem.append(item_elem) - else: - obj_elem = object_to_xml_element(obj) - for child in obj_elem: - root_elem.append(child) - etree.cleanup_namespaces(root_elem) - xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") - return xml_str # type: ignore[return-value] - - -class XmlResponseAlt(XmlResponse): - def __init__(self, *args, content_type="text/xml", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - -def result_to_xml(result: Result, **kwargs) -> etree._Element: - result_elem = etree.Element("result", **kwargs) - success_elem = etree.Element("success") - success_elem.text = xml_serialization.boolean_to_xml(result.success) - messages_elem = etree.Element("messages") - for message in result.messages: - messages_elem.append(message_to_xml(message)) - - result_elem.append(success_elem) - result_elem.append(messages_elem) - return result_elem - - -def message_to_xml(message: Message) -> etree._Element: - message_elem = etree.Element("message") - message_type_elem = etree.Element("messageType") - message_type_elem.text = str(message.message_type) - text_elem = etree.Element("text") - text_elem.text = message.text - code_elem = etree.Element("code") - code_elem.text = message.code - timestamp_elem = etree.Element("timestamp") - timestamp_elem.text = message.timestamp.isoformat() - - message_elem.append(message_type_elem) - message_elem.append(text_elem) - message_elem.append(code_elem) - message_elem.append(timestamp_elem) - return message_elem - - -def get_response_type(request: Request) -> Type[APIResponse]: - response_types: Dict[str, Type[APIResponse]] = { - "application/json": JsonResponse, - "application/xml": XmlResponse, - "text/xml": XmlResponseAlt - } - if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): - return JsonResponse - mime_type = request.accept_mimetypes.best_match(response_types) - if mime_type is None: - raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " - + ", ".join(response_types.keys())) - return response_types[mime_type] - - -def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ - -> APIResponse: - headers = exception.get_headers() - location = exception.get_response().location - if location is not None: - headers.append(("Location", location)) - if exception.code and exception.code >= 400: - message = Message(type(exception).__name__, exception.description if exception.description is not None else "", - MessageType.ERROR) - result = Result(False, [message]) - else: - result = Result(False) - return response_type(result, status=exception.code, headers=headers) - - -def is_stripped_request(request: Request) -> bool: - level = request.args.get("level") - if level not in {"deep", "core", None}: - raise BadRequest(f"Level {level} is not a valid level!") - extent = request.args.get("extent") - if extent is not None: - raise werkzeug.exceptions.NotImplemented(f"The parameter extent is not yet implemented for this server!") - return level == "core" - - -T = TypeVar("T") - -BASE64URL_ENCODING = "utf-8" - - -def base64url_decode(data: str) -> str: - try: - # If the requester omits the base64 padding, an exception will be raised. - # However, Python doesn't complain about too much padding, - # thus we simply always append two padding characters (==). 
- # See also: https://stackoverflow.com/a/49459036/4780052 - decoded = base64.urlsafe_b64decode(data + "==").decode(BASE64URL_ENCODING) - except binascii.Error: - raise BadRequest(f"Encoded data {data} is invalid base64url!") - except UnicodeDecodeError: - raise BadRequest(f"Encoded base64url value is not a valid {BASE64URL_ENCODING} string!") - return decoded - - -def base64url_encode(data: str) -> str: - encoded = base64.urlsafe_b64encode(data.encode(BASE64URL_ENCODING)).decode("ascii") - return encoded - - -class HTTPApiDecoder: - # these are the types we can construct (well, only the ones we need) - type_constructables_map = { - model.AssetAdministrationShell: XMLConstructables.ASSET_ADMINISTRATION_SHELL, - model.AssetInformation: XMLConstructables.ASSET_INFORMATION, - model.ModelReference: XMLConstructables.MODEL_REFERENCE, - model.SpecificAssetId: XMLConstructables.SPECIFIC_ASSET_ID, - model.Qualifier: XMLConstructables.QUALIFIER, - model.Submodel: XMLConstructables.SUBMODEL, - model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT, - model.Reference: XMLConstructables.REFERENCE - } - - @classmethod - def check_type_supportance(cls, type_: type): - if type_ not in cls.type_constructables_map: - raise TypeError(f"Parsing {type_} is not supported!") - - @classmethod - def assert_type(cls, obj: object, type_: Type[T]) -> T: - if not isinstance(obj, type_): - raise BadRequest(f"Object {obj!r} is not of type {type_.__name__}!") - return obj - - @classmethod - def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: - cls.check_type_supportance(expect_type) - decoder: Type[StrictAASFromJsonDecoder] = StrictStrippedAASFromJsonDecoder if stripped \ - else StrictAASFromJsonDecoder - try: - parsed = json.loads(data, cls=decoder) - if not isinstance(parsed, list): - if not expect_single: - raise BadRequest(f"Expected List[{expect_type.__name__}], got {parsed!r}!") - parsed = [parsed] - elif expect_single: - raise BadRequest(f"Expected a single object of type {expect_type.__name__}, got {parsed!r}!") - # TODO: the following is ugly, but necessary because references aren't self-identified objects - # in the json schema - # TODO: json deserialization will always create an ModelReference[Submodel], xml deserialization determines - # that automatically - constructor: Optional[Callable[..., T]] = None - args = [] - if expect_type is model.ModelReference: - constructor = decoder._construct_model_reference # type: ignore[assignment] - args.append(model.Submodel) - elif expect_type is model.AssetInformation: - constructor = decoder._construct_asset_information # type: ignore[assignment] - elif expect_type is model.SpecificAssetId: - constructor = decoder._construct_specific_asset_id # type: ignore[assignment] - elif expect_type is model.Reference: - constructor = decoder._construct_reference # type: ignore[assignment] - elif expect_type is model.Qualifier: - constructor = decoder._construct_qualifier # type: ignore[assignment] - - if constructor is not None: - # construct elements that aren't self-identified - return [constructor(obj, *args) for obj in parsed] - - except (KeyError, ValueError, TypeError, json.JSONDecodeError, model.AASConstraintViolation) as e: - raise BadRequest(str(e)) from e - - return [cls.assert_type(obj, expect_type) for obj in parsed] - @classmethod - def base64urljson_list(cls, data: str, expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: - data = base64url_decode(data) - return cls.json_list(data, 
expect_type, stripped, expect_single) - - @classmethod - def json(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool) -> T: - return cls.json_list(data, expect_type, stripped, True)[0] - - @classmethod - def base64urljson(cls, data: str, expect_type: Type[T], stripped: bool) -> T: - data = base64url_decode(data) - return cls.json_list(data, expect_type, stripped, True)[0] - - @classmethod - def xml(cls, data: bytes, expect_type: Type[T], stripped: bool) -> T: - cls.check_type_supportance(expect_type) - try: - xml_data = io.BytesIO(data) - rv = read_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], - stripped=stripped, failsafe=False) - except (KeyError, ValueError) as e: - # xml deserialization creates an error chain. since we only return one error, return the root cause - f: BaseException = e - while f.__cause__ is not None: - f = f.__cause__ - raise BadRequest(str(f)) from e - except (etree.XMLSyntaxError, model.AASConstraintViolation) as e: - raise BadRequest(str(e)) from e - return cls.assert_type(rv, expect_type) - - @classmethod - def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: - """ - TODO: werkzeug documentation recommends checking the content length before retrieving the body to prevent - running out of memory. but it doesn't state how to check the content length - also: what would be a reasonable maximum content length? the request body isn't limited by the xml/json - schema - In the meeting (25.11.2020) we discussed, this may refer to a reverse proxy in front of this WSGI app, - which should limit the maximum content length. - """ - valid_content_types = ("application/json", "application/xml", "text/xml") - - if request.mimetype not in valid_content_types: - raise werkzeug.exceptions.UnsupportedMediaType( - f"Invalid content-type: {request.mimetype}! Supported types: " - + ", ".join(valid_content_types)) - - if request.mimetype == "application/json": - return cls.json(request.get_data(), expect_type, stripped) - return cls.xml(request.get_data(), expect_type, stripped) - - -class Base64URLConverter(werkzeug.routing.UnicodeConverter): - - def to_url(self, value: model.Identifier) -> str: - return super().to_url(base64url_encode(value)) - - def to_python(self, value: str) -> model.Identifier: - value = super().to_python(value) - decoded = base64url_decode(super().to_python(value)) - return decoded - - -class IdShortPathConverter(werkzeug.routing.UnicodeConverter): - id_short_sep = "." 
- - def to_url(self, value: List[str]) -> str: - return super().to_url(self.id_short_sep.join(value)) - - def to_python(self, value: str) -> List[str]: - id_shorts = super().to_python(value).split(self.id_short_sep) - for id_short in id_shorts: - try: - model.Referable.validate_id_short(id_short) - except (ValueError, model.AASConstraintViolation): - raise BadRequest(f"{id_short} is not a valid id_short!") - return id_shorts - - -class WSGIApp: +class WSGIApp(ObjectStoreWSGIApp): def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.AbstractSupplementaryFileContainer, base_path: str = "/api/v3.0"): self.object_store: model.AbstractObjectStore = object_store @@ -488,8 +84,7 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs Rule("/submodel-elements", methods=["POST"], endpoint=self.post_submodel_submodel_elements_id_short_path), Submount("/submodel-elements", [ - Rule("/$metadata", methods=["GET"], - endpoint=self.get_submodel_submodel_elements_metadata), + Rule("/$metadata", methods=["GET"], endpoint=self.get_submodel_submodel_elements_metadata), Rule("/$reference", methods=["GET"], endpoint=self.get_submodel_submodel_elements_reference), Rule("/$value", methods=["GET"], endpoint=self.not_implemented), @@ -525,10 +120,8 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs Rule("/operation-status/", methods=["GET"], endpoint=self.not_implemented), Submount("/operation-results", [ - Rule("/", methods=["GET"], - endpoint=self.not_implemented), - Rule("//$value", methods=["GET"], - endpoint=self.not_implemented) + Rule("/", methods=["GET"], endpoint=self.not_implemented), + Rule("//$value", methods=["GET"], endpoint=self.not_implemented) ]), Rule("/qualifiers", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), @@ -544,10 +137,8 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs ]) ]) ]), - Rule("/qualifiers", methods=["GET"], - endpoint=self.get_submodel_submodel_element_qualifiers), - Rule("/qualifiers", methods=["POST"], - endpoint=self.post_submodel_submodel_element_qualifiers), + Rule("/qualifiers", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), + Rule("/qualifiers", methods=["POST"], endpoint=self.post_submodel_submodel_element_qualifiers), Submount("/qualifiers", [ Rule("/", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), @@ -567,7 +158,7 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs ]), ]) ], converters={ - "base64url": Base64URLConverter, + "base64url": IdentifierToBase64URLConverter, "id_short_path": IdShortPathConverter }, strict_slashes=False) @@ -580,13 +171,11 @@ def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._ identifiable = self.object_store.get(identifier) if not isinstance(identifiable, type_): raise NotFound(f"No {type_.__name__} with {identifier} found!") - identifiable.update() return identifiable def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> Iterator[model.provider._IT]: for obj in self.object_store: if isinstance(obj, type_): - obj.update() yield obj def _resolve_reference(self, reference: model.ModelReference[model.base._RT]) -> model.base._RT: @@ -651,21 +240,6 @@ def _get_submodel_reference(cls, aas: model.AssetAdministrationShell, submodel_i return ref raise NotFound(f"The AAS {aas!r} doesn't have a submodel reference to {submodel_id!r}!") - @classmethod - def _get_slice(cls, request: 
Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: - limit_str = request.args.get('limit', default="10") - cursor_str = request.args.get('cursor', default="1") - try: - limit, cursor = int(limit_str), int(cursor_str) - 1 # cursor is 1-indexed - if limit < 0 or cursor < 0: - raise ValueError - except ValueError: - raise BadRequest("Limit can not be negative, cursor must be positive!") - start_index = cursor - end_index = cursor + limit - paginated_slice = itertools.islice(iterator, start_index, end_index) - return paginated_slice, end_index - def _get_shells(self, request: Request) -> Tuple[Iterator[model.AssetAdministrationShell], int]: aas: Iterator[model.AssetAdministrationShell] = self._get_all_obj_of_type(model.AssetAdministrationShell) @@ -713,7 +287,7 @@ def _get_submodels(self, request: Request) -> Tuple[Iterator[model.Submodel], in submodels = filter(lambda sm: sm.id_short == id_short, submodels) semantic_id = request.args.get("semanticId") if semantic_id is not None: - spec_semantic_id = HTTPApiDecoder.base64urljson( + spec_semantic_id = HTTPApiDecoder.base64url_json( semantic_id, model.Reference, False) # type: ignore[type-abstract] submodels = filter(lambda sm: sm.semantic_id == spec_semantic_id, submodels) paginated_submodels, end_index = self._get_slice(request, submodels) @@ -737,22 +311,6 @@ def _get_submodel_submodel_elements_id_short_path(self, url_args: Dict) -> model def _get_concept_description(self, url_args): return self._get_obj_ts(url_args["concept_id"], model.ConceptDescription) - def handle_request(self, request: Request): - map_adapter: MapAdapter = self.url_map.bind_to_environ(request.environ) - try: - response_t = get_response_type(request) - except werkzeug.exceptions.NotAcceptable as e: - return e - - try: - endpoint, values = map_adapter.match() - return endpoint(request, values, response_t=response_t, map_adapter=map_adapter) - - # any raised error that leaves this function will cause a 500 internal server error - # so catch raised http exceptions and return them - except werkzeug.exceptions.HTTPException as e: - return http_exception_to_response(e, response_t) - # ------ all not implemented ROUTES ------- def not_implemented(self, request: Request, url_args: Dict, **_kwargs) -> Response: raise werkzeug.exceptions.NotImplemented("This route is not implemented!") @@ -769,7 +327,6 @@ def post_aas(self, request: Request, url_args: Dict, response_t: Type[APIRespons self.object_store.add(aas) except KeyError as e: raise Conflict(f"AssetAdministrationShell with Identifier {aas.id} already exists!") from e - aas.commit() created_resource_url = map_adapter.build(self.get_aas, { "aas_id": aas.id }, force_external=True) @@ -796,7 +353,6 @@ def put_aas(self, request: Request, url_args: Dict, response_t: Type[APIResponse aas = self._get_shell(url_args) aas.update_from(HTTPApiDecoder.request_body(request, model.AssetAdministrationShell, is_stripped_request(request))) - aas.commit() return response_t() def delete_aas(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: @@ -813,7 +369,6 @@ def put_aas_asset_information(self, request: Request, url_args: Dict, response_t **_kwargs) -> Response: aas = self._get_shell(url_args) aas.asset_information = HTTPApiDecoder.request_body(request, model.AssetInformation, False) - aas.commit() return response_t() def get_aas_submodel_refs(self, request: Request, url_args: Dict, response_t: Type[APIResponse], @@ -830,14 +385,12 @@ def post_aas_submodel_refs(self, request: Request, 
url_args: Dict, response_t: T if sm_ref in aas.submodel: raise Conflict(f"{sm_ref!r} already exists!") aas.submodel.add(sm_ref) - aas.commit() return response_t(sm_ref, status=201) def delete_aas_submodel_refs_specific(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: aas = self._get_shell(url_args) aas.submodel.remove(self._get_submodel_reference(aas, url_args["submodel_id"])) - aas.commit() return response_t() def put_aas_submodel_refs_submodel(self, request: Request, url_args: Dict, response_t: Type[APIResponse], @@ -850,11 +403,9 @@ def put_aas_submodel_refs_submodel(self, request: Request, url_args: Dict, respo id_changed: bool = submodel.id != new_submodel.id # TODO: https://github.com/eclipse-basyx/basyx-python-sdk/issues/216 submodel.update_from(new_submodel) - submodel.commit() if id_changed: aas.submodel.remove(sm_ref) aas.submodel.add(model.ModelReference.from_referable(submodel)) - aas.commit() return response_t() def delete_aas_submodel_refs_submodel(self, request: Request, url_args: Dict, response_t: Type[APIResponse], @@ -864,7 +415,6 @@ def delete_aas_submodel_refs_submodel(self, request: Request, url_args: Dict, re submodel = self._resolve_reference(sm_ref) self.object_store.remove(submodel) aas.submodel.remove(sm_ref) - aas.commit() return response_t() def aas_submodel_refs_redirect(self, request: Request, url_args: Dict, map_adapter: MapAdapter, response_t=None, @@ -893,7 +443,6 @@ def post_submodel(self, request: Request, url_args: Dict, response_t: Type[APIRe self.object_store.add(submodel) except KeyError as e: raise Conflict(f"Submodel with Identifier {submodel.id} already exists!") from e - submodel.commit() created_resource_url = map_adapter.build(self.get_submodel, { "submodel_id": submodel.id }, force_external=True) @@ -935,7 +484,6 @@ def get_submodels_reference(self, request: Request, url_args: Dict, response_t: def put_submodel(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: submodel = self._get_submodel(url_args) submodel.update_from(HTTPApiDecoder.request_body(request, model.Submodel, is_stripped_request(request))) - submodel.commit() return response_t() def get_submodel_submodel_elements(self, request: Request, url_args: Dict, response_t: Type[APIResponse], @@ -1010,7 +558,6 @@ def put_submodel_submodel_elements_id_short_path(self, request: Request, url_arg model.SubmodelElement, # type: ignore[type-abstract] is_stripped_request(request)) submodel_element.update_from(new_submodel_element) - submodel_element.commit() return response_t() def delete_submodel_submodel_elements_id_short_path(self, request: Request, url_args: Dict, @@ -1069,7 +616,6 @@ def put_submodel_submodel_element_attachment(self, request: Request, url_args: D f"while {submodel_element!r} has content_type {submodel_element.content_type!r}!") submodel_element.value = self.file_store.add_file(filename, file_storage.stream, submodel_element.content_type) - submodel_element.commit() return response_t() def delete_submodel_submodel_element_attachment(self, request: Request, url_args: Dict, @@ -1092,7 +638,6 @@ def delete_submodel_submodel_element_attachment(self, request: Request, url_args pass submodel_element.value = None - submodel_element.commit() return response_t() def get_submodel_submodel_element_qualifiers(self, request: Request, url_args: Dict, response_t: Type[APIResponse], @@ -1110,7 +655,6 @@ def post_submodel_submodel_element_qualifiers(self, request: Request, url_args: if 
sm_or_se.qualifier.contains_id("type", qualifier.type): raise Conflict(f"Qualifier with type {qualifier.type} already exists!") sm_or_se.qualifier.add(qualifier) - sm_or_se.commit() created_resource_url = map_adapter.build(self.get_submodel_submodel_element_qualifiers, { "submodel_id": url_args["submodel_id"], "id_shorts": url_args.get("id_shorts") or None, @@ -1129,7 +673,6 @@ def put_submodel_submodel_element_qualifiers(self, request: Request, url_args: D raise Conflict(f"A qualifier of type {new_qualifier.type!r} already exists for {sm_or_se!r}") sm_or_se.remove_qualifier_by_type(qualifier.type) sm_or_se.qualifier.add(new_qualifier) - sm_or_se.commit() if qualifier_type_changed: created_resource_url = map_adapter.build(self.get_submodel_submodel_element_qualifiers, { "submodel_id": url_args["submodel_id"], @@ -1145,7 +688,6 @@ def delete_submodel_submodel_element_qualifiers(self, request: Request, url_args sm_or_se = self._get_submodel_or_nested_submodel_element(url_args) qualifier_type = url_args["qualifier_type"] self._qualifiable_qualifier_op(sm_or_se, sm_or_se.remove_qualifier_by_type, qualifier_type) - sm_or_se.commit() return response_t() # --------- CONCEPT DESCRIPTION ROUTES --------- @@ -1163,7 +705,6 @@ def post_concept_description(self, request: Request, url_args: Dict, response_t: self.object_store.add(concept_description) except KeyError as e: raise Conflict(f"ConceptDescription with Identifier {concept_description.id} already exists!") from e - concept_description.commit() created_resource_url = map_adapter.build(self.get_concept_description, { "concept_id": concept_description.id }, force_external=True) @@ -1179,7 +720,6 @@ def put_concept_description(self, request: Request, url_args: Dict, response_t: concept_description = self._get_concept_description(url_args) concept_description.update_from(HTTPApiDecoder.request_body(request, model.ConceptDescription, is_stripped_request(request))) - concept_description.commit() return response_t() def delete_concept_description(self, request: Request, url_args: Dict, response_t: Type[APIResponse], diff --git a/server/app/main.py b/server/app/main.py index c502bfbe0..49920f628 100644 --- a/server/app/main.py +++ b/server/app/main.py @@ -1,46 +1,126 @@ +# Copyright (c) 2025 the Eclipse BaSyx Authors +# +# This program and the accompanying materials are made available under the terms of the MIT License, available in +# the LICENSE file of this project. +# +# SPDX-License-Identifier: MIT +""" +This module provides the WSGI entry point for the Asset Administration Shell Repository Server. +""" + +import logging import os -import pathlib -import sys +from basyx.aas.adapter import load_directory +from basyx.aas.adapter.aasx import DictSupplementaryFileContainer +from basyx.aas.backend.local_file import LocalFileObjectStore +from basyx.aas.model.provider import DictObjectStore +from interfaces.repository import WSGIApp +from typing import Tuple, Union -from basyx.aas import model, adapter -from basyx.aas.adapter import aasx -from basyx.aas.backend.local_file import LocalFileObjectStore -from basyx.aas.adapter.http import WSGIApp +# -------- Helper methods -------- + +def setup_logger() -> logging.Logger: + """ + Configure a custom :class:`~logging.Logger` for the start-up sequence of the server. 
+ + :return: Configured :class:`~logging.Logger` + """ + + logger = logging.getLogger(__name__) + if not logger.handlers: + logger.setLevel(logging.INFO) + handler = logging.StreamHandler() + handler.setLevel(logging.INFO) + handler.setFormatter(logging.Formatter("%(levelname)s [Server Start-up] %(message)s")) + logger.addHandler(handler) + logger.propagate = False + return logger + + +def build_storage( + env_input: str, + env_storage: str, + env_storage_persistency: bool, + env_storage_overwrite: bool, + logger: logging.Logger +) -> Tuple[Union[DictObjectStore, LocalFileObjectStore], DictSupplementaryFileContainer]: + """ + Configure the server's storage according to the given start-up settings. + + :param env_input: ``str`` pointing to the input directory of the server + :param env_storage: ``str`` pointing to the :class:`~basyx.aas.backend.local_file.LocalFileObjectStore` storage + directory of the server if persistent storage is enabled + :param env_storage_persistency: Flag to enable persistent storage + :param env_storage_overwrite: Flag to overwrite existing :class:`Identifiables <basyx.aas.model.Identifiable>` + in the :class:`~basyx.aas.backend.local_file.LocalFileObjectStore` if persistent storage is enabled + :param logger: :class:`~logging.Logger` used for start-up diagnostics + :return: Tuple consisting of a :class:`~basyx.aas.model.provider.DictObjectStore` if persistent storage is disabled + or a :class:`~basyx.aas.backend.local_file.LocalFileObjectStore` if persistent storage is enabled, and a + :class:`~basyx.aas.adapter.aasx.DictSupplementaryFileContainer` as storage for + :class:`~interfaces.repository.WSGIApp` + """ + + if env_storage_persistency: + storage_files = LocalFileObjectStore(env_storage) + storage_files.check_directory(create=True) + if os.path.isdir(env_input): + input_files, input_supp_files = load_directory(env_input) + added, overwritten, skipped = storage_files.sync(input_files, env_storage_overwrite) + logger.info( + "Loaded %d identifiable(s) and %d supplementary file(s) from \"%s\"", + len(input_files), len(input_supp_files), env_input + ) + logger.info( + "Synced INPUT to STORAGE with %d added and %d %s", + added, + overwritten if env_storage_overwrite else skipped, + "overwritten" if env_storage_overwrite else "skipped" + ) + return storage_files, input_supp_files + else: + logger.warning("INPUT directory \"%s\" not found, starting empty", env_input) + return storage_files, DictSupplementaryFileContainer() + + if os.path.isdir(env_input): + input_files, input_supp_files = load_directory(env_input) + logger.info( + "Loaded %d identifiable(s) and %d supplementary file(s) from \"%s\"", + len(input_files), len(input_supp_files), env_input + ) + return input_files, input_supp_files + else: + logger.warning("INPUT directory \"%s\" not found, starting empty", env_input) + return DictObjectStore(), DictSupplementaryFileContainer() + -storage_path = os.getenv("STORAGE_PATH", "/storage") -storage_type = os.getenv("STORAGE_TYPE", "LOCAL_FILE_READ_ONLY") -base_path = os.getenv("API_BASE_PATH") +# -------- WSGI entrypoint -------- -wsgi_optparams = {} +logger = setup_logger() -if base_path is not None: - wsgi_optparams["base_path"] = base_path +env_input = os.getenv("INPUT", "/input") +env_storage = os.getenv("STORAGE", "/storage") +env_storage_persistency = os.getenv("STORAGE_PERSISTENCY", "false").lower() in {"1", "true", "yes"} +env_storage_overwrite = os.getenv("STORAGE_OVERWRITE", "false").lower() in {"1", "true", "yes"} +env_api_base_path = os.getenv("API_BASE_PATH")
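As a small standalone illustration of the boolean environment parsing above (the value mirrors what compose.yml sets further down; the environment assignment is hypothetical):

import os

os.environ["STORAGE_PERSISTENCY"] = "True"  # e.g. injected via compose.yml
persistency = os.getenv("STORAGE_PERSISTENCY", "false").lower() in {"1", "true", "yes"}
assert persistency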
== "LOCAL_FILE_BACKEND": - application = WSGIApp(LocalFileObjectStore(storage_path), aasx.DictSupplementaryFileContainer(), **wsgi_optparams) +wsgi_optparams = {"base_path": env_api_base_path} if env_api_base_path else {} -elif storage_type in "LOCAL_FILE_READ_ONLY": - object_store: model.DictObjectStore = model.DictObjectStore() - file_store: aasx.DictSupplementaryFileContainer = aasx.DictSupplementaryFileContainer() +logger.info( + "Loaded settings API_BASE_PATH=\"%s\", INPUT=\"%s\", STORAGE=\"%s\", PERSISTENCY=%s, OVERWRITE=%s", + env_api_base_path or "", env_input, env_storage, env_storage_persistency, env_storage_overwrite +) - for file in pathlib.Path(storage_path).iterdir(): - if not file.is_file(): - continue - print(f"Loading {file}") +storage_files, supp_files = build_storage( + env_input, + env_storage, + env_storage_persistency, + env_storage_overwrite, + logger +) - if file.suffix.lower() == ".json": - with open(file) as f: - adapter.json.read_aas_json_file_into(object_store, f) - elif file.suffix.lower() == ".xml": - with open(file) as f: - adapter.xml.read_aas_xml_file_into(object_store, file) - elif file.suffix.lower() == ".aasx": - with aasx.AASXReader(file) as reader: - reader.read_into(object_store=object_store, file_store=file_store) +application = WSGIApp(storage_files, supp_files, **wsgi_optparams) - application = WSGIApp(object_store, file_store, **wsgi_optparams) -else: - print(f"STORAGE_TYPE must be either LOCAL_FILE or LOCAL_FILE_READ_ONLY! Current value: {storage_type}", - file=sys.stderr) +if __name__ == "__main__": + logger.info("WSGI entrypoint created. Serve this module with uWSGI/Gunicorn/etc.") diff --git a/server/app/py.typed b/server/app/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/server/app/pyproject.toml b/server/app/pyproject.toml new file mode 100644 index 000000000..030be7397 --- /dev/null +++ b/server/app/pyproject.toml @@ -0,0 +1,64 @@ +[build-system] +requires = [ + "setuptools>=45", + "wheel", + "setuptools_scm[toml]>=6.2" +] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +# Configure setuptools_scm for version management: +# - Automatically infers the version number from the most recent git tag +# - Generates a version.py file in the package directory +# - Allows for automatic versioning between releases (e.g., 1.0.1.dev4+g12345) +# If you want to use the version anywhere in the code, use +# ``` +# from app.version import version +# print(f"Project version: {version}") +# ``` +root = "../.." 
# Defines the path to the root of the repository version_file = "version.py" + +[project] +name = "basyx-python-server" +dynamic = ["version"] +description = "The Eclipse BaSyx Python Server, an implementation of the BaSyx AAS Server" #FIXME +authors = [ + { name = "The Eclipse BaSyx Authors", email = "admins@iat.rwth-aachen.de" } +] +readme = "README.md" +license = { file = "LICENSE" } +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Development Status :: 5 - Production/Stable" +] +requires-python = ">=3.10" +dependencies = [ + "urllib3>=1.26,<3", + "Werkzeug>=3.0.3,<4", +] + +[project.optional-dependencies] +dev = [ + "mypy", + "pycodestyle", + "codeblocks", + "schemathesis~=3.7", + "jsonschema~=4.7", + "hypothesis~=6.13", + "lxml-stubs~=0.5.1", +] + +[project.urls] +"Homepage" = "https://github.com/eclipse-basyx/basyx-python-sdk" + +[tool.setuptools] +packages = { find = { exclude = ["test*"] } } + +[tool.setuptools.package-data] +app = ["py.typed"] + +[tool.mypy] +exclude = "build/" diff --git a/server/app/util/__init__.py b/server/app/util/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/app/util/converters.py b/server/app/util/converters.py new file mode 100644 index 000000000..4e37c4702 --- /dev/null +++ b/server/app/util/converters.py @@ -0,0 +1,79 @@ +# Copyright (c) 2025 the Eclipse BaSyx Authors +# +# This program and the accompanying materials are made available under the terms of the MIT License, available in +# the LICENSE file of this project. +# +# SPDX-License-Identifier: MIT +""" +This module contains helper classes for converting various types between our Python SDK types +and the HTTP-API formats, such as: +- IdentifierToBase64URLConverter +- IdShortPathConverter +""" + +import base64 +import binascii + +import werkzeug.routing +import werkzeug.utils +from werkzeug.exceptions import BadRequest + +from basyx.aas import model + +from typing import List + +BASE64URL_ENCODING = "utf-8" + + +def base64url_decode(data: str) -> str: + try: + # If the requester omits the base64 padding, an exception will be raised. + # However, Python doesn't complain about too much padding, + # thus we simply always append two padding characters (==). + # See also: https://stackoverflow.com/a/49459036/4780052 + decoded = base64.urlsafe_b64decode(data + "==").decode(BASE64URL_ENCODING) + except binascii.Error: + raise BadRequest(f"Encoded data {data} is invalid base64url!") + except UnicodeDecodeError: + raise BadRequest(f"Encoded base64url value is not a valid {BASE64URL_ENCODING} string!") + return decoded + + +def base64url_encode(data: str) -> str: + encoded = base64.urlsafe_b64encode(data.encode(BASE64URL_ENCODING)).decode("ascii") + return encoded + + +class IdentifierToBase64URLConverter(werkzeug.routing.UnicodeConverter): + """ + A custom URL converter for Werkzeug routing that encodes and decodes + Identifiers using Base64 URL-safe encoding. + """ + def to_url(self, value: model.Identifier) -> str: + return super().to_url(base64url_encode(value)) + + def to_python(self, value: str) -> model.Identifier: + value = super().to_python(value) + decoded = base64url_decode(value) + return decoded + + +class IdShortPathConverter(werkzeug.routing.UnicodeConverter): + """ + A custom Werkzeug URL converter for handling dot-separated idShort paths and indexes. + + This converter joins a list of idShort strings into a dot-separated path for URLs + (e.g., ["submodel", "element", "1"] -> "submodel.element[1]", with numeric indices rendered + in brackets) and parses incoming URL paths back into a list, validating each idShort. + """ + + def to_url(self, value: List[str]) -> str: + id_short_path = model.Referable.build_id_short_path(value) + return super().to_url(id_short_path) + + def to_python(self, value: str) -> List[str]: + try: + parsed_id_short_path = model.Referable.parse_id_short_path(value) + except (ValueError, model.AASConstraintViolation) as e: + raise BadRequest(f"{value} is not a valid id_short path!") from e + return parsed_id_short_path
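For reference, a minimal standard-library sketch (hypothetical identifier) of the base64url round trip implemented in converters.py above, including the always-append-two-padding-characters trick that base64url_decode relies on:

import base64

identifier = "https://example.com/aas/1"  # hypothetical AAS identifier
token = base64.urlsafe_b64encode(identifier.encode("utf-8")).decode("ascii").rstrip("=")
# A client may omit the "=" padding in the URL segment; appending "==" always
# restores decodability because Python tolerates surplus padding (see the
# Stack Overflow link cited in base64url_decode):
decoded = base64.urlsafe_b64decode(token + "==").decode("utf-8")
assert decoded == identifier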
diff --git a/server/compose.yml b/server/compose.yml index 666484a5d..f7e014c37 100644 --- a/server/compose.yml +++ b/server/compose.yml @@ -1,3 +1,4 @@ +name: basyx-python-server services: app: build: @@ -6,4 +7,7 @@ services: ports: - "8080:80" volumes: + - ./input:/input - ./storage:/storage + environment: + STORAGE_PERSISTENCY: True diff --git a/server/test/__init__.py b/server/test/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/test/interfaces/__init__.py b/server/test/interfaces/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/sdk/test/adapter/test_http.py b/server/test/interfaces/test_repository.py similarity index 98% rename from sdk/test/adapter/test_http.py rename to server/test/interfaces/test_repository.py index 09dadf865..5177dfacb 100644 --- a/sdk/test/adapter/test_http.py +++ b/server/test/interfaces/test_repository.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 the Eclipse BaSyx Authors +# Copyright (c) 2025 the Eclipse BaSyx Authors # # This program and the accompanying materials are made available under the terms of the MIT License, available in # the LICENSE file of this project. @@ -34,7 +34,7 @@ from basyx.aas import model from basyx.aas.adapter.aasx import DictSupplementaryFileContainer -from basyx.aas.adapter.http import WSGIApp +from server.app.interfaces.repository import WSGIApp from basyx.aas.examples.data.example_aas import create_full_example from typing import Set
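To round this off, a minimal sketch of exercising the relocated repository app in-process with werkzeug's test client. It assumes the module is importable under the same path the test suite above uses; the stores start empty, so the listing returns no shells:

from werkzeug.test import Client
from basyx.aas import model
from basyx.aas.adapter.aasx import DictSupplementaryFileContainer
from server.app.interfaces.repository import WSGIApp

# Empty in-memory object and file stores for a fresh server instance:
app = WSGIApp(model.DictObjectStore(), DictSupplementaryFileContainer())
client = Client(app)
# The default base_path is "/api/v3.0"; GET /shells lists all AAS:
response = client.get("/api/v3.0/shells")
print(response.status_code, response.get_data(as_text=True))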