diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f37a94217c..c1954e4b66 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -105,8 +105,6 @@ repos:
             src/aiida/orm/utils/builders/computer.py|
             src/aiida/orm/utils/calcjob.py|
             src/aiida/orm/utils/node.py|
-            src/aiida/repository/backend/disk_object_store.py|
-            src/aiida/repository/backend/sandbox.py|
             src/aiida/restapi/common/utils.py|
             src/aiida/restapi/resources.py|
             src/aiida/restapi/run_api.py|
diff --git a/pyproject.toml b/pyproject.toml
index 4b3c2b3d6e..5a0ddbc4ed 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -353,9 +353,10 @@ disallow_subclassing_any = true
 disallow_untyped_calls = true
 disallow_untyped_defs = true
 module = [
-    'aiida.tools.graph.*',
-    'aiida.cmdline.params.*',
     'aiida.cmdline.groups.*',
+    'aiida.cmdline.params.*',
+    'aiida.repository.*',
+    'aiida.tools.graph.*',
     'aiida.tools.query.*'
 ]
 warn_return_any = true
diff --git a/src/aiida/cmdline/commands/cmd_archive.py b/src/aiida/cmdline/commands/cmd_archive.py
index 7f002e6387..6da3d32bee 100644
--- a/src/aiida/cmdline/commands/cmd_archive.py
+++ b/src/aiida/cmdline/commands/cmd_archive.py
@@ -26,6 +26,7 @@
 from aiida.common.exceptions import CorruptStorage, IncompatibleStorageSchema, UnreachableStorage
 from aiida.common.links import GraphTraversalRules
 from aiida.common.log import AIIDA_LOGGER
+from aiida.common.typing import FilePath
 from aiida.common.utils import DEFAULT_BATCH_SIZE, DEFAULT_FILTER_SIZE

 EXTRAS_MODE_EXISTING = ['keep_existing', 'update_existing', 'mirror', 'none']
@@ -488,7 +489,7 @@ def _import_archive_and_migrate(
     dry_run_success = f'import dry-run of archive {archive} completed. Profile storage unmodified.'

     with SandboxFolder(filepath=filepath) as temp_folder:
-        archive_path = archive
+        archive_path: FilePath = archive

         if web_based:
             echo.echo_report(f'downloading archive: {archive}')
@@ -501,6 +502,7 @@ def _import_archive_and_migrate(
             archive_path = temp_folder.get_abs_path('downloaded_archive.zip')
             echo.echo_success('archive downloaded, proceeding with import')

+        archive_path = str(archive_path)
         echo.echo_report(f'starting import: {archive}')
         try:
             _import_archive(archive_path, archive_format=archive_format, **import_kwargs)
@@ -508,7 +510,7 @@ def _import_archive_and_migrate(
         if try_migration:
             echo.echo_report(f'incompatible version detected for {archive}, trying migration')
             try:
-                new_path = temp_folder.get_abs_path('migrated_archive.aiida')
+                new_path = str(temp_folder.get_abs_path('migrated_archive.aiida'))
                 archive_format.migrate(archive_path, new_path, archive_format.latest_version, compression=0)
                 archive_path = new_path
             except Exception as sub_exception:
diff --git a/src/aiida/common/folders.py b/src/aiida/common/folders.py
index 37b8cb5c62..d2716ae761 100644
--- a/src/aiida/common/folders.py
+++ b/src/aiida/common/folders.py
@@ -17,9 +17,12 @@
 import pathlib
 import shutil
 import tempfile
+import typing as t
+from collections.abc import Iterator

 from . import timezone
 from .lang import type_check
+from .typing import FilePath, Self

 # If True, tries to make everything (dirs, files) group-writable.
 # Otherwise, tries to make everything only readable and writable by the user.
@@ -45,7 +48,7 @@ class Folder:
     to os.path.abspath or normpath are quite slow).
""" - def __init__(self, abspath, folder_limit=None): + def __init__(self, abspath: FilePath, folder_limit: FilePath | None = None): """Construct a new instance.""" abspath = os.path.abspath(abspath) if folder_limit is None: @@ -64,7 +67,7 @@ def __init__(self, abspath, folder_limit=None): self._folder_limit = folder_limit @property - def mode_dir(self): + def mode_dir(self) -> int: """Return the mode with which the folders should be created""" if GROUP_WRITABLE: return 0o770 @@ -72,14 +75,14 @@ def mode_dir(self): return 0o700 @property - def mode_file(self): + def mode_file(self) -> int: """Return the mode with which the files should be created""" if GROUP_WRITABLE: return 0o660 return 0o600 - def get_subfolder(self, subfolder, create=False, reset_limit=False): + def get_subfolder(self, subfolder: FilePath, create=False, reset_limit=False) -> Folder: """Return a Folder object pointing to a subfolder. :param subfolder: a string with the relative path of the subfolder, @@ -110,7 +113,7 @@ def get_subfolder(self, subfolder, create=False, reset_limit=False): return new_folder - def get_content_list(self, pattern='*', only_paths=True): + def get_content_list(self, pattern: str = '*', only_paths: bool = True) -> list: """Return a list of files (and subfolders) in the folder, matching a given pattern. Example: If you want to exclude files starting with a dot, you can @@ -134,7 +137,7 @@ def get_content_list(self, pattern='*', only_paths=True): return [(fname, not os.path.isdir(os.path.join(self.abspath, fname))) for fname in file_list] - def create_symlink(self, src, name): + def create_symlink(self, src: FilePath, name: FilePath) -> None: """Create a symlink inside the folder to the location 'src'. :param src: the location to which the symlink must point. Can be @@ -148,7 +151,7 @@ def create_symlink(self, src, name): # For symlinks, permissions should not be set - def insert_path(self, src, dest_name=None, overwrite=True): + def insert_path(self, src: FilePath, dest_name: FilePath | None = None, overwrite: bool = True) -> FilePath: """Copy a file to the folder. :param src: the source filename to copy @@ -205,7 +208,9 @@ def insert_path(self, src, dest_name=None, overwrite=True): return dest_abs_path - def create_file_from_filelike(self, filelike, filename, mode='wb', encoding=None): + def create_file_from_filelike( + self, filelike: t.IO[t.AnyStr], filename: FilePath, mode: str = 'wb', encoding: str | None = None + ) -> FilePath: """Create a file with the given filename from a filelike object. :param filelike: a filelike object whose contents to copy @@ -227,7 +232,7 @@ def create_file_from_filelike(self, filelike, filename, mode='wb', encoding=None return filepath - def remove_path(self, filename): + def remove_path(self, filename: FilePath) -> None: """Remove a file or folder from the folder. :param filename: the relative path name to remove @@ -241,7 +246,7 @@ def remove_path(self, filename): else: os.remove(dest_abs_path) - def get_abs_path(self, relpath, check_existence=False): + def get_abs_path(self, relpath: FilePath, check_existence: bool = False) -> FilePath: """Return an absolute path for a file or folder in this folder. 

         The advantage of using this method is that it checks that filename
@@ -268,7 +273,9 @@ def get_abs_path(self, relpath, check_existence=False):

         return dest_abs_path

     @contextlib.contextmanager
-    def open(self, name, mode='r', encoding='utf8', check_existence=False):
+    def open(
+        self, name: FilePath, mode: str = 'r', encoding: str | None = 'utf8', check_existence: bool = False
+    ) -> Iterator[t.Any]:
         """Open a file in the current folder and return the corresponding file object.

         :param check_existence: if False, just return the file path.
@@ -282,32 +289,32 @@ def open(self, name, mode='r', encoding='utf8', check_existence=False):
             yield handle

     @property
-    def abspath(self):
+    def abspath(self) -> FilePath:
         """The absolute path of the folder."""
         return self._abspath

     @property
-    def folder_limit(self):
+    def folder_limit(self) -> FilePath:
         """The folder limit that cannot be crossed when creating files and folders."""
         return self._folder_limit

-    def exists(self):
+    def exists(self) -> bool:
         """Return True if the folder exists, False otherwise."""
         return os.path.exists(self.abspath)

-    def isfile(self, relpath):
+    def isfile(self, relpath: FilePath) -> bool:
         """Return True if 'relpath' exists inside the folder and is a file,
         False otherwise.
         """
         return os.path.isfile(os.path.join(self.abspath, relpath))

-    def isdir(self, relpath):
+    def isdir(self, relpath: FilePath) -> bool:
         """Return True if 'relpath' exists inside the folder and is a directory,
         False otherwise.
         """
         return os.path.isdir(os.path.join(self.abspath, relpath))

-    def erase(self, create_empty_folder=False):
+    def erase(self, create_empty_folder: bool = False) -> None:
         """Erases the folder. Should be called only in very specific cases,
         in general folder should not be erased!
@@ -321,7 +328,7 @@ def erase(self, create_empty_folder=False):
         if create_empty_folder:
             self.create()

-    def create(self):
+    def create(self) -> None:
         """Creates the folder, if it does not exist on the disk yet.

         It will also create top directories, if absent.
@@ -331,7 +338,7 @@ def create(self):
         """
         os.makedirs(self.abspath, mode=self.mode_dir, exist_ok=True)

-    def replace_with_folder(self, srcdir, move=False, overwrite=False):
+    def replace_with_folder(self, srcdir: FilePath, move: bool = False, overwrite: bool = False) -> None:
         """This routine copies or moves the source folder 'srcdir' to the local folder pointed to by this Folder.

         :param srcdir: the source folder on the disk; this must be an absolute path
@@ -399,11 +406,11 @@ def __init__(self, filepath: pathlib.Path | None = None):

         super().__init__(abspath=tempfile.mkdtemp(dir=filepath))

-    def __enter__(self):
+    def __enter__(self) -> Self:
         """Enter a context and return self."""
         return self

-    def __exit__(self, exc_type, exc_value, traceback):
+    def __exit__(self, exc_type, exc_value, traceback) -> None:
         """Erase the temporary directory created in the constructor."""
         self.erase()
@@ -416,9 +423,7 @@ class SubmitTestFolder(Folder):
     not overwrite already existing created test folders.
     """

-    _sub_folder = None
-
-    def __init__(self, basepath=CALC_JOB_DRY_RUN_BASE_PATH):
+    def __init__(self, basepath: FilePath = CALC_JOB_DRY_RUN_BASE_PATH):
         """Construct and create the sandbox folder.

         The directory will be created in the current working directory with the name given by `basepath`.
@@ -451,9 +456,9 @@ def __init__(self, basepath=CALC_JOB_DRY_RUN_BASE_PATH):

         self._sub_folder = self.get_subfolder(os.path.relpath(subfolder_path, self.abspath), reset_limit=True)

-    def __enter__(self):
+    def __enter__(self) -> Folder:
         """Return the sub folder when entering the with statement."""
         return self._sub_folder

-    def __exit__(self, exc_type, exc_value, traceback):
+    def __exit__(self, exc_type, exc_value, traceback) -> None:
         """When context manager is exited, do not delete the folder."""
diff --git a/src/aiida/engine/daemon/execmanager.py b/src/aiida/engine/daemon/execmanager.py
index a718e8baa4..a15524053c 100644
--- a/src/aiida/engine/daemon/execmanager.py
+++ b/src/aiida/engine/daemon/execmanager.py
@@ -33,6 +33,7 @@
 from aiida.common.datastructures import CalcInfo, FileCopyOperation
 from aiida.common.folders import Folder, SandboxFolder
 from aiida.common.links import LinkType
+from aiida.common.typing import FilePath
 from aiida.engine.processes.exit_code import ExitCode
 from aiida.manage.configuration import get_config_option
 from aiida.orm import CalcJobNode, Code, FolderData, Node, PortableCode, RemoteData, load_node
@@ -694,7 +695,7 @@ def traverse(node_):


 async def retrieve_calculation(
-    calculation: CalcJobNode, transport: Transport, retrieved_temporary_folder: str
+    calculation: CalcJobNode, transport: Transport, retrieved_temporary_folder: FilePath
 ) -> FolderData | None:
     """Retrieve all the files of a completed job calculation using the given transport.
diff --git a/src/aiida/engine/processes/calcjobs/calcjob.py b/src/aiida/engine/processes/calcjobs/calcjob.py
index 4c9533c71b..e2d2bf24f8 100644
--- a/src/aiida/engine/processes/calcjobs/calcjob.py
+++ b/src/aiida/engine/processes/calcjobs/calcjob.py
@@ -26,6 +26,7 @@
 from aiida.common.folders import Folder
 from aiida.common.lang import classproperty, override
 from aiida.common.links import LinkType
+from aiida.common.typing import FilePath

 from ..exit_code import ExitCode
 from ..ports import PortNamespace
@@ -743,7 +744,7 @@ async def _perform_import(self):
         return self.parse(retrieved_temporary_folder.abspath)

     def parse(
-        self, retrieved_temporary_folder: Optional[str] = None, existing_exit_code: ExitCode | None = None
+        self, retrieved_temporary_folder: FilePath | None = None, existing_exit_code: ExitCode | None = None
     ) -> ExitCode:
         """Parse a retrieved job calculation.
@@ -771,7 +772,7 @@ def parse(

         # Call the retrieved output parser
         try:
-            exit_code_retrieved = self.parse_retrieved_output(retrieved_temporary_folder)
+            exit_code_retrieved = self.parse_retrieved_output(str(retrieved_temporary_folder))
         finally:
             if retrieved_temporary_folder is not None:
                 shutil.rmtree(retrieved_temporary_folder, ignore_errors=True)
@@ -1122,7 +1123,10 @@ def presubmit(self, folder: Folder) -> CalcInfo:
             job_tmpl.max_wallclock_seconds = max_wallclock_seconds

         submit_script_filename = self.node.get_option('submit_script_filename')
+        assert submit_script_filename is not None
         script_content = scheduler.get_submit_script(job_tmpl)
+        # TODO: mypy error: Argument 2 to "create_file_from_filelike" of "Folder"
+        # has incompatible type "Any | None"; expected "str | PurePath"
         folder.create_file_from_filelike(io.StringIO(script_content), submit_script_filename, 'w', encoding='utf8')

     def encoder(obj):
diff --git a/src/aiida/repository/backend/abstract.py b/src/aiida/repository/backend/abstract.py
index 01cd635d0a..8a4fceb2d5 100644
--- a/src/aiida/repository/backend/abstract.py
+++ b/src/aiida/repository/backend/abstract.py
@@ -10,12 +10,15 @@
 import hashlib
 import io
 import pathlib
-from typing import BinaryIO, Iterable, Iterator, List, Optional, Tuple, Union
+from collections.abc import Iterable, Iterator
+from typing import Any, BinaryIO, List, Optional, Tuple, Union

 from aiida.common.hashing import chunked_file_hash

 __all__ = ('AbstractRepositoryBackend',)

+InfoDictType = dict[str, Union[int, str, dict[str, int], dict[str, float]]]
+

 class AbstractRepositoryBackend(metaclass=abc.ABCMeta):
     """Class that defines the abstract interface for an object repository.
@@ -44,7 +47,7 @@ def key_format(self) -> Optional[str]:
         """

     @abc.abstractmethod
-    def initialise(self, **kwargs) -> None:
+    def initialise(self, **kwargs: Any) -> None:
         """Initialise the repository if it hasn't already been initialised.

         :param kwargs: parameters for the initialisation.
@@ -65,7 +68,7 @@ def erase(self) -> None:
         """

     @staticmethod
-    def is_readable_byte_stream(handle) -> bool:
+    def is_readable_byte_stream(handle: Any) -> bool:
         return hasattr(handle, 'read') and hasattr(handle, 'mode') and 'b' in handle.mode

     def put_object_from_filelike(self, handle: BinaryIO) -> str:
@@ -120,7 +123,7 @@ def list_objects(self) -> Iterable[str]:
         """

     @abc.abstractmethod
-    def get_info(self, detailed: bool = False, **kwargs) -> dict:
+    def get_info(self, detailed: bool = False) -> InfoDictType:
         """Returns relevant information about the content of the repository.

         :param detailed:
@@ -129,19 +132,6 @@ def get_info(self, detailed: bool = False, **kwargs) -> dict:

         :return: a dictionary with the information.
         """

-    @abc.abstractmethod
-    def maintain(self, dry_run: bool = False, live: bool = True, **kwargs) -> None:
-        """Performs maintenance operations.
-
-        :param dry_run:
-            flag to only print the actions that would be taken without actually executing them.
-
-        :param live:
-            flag to indicate to the backend whether AiiDA is live or not (i.e. if the profile of the
-            backend is currently being used/accessed). The backend is expected then to only allow (and
-            thus set by default) the operations that are safe to perform in this state.
-        """
-
     @contextlib.contextmanager
     def open(self, key: str) -> Iterator[BinaryIO]:  # type: ignore[return]
         """Open a file handle to an object stored under the given key.
@@ -168,7 +158,7 @@ def get_object_content(self, key: str) -> bytes:
             return handle.read()

     @abc.abstractmethod
-    def iter_object_streams(self, keys: List[str]) -> Iterator[Tuple[str, BinaryIO]]:
+    def iter_object_streams(self, keys: Iterable[str]) -> Iterator[Tuple[str, BinaryIO]]:
         """Return an iterator over the (read-only) byte streams of objects identified by key.

         .. note:: handles should only be read within the context of this iterator.
diff --git a/src/aiida/repository/backend/disk_object_store.py b/src/aiida/repository/backend/disk_object_store.py
index f1bd22faaa..f981158242 100644
--- a/src/aiida/repository/backend/disk_object_store.py
+++ b/src/aiida/repository/backend/disk_object_store.py
@@ -8,7 +8,7 @@
 from aiida.common.lang import type_check
 from aiida.storage.log import STORAGE_LOGGER

-from .abstract import AbstractRepositoryBackend
+from .abstract import AbstractRepositoryBackend, InfoDictType

 if t.TYPE_CHECKING:
     from disk_objectstore import Container
@@ -33,8 +33,8 @@ class DiskObjectStoreRepositoryBackend(AbstractRepositoryBackend):
     """

     def __init__(self, container: 'Container'):
-        if not t.TYPE_CHECKING:
-            from disk_objectstore import Container
+        from disk_objectstore import Container
+
         type_check(container, Container)
         self._container = container
@@ -58,7 +58,7 @@ def key_format(self) -> t.Optional[str]:
         with self._container as container:
             return container.hash_type

-    def initialise(self, **kwargs) -> None:
+    def initialise(self, **kwargs: t.Any) -> None:
         """Initialise the repository if it hasn't already been initialised.

         :param kwargs: parameters for the initialisation.
@@ -72,7 +72,7 @@ def is_initialised(self) -> bool:
         with self._container as container:
             return container.is_initialised

-    def erase(self):
+    def erase(self) -> None:
         """Delete the repository itself and all its contents."""
         try:
             with self._container as container:
@@ -112,8 +112,8 @@ def open(self, key: str) -> t.Iterator[t.BinaryIO]:
             with container.get_object_stream(key) as handle:
                 yield handle  # type: ignore[misc]

-    def iter_object_streams(self, keys: t.List[str]) -> t.Iterator[t.Tuple[str, t.BinaryIO]]:
-        with self._container.get_objects_stream_and_meta(keys) as triplets:
+    def iter_object_streams(self, keys: t.Iterable[str]) -> t.Iterator[t.Tuple[str, t.BinaryIO]]:
+        with self._container.get_objects_stream_and_meta(keys) as triplets:  # type: ignore[arg-type]
             for key, stream, _ in triplets:
                 assert stream is not None
                 yield key, stream  # type: ignore[misc]
@@ -145,7 +145,7 @@ def get_object_hash(self, key: str) -> str:
             return super().get_object_hash(key)
         return key

-    def maintain(  # type: ignore[override]
+    def maintain(
         self,
         dry_run: bool = False,
         live: bool = True,
@@ -154,7 +154,7 @@ def maintain(  # type: ignore[override]
         clean_storage: t.Optional[bool] = None,
         do_vacuum: t.Optional[bool] = None,
         compress: bool = False,
-    ) -> dict:
+    ) -> None:
         """Performs maintenance operations.

         :param live: if True, will only perform operations that are safe to do while the repository is in use.
@@ -162,7 +162,8 @@ def maintain(  # type: ignore[override]
         :param do_repack: flag for forcing the re-packing of already packed files.
         :param clean_storage: flag for forcing the cleaning of soft-deleted files from the repository.
         :param do_vacuum: flag for forcing the vacuuming of the internal database when cleaning the repository.
-        :param compress: flag for compressing the data when packing loose files. Set to ``Compress.AUTO`` if ``True``.
+        :param compress: flag for compressing the data when packing loose files.
+            Set to ``CompressMode.AUTO`` if ``True``.

         :return: a dictionary with information on the operations performed.
         """
         from disk_objectstore import CompressMode
@@ -171,13 +172,14 @@ def maintain(

         if live and (do_repack or clean_storage or do_vacuum):
             overrides = {'do_repack': do_repack, 'clean_storage': clean_storage, 'do_vacuum': do_vacuum}
-            keys = ', '.join([key for key, override in overrides.items() if override is True])  # type: ignore
+            keys = ', '.join([key for key, override in overrides.items() if override is True])
             raise ValueError(f'The following overrides were enabled but cannot be if `live=True`: {keys}')

         pack_loose = True if pack_loose is None else pack_loose

+        compress_mode = CompressMode.NO
         if compress is True:
-            compress = CompressMode.AUTO
+            compress_mode = CompressMode.AUTO

         if live:
             do_repack = False
@@ -196,7 +198,7 @@ def maintain(
             if not dry_run:
                 with get_progress_reporter()(total=1) as progress:
                     callback = create_callback(progress)
-                    container.pack_all_loose(compress=compress, callback=callback)
+                    container.pack_all_loose(compress=compress_mode, callback=callback)  # type: ignore[arg-type]

         if do_repack:
             files_numb = container.count_objects().packed
@@ -205,19 +207,19 @@ def maintain(
             if not dry_run:
                 with get_progress_reporter()(total=1) as progress:
                     callback = create_callback(progress)
-                    container.repack(callback=callback)
+                    container.repack(callback=callback)  # type: ignore[arg-type]

         if clean_storage:
             logger.report(f'Cleaning the repository database (with `vacuum={do_vacuum}`) ...')
             if not dry_run:
                 container.clean_storage(vacuum=do_vacuum)

-    def get_info(  # type: ignore[override]
+    def get_info(
         self,
-        detailed=False,
-    ) -> t.Dict[str, t.Union[int, str, t.Dict[str, int], t.Dict[str, float]]]:
+        detailed: bool = False,
+    ) -> InfoDictType:
         """Return information on configuration and content of the repository."""
-        output_info: t.Dict[str, t.Union[int, str, t.Dict[str, int], t.Dict[str, float]]] = {}
+        output_info: InfoDictType = {}

         with self._container as container:
             output_info['SHA-hash algorithm'] = container.hash_type
diff --git a/src/aiida/repository/backend/sandbox.py b/src/aiida/repository/backend/sandbox.py
index 56fc1f7aea..ed5807374d 100644
--- a/src/aiida/repository/backend/sandbox.py
+++ b/src/aiida/repository/backend/sandbox.py
@@ -33,7 +33,7 @@ def __str__(self) -> str:
             return f'SandboxRepository: {self._sandbox.abspath if self._sandbox else "null"}'
         return 'SandboxRepository: <uninitialised>'

-    def __del__(self):
+    def __del__(self) -> None:
         """Delete the entire sandbox folder if it was instantiated and still exists."""
         self.erase()
@@ -49,7 +49,7 @@ def uuid(self) -> str | None:
     def key_format(self) -> str | None:
         return 'uuid4'

-    def initialise(self, **kwargs) -> None:
+    def initialise(self, **kwargs: t.Any) -> None:
         """Initialise the repository if it hasn't already been initialised.

         :param kwargs: parameters for the initialisation.
@@ -63,14 +63,14 @@ def is_initialised(self) -> bool:
         return isinstance(self._sandbox, SandboxFolder)

     @property
-    def sandbox(self):
+    def sandbox(self) -> SandboxFolder:
         """Return the sandbox instance of this repository."""
         if self._sandbox is None:
             self._sandbox = SandboxFolder(filepath=pathlib.Path(self._filepath) if self._filepath is not None else None)
         return self._sandbox

-    def erase(self):
+    def erase(self) -> None:
         """Delete the repository itself and all its contents."""
         if getattr(self, '_sandbox', None) is not None:
             try:
@@ -109,7 +109,7 @@ def open(self, key: str) -> t.Iterator[t.BinaryIO]:
         with self.sandbox.open(key, mode='rb') as handle:
             yield handle

-    def iter_object_streams(self, keys: list[str]) -> t.Iterator[tuple[str, t.BinaryIO]]:
+    def iter_object_streams(self, keys: t.Iterable[str]) -> t.Iterator[tuple[str, t.BinaryIO]]:
         for key in keys:
             with self.open(key) as handle:
                 yield key, handle
@@ -122,8 +122,5 @@ def delete_objects(self, keys: list[str]) -> None:
     def list_objects(self) -> t.Iterable[str]:
         return self.sandbox.get_content_list()

-    def maintain(self, dry_run: bool = False, live: bool = True, **kwargs) -> None:
-        raise NotImplementedError
-
-    def get_info(self, detailed: bool = False, **kwargs) -> dict:
+    def get_info(self, detailed: bool = False) -> t.NoReturn:
         raise NotImplementedError
diff --git a/src/aiida/repository/common.py b/src/aiida/repository/common.py
index 86e00a9de6..20948d129c 100644
--- a/src/aiida/repository/common.py
+++ b/src/aiida/repository/common.py
@@ -65,7 +65,7 @@ def __init__(
         self._objects = objects or {}

     @classmethod
-    def from_serialized(cls, serialized: dict, name='') -> 'File':
+    def from_serialized(cls, serialized: dict[str, typing.Any], name: str = '') -> 'File':
         """Construct a new instance from a serialized instance.

         :param serialized: the serialized instance.
@@ -80,11 +80,9 @@ def from_serialized(cls, serialized: dict, name='') -> 'File':
             key = None

         objects = {name: File.from_serialized(obj, name) for name, obj in serialized.get('o', {}).items()}
-        instance = cls.__new__(cls)
-        instance.__init__(name, file_type, key, objects)  # type: ignore[misc]
-        return instance
+        return cls(name, file_type, key, objects)

-    def serialize(self) -> dict:
+    def serialize(self) -> dict[str, typing.Any]:
         """Serialize the metadata into a JSON-serializable format.

         .. note:: the serialization format is optimized to reduce the size in bytes.
@@ -125,7 +123,7 @@ def objects(self) -> typing.Dict[str, 'File']:
         """Return the objects of the file object."""
         return self._objects

-    def __eq__(self, other) -> bool:
+    def __eq__(self, other: object) -> bool:
         """Return whether this instance is equal to another file object instance."""
         if not isinstance(other, self.__class__):
             return False
@@ -136,6 +134,6 @@ def __eq__(self, other) -> bool:

         return equal_attributes and equal_objects

-    def __repr__(self):
+    def __repr__(self) -> str:
         args = (self.name, self.file_type.value, self.key, self.objects.items())
         return 'File<name={}, file_type={}, key={}, objects={}>'.format(*args)
diff --git a/src/aiida/repository/repository.py b/src/aiida/repository/repository.py
index 4d58ab73b7..b1836fb56b 100644
--- a/src/aiida/repository/repository.py
+++ b/src/aiida/repository/repository.py
@@ -82,7 +82,7 @@ def serialize(self) -> Dict[str, Any]:
         return self._directory.serialize()

     @classmethod
-    def flatten(cls, serialized=Optional[Dict[str, Any]], delimiter: str = '/') -> Dict[str, Optional[str]]:
+    def flatten(cls, serialized: Optional[Dict[str, Any]], delimiter: str = '/') -> Dict[str, Optional[str]]:
         """Flatten the serialized content of a repository into a mapping of path -> key or None (if folder).

         Note, all folders are represented in the flattened output, and their path is suffixed with the delimiter.
@@ -205,7 +205,7 @@ def get_file_keys(self) -> List[str]:
         """
         file_keys: List[str] = []

-        def _add_file_keys(keys, objects):
+        def _add_file_keys(keys: list[str], objects: dict[str, File]) -> None:
             """Recursively add keys of all file objects to the keys list."""
             for obj in objects.values():
                 if obj.file_type == FileType.FILE and obj.key is not None:
diff --git a/src/aiida/tools/archive/create.py b/src/aiida/tools/archive/create.py
index 43dafbf0f1..e07dca84b9 100644
--- a/src/aiida/tools/archive/create.py
+++ b/src/aiida/tools/archive/create.py
@@ -683,7 +683,7 @@ def _stream_repo_files(
             f'Backend repository key format incompatible: {repository.key_format!r} != {key_format!r}'
         )
     with get_progress_reporter()(desc='Archiving files: ', total=len(all_keys)) as progress:
-        for key, stream in repository.iter_object_streams(all_keys):  # type: ignore[arg-type]
+        for key, stream in repository.iter_object_streams(all_keys):
            # to-do should we assume the key here is correct, or always re-compute and check?
             writer.put_object(stream, key=key)
             progress.update()
diff --git a/src/aiida/tools/archive/imports.py b/src/aiida/tools/archive/imports.py
index eb0e1af4e3..8832499e32 100644
--- a/src/aiida/tools/archive/imports.py
+++ b/src/aiida/tools/archive/imports.py
@@ -1244,7 +1244,7 @@ def _add_files_to_repo(backend_from: StorageBackend, backend_to: StorageBackend,
     repository_to = backend_to.get_repository()
     repository_from = backend_from.get_repository()
     with get_progress_reporter()(desc='Adding archive files to repository', total=len(new_keys)) as progress:
-        for key, handle in repository_from.iter_object_streams(new_keys):  # type: ignore[arg-type]
+        for key, handle in repository_from.iter_object_streams(new_keys):
             backend_key = repository_to.put_object_from_filelike(handle)
             if backend_key != key:
                 raise ImportValidationError(
diff --git a/src/aiida/transports/plugins/ssh_async.py b/src/aiida/transports/plugins/ssh_async.py
index 37f531903c..14a525ec42 100644
--- a/src/aiida/transports/plugins/ssh_async.py
+++ b/src/aiida/transports/plugins/ssh_async.py
@@ -1282,7 +1282,7 @@ async def copy_from_remote_to_remote_async(
             self.logger.error('Unknown parameters passed to copy_from_remote_to_remote')

         with SandboxFolder() as sandbox:
-            await self.get_async(remotesource, sandbox.abspath, **kwargs_get)
+            await self.get_async(remotesource, sandbox.abspath, **kwargs_get)  # type: ignore[arg-type]

             # Then we scan the full sandbox directory with get_content_list,
             # because copying directly from sandbox.abspath would not work
             # to copy a single file into another single file, and copying
diff --git a/src/aiida/transports/transport.py b/src/aiida/transports/transport.py
index 0045173c97..d88d9aaa1d 100644
--- a/src/aiida/transports/transport.py
+++ b/src/aiida/transports/transport.py
@@ -421,7 +421,10 @@ def copy_from_remote_to_remote(
             self.logger.error('Unknown parameters passed to copy_from_remote_to_remote')

         with SandboxFolder() as sandbox:
-            self.get(remotesource, sandbox.abspath, **kwargs_get)
+            # TODO: mypy error: Argument 2 to "get" of "Transport"
+            # has incompatible type "str | PurePath";
+            # expected "str | Path | PurePosixPath"
+            self.get(remotesource, sandbox.abspath, **kwargs_get)  # type: ignore[arg-type]

             # Then we scan the full sandbox directory with get_content_list,
             # because copying directly from sandbox.abspath would not work
             # to copy a single file into another single file, and copying
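
Reviewer note: several hunks above import `FilePath` and `Self` from `aiida.common.typing`, but that module is not itself part of the hunks shown. Judging from the mypy TODO comments in the diff (`expected "str | PurePath"`), a plausible sketch of what the module contains is the following; this is a hypothetical reconstruction for illustration, not the actual file:

```python
# Hypothetical sketch of src/aiida/common/typing.py (not shown in this diff);
# the real module may differ.
from __future__ import annotations

import pathlib
import sys
import typing as t

# Alias used for arguments that accept either a string or a path object, e.g.
# ``Folder.__init__(abspath: FilePath)`` in the hunks above.
FilePath = t.Union[str, pathlib.PurePath]

# ``typing.Self`` (the return annotation of ``SandboxFolder.__enter__``) only
# exists in the standard library from Python 3.11, so a module like this would
# typically fall back to the ``typing_extensions`` backport.
if sys.version_info >= (3, 11):
    from typing import Self
else:
    from typing_extensions import Self

__all__ = ('FilePath', 'Self')
```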
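The practical effect of the `FilePath` alias is visible in the `folders.py` hunks: path-like parameters of `Folder` and its subclasses accept both strings and path objects, while return values may need an explicit `str(...)` conversion, as `_import_archive_and_migrate` now does. A minimal usage sketch assuming the alias above; the file names here are made up for illustration:

```python
import io
import pathlib

from aiida.common.folders import SandboxFolder

with SandboxFolder() as sandbox:
    # ``filename`` is annotated as ``FilePath``: both ``str`` and
    # ``pathlib.PurePath`` arguments should type-check.
    sandbox.create_file_from_filelike(io.BytesIO(b'hello'), 'greeting.txt')
    sandbox.create_file_from_filelike(io.BytesIO(b'hello'), pathlib.PurePath('greeting2.txt'))

    # ``get_abs_path`` now returns ``FilePath`` rather than an implicit ``Any``,
    # so callers that need a plain string must convert explicitly, as the
    # ``cmd_archive.py`` hunk does with ``str(temp_folder.get_abs_path(...))``.
    path = str(sandbox.get_abs_path('greeting.txt', check_existence=True))
```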