2 changes: 0 additions & 2 deletions .pre-commit-config.yaml
@@ -105,8 +105,6 @@ repos:
src/aiida/orm/utils/builders/computer.py|
src/aiida/orm/utils/calcjob.py|
src/aiida/orm/utils/node.py|
src/aiida/repository/backend/disk_object_store.py|
src/aiida/repository/backend/sandbox.py|
src/aiida/restapi/common/utils.py|
src/aiida/restapi/resources.py|
src/aiida/restapi/run_api.py|
5 changes: 3 additions & 2 deletions pyproject.toml
@@ -353,9 +353,10 @@ disallow_subclassing_any = true
disallow_untyped_calls = true
disallow_untyped_defs = true
module = [
'aiida.tools.graph.*',
'aiida.cmdline.params.*',
'aiida.cmdline.groups.*',
'aiida.cmdline.params.*',
'aiida.repository.*',
'aiida.tools.graph.*',
'aiida.tools.query.*'
]
warn_return_any = true
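For context: with disallow_untyped_defs = true, every function in the listed modules (now including 'aiida.repository.*') must be fully annotated. A minimal, self-contained sketch of what that flag rejects versus accepts — the function names are hypothetical and not taken from the AiiDA code base:

from typing import BinaryIO


def put_object(handle):
    # Flagged by mypy under disallow_untyped_defs: no parameter or return annotations.
    return handle.read()


def put_object_typed(handle: BinaryIO) -> bytes:
    # Accepted: every parameter and the return value are annotated.
    return handle.read()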
6 changes: 4 additions & 2 deletions src/aiida/cmdline/commands/cmd_archive.py
@@ -26,6 +26,7 @@
from aiida.common.exceptions import CorruptStorage, IncompatibleStorageSchema, UnreachableStorage
from aiida.common.links import GraphTraversalRules
from aiida.common.log import AIIDA_LOGGER
from aiida.common.typing import FilePath
from aiida.common.utils import DEFAULT_BATCH_SIZE, DEFAULT_FILTER_SIZE

EXTRAS_MODE_EXISTING = ['keep_existing', 'update_existing', 'mirror', 'none']
@@ -488,7 +489,7 @@ def _import_archive_and_migrate(
dry_run_success = f'import dry-run of archive {archive} completed. Profile storage unmodified.'

with SandboxFolder(filepath=filepath) as temp_folder:
archive_path = archive
archive_path: FilePath = archive

if web_based:
echo.echo_report(f'downloading archive: {archive}')
@@ -501,14 +502,15 @@
archive_path = temp_folder.get_abs_path('downloaded_archive.zip')
echo.echo_success('archive downloaded, proceeding with import')

archive_path = str(archive_path)
echo.echo_report(f'starting import: {archive}')
try:
_import_archive(archive_path, archive_format=archive_format, **import_kwargs)
except IncompatibleStorageSchema as exception:
if try_migration:
echo.echo_report(f'incompatible version detected for {archive}, trying migration')
try:
new_path = temp_folder.get_abs_path('migrated_archive.aiida')
new_path = str(temp_folder.get_abs_path('migrated_archive.aiida'))
archive_format.migrate(archive_path, new_path, archive_format.latest_version, compression=0)
archive_path = new_path
except Exception as sub_exception:
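The FilePath name imported above (and the Self name imported in folders.py below) comes from the new aiida.common.typing module, whose definition is not part of this diff. Judging from the mypy message quoted later in calcjob.py ('expected "str | PurePath"'), it is presumably an alias along the following lines; treat this as an assumption, not the actual module:

from __future__ import annotations

import pathlib
import sys
from typing import Union

# Hypothetical reconstruction of aiida/common/typing.py (not shown in this diff).
FilePath = Union[str, pathlib.PurePath]

if sys.version_info >= (3, 11):
    from typing import Self
else:
    from typing_extensions import Self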
57 changes: 31 additions & 26 deletions src/aiida/common/folders.py
@@ -17,9 +17,12 @@
import pathlib
import shutil
import tempfile
import typing as t
from collections.abc import Iterator

from . import timezone
from .lang import type_check
from .typing import FilePath, Self

# If True, tries to make everything (dirs, files) group-writable.
# Otherwise, tries to make everything only readable and writable by the user.
@@ -45,7 +48,7 @@ class Folder:
to os.path.abspath or normpath are quite slow).
"""

def __init__(self, abspath, folder_limit=None):
def __init__(self, abspath: FilePath, folder_limit: FilePath | None = None):
"""Construct a new instance."""
abspath = os.path.abspath(abspath)
if folder_limit is None:
@@ -64,22 +67,22 @@ def __init__(self, abspath, folder_limit=None):
self._folder_limit = folder_limit

@property
def mode_dir(self):
def mode_dir(self) -> int:
"""Return the mode with which the folders should be created"""
if GROUP_WRITABLE:
return 0o770

return 0o700

@property
def mode_file(self):
def mode_file(self) -> int:
"""Return the mode with which the files should be created"""
if GROUP_WRITABLE:
return 0o660

return 0o600

def get_subfolder(self, subfolder, create=False, reset_limit=False):
def get_subfolder(self, subfolder: FilePath, create=False, reset_limit=False) -> Folder:
"""Return a Folder object pointing to a subfolder.

:param subfolder: a string with the relative path of the subfolder,
@@ -110,7 +113,7 @@ def get_subfolder(self, subfolder, create=False, reset_limit=False):

return new_folder

def get_content_list(self, pattern='*', only_paths=True):
def get_content_list(self, pattern: str = '*', only_paths: bool = True) -> list:
"""Return a list of files (and subfolders) in the folder, matching a given pattern.

Example: If you want to exclude files starting with a dot, you can
@@ -134,7 +137,7 @@ def get_content_list(self, pattern='*', only_paths=True):

return [(fname, not os.path.isdir(os.path.join(self.abspath, fname))) for fname in file_list]

def create_symlink(self, src, name):
def create_symlink(self, src: FilePath, name: FilePath) -> None:
"""Create a symlink inside the folder to the location 'src'.

:param src: the location to which the symlink must point. Can be
@@ -148,7 +151,7 @@ def create_symlink(self, src, name):

# For symlinks, permissions should not be set

def insert_path(self, src, dest_name=None, overwrite=True):
def insert_path(self, src: FilePath, dest_name: FilePath | None = None, overwrite: bool = True) -> FilePath:
"""Copy a file to the folder.

:param src: the source filename to copy
@@ -205,7 +208,9 @@ def insert_path(self, src, dest_name=None, overwrite=True):

return dest_abs_path

def create_file_from_filelike(self, filelike, filename, mode='wb', encoding=None):
def create_file_from_filelike(
self, filelike: t.IO[t.AnyStr], filename: FilePath, mode: str = 'wb', encoding: str | None = None
) -> FilePath:
"""Create a file with the given filename from a filelike object.

:param filelike: a filelike object whose contents to copy
@@ -227,7 +232,7 @@ def create_file_from_filelike(self, filelike, filename, mode='wb', encoding=None

return filepath

def remove_path(self, filename):
def remove_path(self, filename: FilePath) -> None:
"""Remove a file or folder from the folder.

:param filename: the relative path name to remove
@@ -241,7 +246,7 @@ def remove_path(self, filename):
else:
os.remove(dest_abs_path)

def get_abs_path(self, relpath, check_existence=False):
def get_abs_path(self, relpath: FilePath, check_existence: bool = False) -> FilePath:
"""Return an absolute path for a file or folder in this folder.

The advantage of using this method is that it checks that filename
@@ -268,7 +273,9 @@ def get_abs_path(self, relpath, check_existence=False):
return dest_abs_path

@contextlib.contextmanager
def open(self, name, mode='r', encoding='utf8', check_existence=False):
def open(
self, name: FilePath, mode: str = 'r', encoding: str | None = 'utf8', check_existence: bool = False
) -> Iterator[t.Any]:
"""Open a file in the current folder and return the corresponding file object.

:param check_existence: if False, just return the file path.
@@ -282,32 +289,32 @@ def open(self, name, mode='r', encoding='utf8', check_existence=False):
yield handle

@property
def abspath(self):
def abspath(self) -> FilePath:
"""The absolute path of the folder."""
return self._abspath

@property
def folder_limit(self):
def folder_limit(self) -> FilePath:
"""The folder limit that cannot be crossed when creating files and folders."""
return self._folder_limit

def exists(self):
def exists(self) -> bool:
"""Return True if the folder exists, False otherwise."""
return os.path.exists(self.abspath)

def isfile(self, relpath):
def isfile(self, relpath: FilePath) -> bool:
"""Return True if 'relpath' exists inside the folder and is a file,
False otherwise.
"""
return os.path.isfile(os.path.join(self.abspath, relpath))

def isdir(self, relpath):
def isdir(self, relpath: FilePath) -> bool:
"""Return True if 'relpath' exists inside the folder and is a directory,
False otherwise.
"""
return os.path.isdir(os.path.join(self.abspath, relpath))

def erase(self, create_empty_folder=False):
def erase(self, create_empty_folder: bool = False) -> None:
"""Erases the folder. Should be called only in very specific cases,
in general folder should not be erased!

@@ -321,7 +328,7 @@ def erase(self, create_empty_folder=False):
if create_empty_folder:
self.create()

def create(self):
def create(self) -> None:
"""Creates the folder, if it does not exist on the disk yet.

It will also create top directories, if absent.
@@ -331,7 +338,7 @@ def create(self):
"""
os.makedirs(self.abspath, mode=self.mode_dir, exist_ok=True)

def replace_with_folder(self, srcdir, move=False, overwrite=False):
def replace_with_folder(self, srcdir: FilePath, move: bool = False, overwrite: bool = False) -> None:
"""This routine copies or moves the source folder 'srcdir' to the local folder pointed to by this Folder.

:param srcdir: the source folder on the disk; this must be an absolute path
@@ -399,11 +406,11 @@ def __init__(self, filepath: pathlib.Path | None = None):

super().__init__(abspath=tempfile.mkdtemp(dir=filepath))

def __enter__(self):
def __enter__(self) -> Self:
"""Enter a context and return self."""
return self

def __exit__(self, exc_type, exc_value, traceback):
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""Erase the temporary directory created in the constructor."""
self.erase()

@@ -416,9 +423,7 @@ class SubmitTestFolder(Folder):
not overwrite already existing created test folders.
"""

_sub_folder = None

def __init__(self, basepath=CALC_JOB_DRY_RUN_BASE_PATH):
def __init__(self, basepath: FilePath = CALC_JOB_DRY_RUN_BASE_PATH):
"""Construct and create the sandbox folder.

The directory will be created in the current working directory with the name given by `basepath`.
@@ -451,9 +456,9 @@ def __init__(self, basepath=CALC_JOB_DRY_RUN_BASE_PATH):

self._sub_folder = self.get_subfolder(os.path.relpath(subfolder_path, self.abspath), reset_limit=True)

def __enter__(self):
def __enter__(self) -> Folder:
"""Return the sub folder that should be Called when entering in the with statement."""
return self._sub_folder

def __exit__(self, exc_type, exc_value, traceback):
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""When context manager is exited, do not delete the folder."""
3 changes: 2 additions & 1 deletion src/aiida/engine/daemon/execmanager.py
@@ -33,6 +33,7 @@
from aiida.common.datastructures import CalcInfo, FileCopyOperation
from aiida.common.folders import Folder, SandboxFolder
from aiida.common.links import LinkType
from aiida.common.typing import FilePath
from aiida.engine.processes.exit_code import ExitCode
from aiida.manage.configuration import get_config_option
from aiida.orm import CalcJobNode, Code, FolderData, Node, PortableCode, RemoteData, load_node
@@ -694,7 +695,7 @@ def traverse(node_):


async def retrieve_calculation(
calculation: CalcJobNode, transport: Transport, retrieved_temporary_folder: str
calculation: CalcJobNode, transport: Transport, retrieved_temporary_folder: FilePath
) -> FolderData | None:
"""Retrieve all the files of a completed job calculation using the given transport.

8 changes: 6 additions & 2 deletions src/aiida/engine/processes/calcjobs/calcjob.py
@@ -26,6 +26,7 @@
from aiida.common.folders import Folder
from aiida.common.lang import classproperty, override
from aiida.common.links import LinkType
from aiida.common.typing import FilePath

from ..exit_code import ExitCode
from ..ports import PortNamespace
@@ -743,7 +744,7 @@ async def _perform_import(self):
return self.parse(retrieved_temporary_folder.abspath)

def parse(
self, retrieved_temporary_folder: Optional[str] = None, existing_exit_code: ExitCode | None = None
self, retrieved_temporary_folder: FilePath | None = None, existing_exit_code: ExitCode | None = None
) -> ExitCode:
"""Parse a retrieved job calculation.

@@ -771,7 +772,7 @@ def parse(

# Call the retrieved output parser
try:
exit_code_retrieved = self.parse_retrieved_output(retrieved_temporary_folder)
exit_code_retrieved = self.parse_retrieved_output(str(retrieved_temporary_folder))
finally:
if retrieved_temporary_folder is not None:
shutil.rmtree(retrieved_temporary_folder, ignore_errors=True)
@@ -1122,7 +1123,10 @@ def presubmit(self, folder: Folder) -> CalcInfo:
job_tmpl.max_wallclock_seconds = max_wallclock_seconds

submit_script_filename = self.node.get_option('submit_script_filename')
assert submit_script_filename is not None
script_content = scheduler.get_submit_script(job_tmpl)
# TODO: mypy error: Argument 2 to "create_file_from_filelike" of "Folder"
# has incompatible type "Any | None"; expected "str | PurePath"
folder.create_file_from_filelike(io.StringIO(script_content), submit_script_filename, 'w', encoding='utf8')

def encoder(obj):
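The assert added before get_submit_script is the standard way of narrowing an Optional value for mypy; the TODO left in the diff points at the loose Any | None return type of get_option itself. For reference, a self-contained sketch of the narrowing pattern — the helper below is hypothetical and only stands in for CalcJobNode.get_option:

from typing import Optional


def get_option(name: str) -> Optional[str]:
    # Stand-in for CalcJobNode.get_option, which may return None.
    return 'submit.sh' if name == 'submit_script_filename' else None


filename = get_option('submit_script_filename')
assert filename is not None  # narrows Optional[str] to str for the type checker
print(filename.upper())  # mypy now allows str-only operations on `filename`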
26 changes: 8 additions & 18 deletions src/aiida/repository/backend/abstract.py
@@ -10,12 +10,15 @@
import hashlib
import io
import pathlib
from typing import BinaryIO, Iterable, Iterator, List, Optional, Tuple, Union
from collections.abc import Iterable, Iterator
from typing import Any, BinaryIO, List, Optional, Tuple, Union

from aiida.common.hashing import chunked_file_hash

__all__ = ('AbstractRepositoryBackend',)

InfoDictType = dict[str, Union[int, str, dict[str, int], dict[str, float]]]
Author comment: Would be nice to have a better type than this 😅



class AbstractRepositoryBackend(metaclass=abc.ABCMeta):
"""Class that defines the abstract interface for an object repository.
Expand Down Expand Up @@ -44,7 +47,7 @@ def key_format(self) -> Optional[str]:
"""

@abc.abstractmethod
def initialise(self, **kwargs) -> None:
def initialise(self, **kwargs: Any) -> None:
"""Initialise the repository if it hasn't already been initialised.

:param kwargs: parameters for the initialisation.
Expand All @@ -65,7 +68,7 @@ def erase(self) -> None:
"""

@staticmethod
def is_readable_byte_stream(handle) -> bool:
def is_readable_byte_stream(handle: Any) -> bool:
return hasattr(handle, 'read') and hasattr(handle, 'mode') and 'b' in handle.mode

def put_object_from_filelike(self, handle: BinaryIO) -> str:
Expand Down Expand Up @@ -120,7 +123,7 @@ def list_objects(self) -> Iterable[str]:
"""

@abc.abstractmethod
def get_info(self, detailed: bool = False, **kwargs) -> dict:
def get_info(self, detailed: bool = False) -> InfoDictType:
"""Returns relevant information about the content of the repository.

:param detailed:
Expand All @@ -129,19 +132,6 @@ def get_info(self, detailed: bool = False, **kwargs) -> dict:
:return: a dictionary with the information.
"""

@abc.abstractmethod
def maintain(self, dry_run: bool = False, live: bool = True, **kwargs) -> None:
Author comment: This was causing Liskov substitution principle violations in the subclasses. Ultimately, the "maintain" operation will be specialized for each backend, so it probably doesn't make sense to have it here.

"""Performs maintenance operations.

:param dry_run:
flag to only print the actions that would be taken without actually executing them.

:param live:
flag to indicate to the backend whether AiiDA is live or not (i.e. if the profile of the
backend is currently being used/accessed). The backend is expected then to only allow (and
thus set by default) the operations that are safe to perform in this state.
"""

@contextlib.contextmanager
def open(self, key: str) -> Iterator[BinaryIO]: # type: ignore[return]
"""Open a file handle to an object stored under the given key.
Expand All @@ -168,7 +158,7 @@ def get_object_content(self, key: str) -> bytes:
return handle.read()

@abc.abstractmethod
def iter_object_streams(self, keys: List[str]) -> Iterator[Tuple[str, BinaryIO]]:
def iter_object_streams(self, keys: Iterable[str]) -> Iterator[Tuple[str, BinaryIO]]:
"""Return an iterator over the (read-only) byte streams of objects identified by key.

.. note:: handles should only be read within the context of this iterator.
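On the inline author comment above about InfoDictType: one stricter alternative would be a TypedDict. The field names below are hypothetical (the keys actually returned by the backends are not visible in this diff), so this is only a sketch of the idea:

from __future__ import annotations

from typing import TypedDict


class RepositoryInfo(TypedDict, total=False):
    # Hypothetical stricter replacement for InfoDictType; the keys are invented.
    hash_algorithm: str
    objects: dict[str, int]
    size_kb: dict[str, float]


info: RepositoryInfo = {'hash_algorithm': 'sha256', 'objects': {'packed': 12}}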