Made changes to make mypy happy.
JPBergsma committed Oct 13, 2023
1 parent 3be7d54 commit b1b66c4
Showing 4 changed files with 22 additions and 17 deletions.
17 changes: 10 additions & 7 deletions optimade/server/entry_collections/mongo.py
@@ -102,21 +102,24 @@ def count(self, **kwargs: Any) -> int:
kwargs["filter"] = {}
return len(self.collection.find(**kwargs))

- def insert(self, content: bytes, filename: str, metadata: dict = {}) -> None:
+ def insert(self, data: list) -> None:
"""Add the given entries to the underlying database.
Warning:
No validation is performed on the incoming data.
Arguments:
- content: The file content to add to gridfs.
- filename: The filename of the added content.
- metadata: extra metadata to add to the gridfs entry.
+ data: a list of dictionaries. Each dictionary contains the data belonging to one file.
+ These dictionaries contain the fields:
+ data: The file content to add to gridfs.
+ filename: The filename of the added content.
+ metadata: extra metadata to add to the gridfs entry.
"""
- self.collection.put(content, filename=filename, metadata=metadata)
+ for entry in data:  # todo check whether I can insert multiple files in one go.
+     self.collection.put(**entry)

def handle_query_params(
- self, params: Union[SingleEntryQueryParams, PartialDataQueryParams]
+ self, params: Union[SingleEntryQueryParams, PartialDataQueryParams]  # type: ignore[override]
) -> Dict[str, Any]:
"""Parse and interpret the backend-agnostic query parameter models into a dictionary
that can be used by MongoDB.
@@ -373,7 +376,7 @@ def insert(self, data: List[EntryResource]) -> None:
self.collection.insert_many(data)

def handle_query_params(
- self, params: Union[EntryListingQueryParams, SingleEntryQueryParams]
+ self, params: Union[EntryListingQueryParams, SingleEntryQueryParams]  # type: ignore[override]
) -> Dict[str, Any]:
"""Parse and interpret the backend-agnostic query parameter models into a dictionary
that can be used by MongoDB.
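For context, a minimal sketch of how the reworked GridFS-backed insert() above can be exercised. It assumes a plain pymongo/gridfs setup; the database name, file content, and metadata values are illustrative and not taken from the OPTIMADE server configuration.

from pymongo import MongoClient
import gridfs

client = MongoClient()                      # assumes a local MongoDB instance
fs = gridfs.GridFS(client["example_db"])    # hypothetical database name

# One dictionary per file, matching the fields described in the new docstring.
entries = [
    {
        "data": b"\x93NUMPY placeholder",    # raw file content (placeholder bytes)
        "filename": "structure_0001.npy",    # hypothetical filename
        "metadata": {"shape": [3, 4], "name": "float64"},
    }
]

# Mirrors the loop added in insert(): each dictionary is unpacked into GridFS.put(),
# so the keys must be "data", "filename" and "metadata".
for entry in entries:
    fs.put(**entry)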
2 changes: 1 addition & 1 deletion optimade/server/exception_handlers.py
@@ -230,7 +230,7 @@ def general_exception_handler(request: Request, exc: Exception) -> JSONAPIRespon
(OptimadeHTTPException, http_exception_handler),
(RequestValidationError, request_validation_exception_handler),
(ValidationError, validation_exception_handler),
- (VisitError, grammar_not_implemented_handler),
+ (VisitError, grammar_not_implemented_handler),  # type: ignore[list-item] # not entirely sure why this entry triggers mypy
(NotImplementedError, not_implemented_handler), # type: ignore[list-item] # not entirely sure why this entry triggers mypy
(Exception, general_exception_handler),
]
2 changes: 1 addition & 1 deletion optimade/server/main.py
@@ -101,7 +101,7 @@ def read_array_header(fobj):
"name": numpy_meta[2].name,
"itemsize": numpy_meta[2].itemsize,
}
- partial_data_coll.insert(f, filename=filename, metadata=metadata)
+ partial_data_coll.insert([{"data": f, "filename": filename, "metadata": metadata}])  # type: ignore[list-item] # Todo : Perhaps this can be reduced to a single insert statement.

def load_entries(endpoint_name: str, endpoint_collection: EntryCollection):
LOGGER.debug("Loading test %s...", endpoint_name)
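The numpy_meta[2].name / .itemsize lookups above suggest the metadata is built from the header of a .npy file. Below is a hedged sketch of how such metadata could be assembled with numpy's public header reader; the read_array_header helper is only visible in the hunk header, so this is an assumption about its behaviour, and the file name is illustrative.

import numpy as np
from numpy.lib import format as npformat

with open("example.npy", "rb") as fobj:               # hypothetical .npy file
    npformat.read_magic(fobj)                         # consume the magic string and version
    shape, fortran_order, dtype = npformat.read_array_header_1_0(fobj)
    metadata = {
        "shape": shape,
        "fortran_order": fortran_order,
        "name": dtype.name,                           # e.g. "float64", cf. numpy_meta[2].name
        "itemsize": dtype.itemsize,                    # bytes per element, cf. numpy_meta[2].itemsize
    }
    f = fobj.read()                                   # remaining bytes as payload (an assumption)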
18 changes: 10 additions & 8 deletions optimade/server/routers/utils.py
@@ -420,11 +420,13 @@ def get_partial_entry(
)

array = np.frombuffer(
results["attributes"]["data"],
dtype=getattr(np, results["attributes"]["dtype"]["name"]),
).reshape(results["attributes"]["shape"])
results["attributes"]["data"], # type: ignore[call-overload]
dtype=getattr(np, results["attributes"]["dtype"]["name"]), # type: ignore[call-overload]
).reshape(
results["attributes"]["shape"]
) # type: ignore[call-overload]
# slice array
- property_ranges = results["attributes"]["property_ranges"]
+ property_ranges = results["attributes"]["property_ranges"]  # type: ignore[call-overload]
slice_ind = [
slice(
0,
@@ -455,14 +457,14 @@ def get_partial_entry(
"has_references": False,
} # Todo: add support for non_dense data
if more_data_available:
next_link = ["PARTIAL-DATA-NEXT", [results["attributes"].pop("next")]]
next_link = ["PARTIAL-DATA-NEXT", [results["attributes"].pop("next")]] # type: ignore[call-overload]

if params.response_format == "json":
for key in header:
results["attributes"][key] = header[key]
results["attributes"]["data"] = array.tolist()
results["attributes"][key] = header[key] # type: ignore[call-overload]
results["attributes"]["data"] = array.tolist() # type: ignore[call-overload]
if more_data_available:
results["attributes"]["next"] = next_link
results["attributes"]["next"] = next_link # type: ignore[call-overload]
return dict(
links=links,
data=[results] if results else None,
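A small self-contained sketch of the reconstruction step patched above: the raw bytes stored in gridfs are reinterpreted with the saved dtype name and shape, then sliced per dimension. The attribute values and the range convention used here are illustrative assumptions; the exact slice bounds are not visible in this hunk.

import numpy as np

# Stand-in for results["attributes"] with made-up values.
attributes = {
    "data": np.arange(12, dtype=np.float64).tobytes(),   # raw buffer as stored in gridfs
    "dtype": {"name": "float64"},
    "shape": [3, 4],
    "property_ranges": [
        {"start": 0, "stop": 2, "step": 1},               # hypothetical per-dimension ranges
        {"start": 0, "stop": 4, "step": 2},
    ],
}

# Same pattern as the patched code: dtype name -> numpy dtype, then reshape.
array = np.frombuffer(
    attributes["data"],
    dtype=getattr(np, attributes["dtype"]["name"]),
).reshape(attributes["shape"])

# Build one slice object per dimension and apply them all at once.
slice_ind = tuple(
    slice(r["start"], r["stop"], r["step"]) for r in attributes["property_ranges"]
)
sliced = array[slice_ind]                                  # shape (2, 2) for these values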
