diff --git a/api/app/handlers.py b/api/app/handlers.py
deleted file mode 100644
index 069e5b2b82b5..000000000000
--- a/api/app/handlers.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import errno
-import logging
-import os
-
-
-def mkdir_p(path):  # type: ignore[no-untyped-def]
-    """http://stackoverflow.com/a/600612/190597 (tzot)"""
-    try:
-        os.makedirs(path, exist_ok=True)  # Python>3.2
-    except TypeError:
-        try:
-            os.makedirs(path)
-        except OSError as exc:  # Python >2.5
-            if exc.errno == errno.EEXIST and os.path.isdir(path):
-                pass
-            else:
-                raise
-
-
-class MakeFileHandler(logging.FileHandler):
-    def __init__(self, filename, mode="a", encoding=None, delay=0):  # type: ignore[no-untyped-def]
-        mkdir_p(os.path.dirname(filename))  # type: ignore[no-untyped-call]
-        logging.FileHandler.__init__(self, filename, mode, encoding, delay)
diff --git a/api/app/pagination.py b/api/app/pagination.py
index 0c7187d9382c..440e30e9d90b 100644
--- a/api/app/pagination.py
+++ b/api/app/pagination.py
@@ -1,6 +1,7 @@
 import base64
 import json
 from collections import OrderedDict
+from typing import Any, List, Optional, Type
 
 from drf_yasg import openapi  # type: ignore[import-untyped]
 from drf_yasg.inspectors import PaginatorInspector  # type: ignore[import-untyped]
@@ -17,7 +18,7 @@ class CustomPagination(PageNumberPagination):
 
 class EdgeIdentityPaginationInspector(PaginatorInspector):  # type: ignore[misc]
     def get_paginator_parameters(
-        self, paginator: BasePagination
+        self, paginator: Type[BasePagination]
     ) -> list[openapi.Parameter]:
         """
         :param BasePagination paginator: the paginator
@@ -40,13 +41,14 @@ def get_paginator_parameters(
             ),
         ]
 
-    def get_paginated_response(self, paginator, response_schema):  # type: ignore[no-untyped-def]
+    def get_paginated_response(
+        self, paginator: Type[BasePagination], response_schema: openapi.Schema
+    ) -> openapi.Schema:
         """
         :param BasePagination paginator: the paginator
         :param openapi.Schema response_schema: the response schema that must be paged.
         :rtype: openapi.Schema
         """
-
         return openapi.Schema(
             type=openapi.TYPE_OBJECT,
             properties=OrderedDict(
@@ -69,7 +71,9 @@ class EdgeIdentityPagination(CustomPagination):
     max_page_size = 100
     page_size = 100
 
-    def paginate_queryset(self, dynamo_queryset, request, view=None):  # type: ignore[no-untyped-def]
+    def paginate_queryset(
+        self, dynamo_queryset: Any, request: Any, view: Optional[Any] = None
+    ) -> Optional[List[Any]]:
         last_evaluated_key = dynamo_queryset.get("LastEvaluatedKey")
         if last_evaluated_key:
             self.last_evaluated_key = base64.b64encode(
@@ -81,7 +85,7 @@ def paginate_queryset(self, dynamo_queryset, request, view=None):  # type: ignor
             for identity_document in dynamo_queryset["Items"]
         ]
 
-    def get_paginated_response(self, data) -> Response:  # type: ignore[no-untyped-def]
+    def get_paginated_response(self, data: Any) -> Response:
         """
         Note: "If the size of the Query result set is larger than 1 MB,
         ScannedCount and Count represent only a partial count of the total items"
diff --git a/api/app/routers.py b/api/app/routers.py
index 691b384432c3..a55e5a061d9f 100644
--- a/api/app/routers.py
+++ b/api/app/routers.py
@@ -1,10 +1,13 @@
 import logging
 import random
 from enum import Enum
+from typing import Any, Optional, Type
 
 from django.conf import settings
 from django.core.cache import cache
 from django.db import connections
+from django.db.models import Model
+from django_stubs_ext.db.router import TypedDatabaseRouter
 
 from .exceptions import ImproperlyConfiguredError
 
@@ -46,8 +49,8 @@ def connection_check(database: str) -> bool:
     return usable
 
 
-class PrimaryReplicaRouter:
-    def db_for_read(self, model, **hints):  # type: ignore[no-untyped-def]
+class PrimaryReplicaRouter(TypedDatabaseRouter):
+    def db_for_read(self, model: Type[Model], **hints: Any) -> Optional[str]:
         if settings.NUM_DB_REPLICAS == 0:
             return "default"
 
@@ -75,10 +78,12 @@ def db_for_read(self, model, **hints):  # type: ignore[no-untyped-def]
         )
         return "default"
 
-    def db_for_write(self, model, **hints):  # type: ignore[no-untyped-def]
+    def db_for_write(self, model: Type[Model], **hints: Any) -> Optional[str]:
         return "default"
 
-    def allow_relation(self, obj1, obj2, **hints):  # type: ignore[no-untyped-def]
+    def allow_relation(
+        self, obj1: Type[Model], obj2: Type[Model], **hints: Any
+    ) -> Optional[bool]:
         """
         Relations between objects are allowed if both objects are in the
         primary/replica pool.
@@ -95,10 +100,12 @@ def allow_relation(self, obj1, obj2, **hints):  # type: ignore[no-untyped-def]
             return True
         return None
 
-    def allow_migrate(self, db, app_label, model_name=None, **hints):  # type: ignore[no-untyped-def]
+    def allow_migrate(
+        self, db: str, app_label: str, model_name: str | None = None, **hints: Any
+    ) -> Optional[bool]:
         return db == "default"
 
-    def _get_replica(self, replicas: list[str]) -> None | str:  # type: ignore[return]
+    def _get_replica(self, replicas: list[str]) -> None | str:
         while replicas:
             if settings.REPLICA_READ_STRATEGY == ReplicaReadStrategy.DISTRIBUTED:
                 database = random.choice(replicas)
@@ -119,11 +126,14 @@ def _get_replica(self, replicas: list[str]) -> None | str:  # type: ignore[retur
             if connection_check(database):
                 return database
 
+        # If no replicas are available, return None
+        return None
+
 
-class AnalyticsRouter:
+class AnalyticsRouter(TypedDatabaseRouter):
     route_app_labels = ["app_analytics"]
 
-    def db_for_read(self, model, **hints):  # type: ignore[no-untyped-def]
+    def db_for_read(self, model: Type[Model], **hints: Any) -> Optional[str]:
         """
         Attempts to read analytics models go to 'analytics' database.
         """
@@ -131,7 +141,7 @@ def db_for_read(self, model, **hints):  # type: ignore[no-untyped-def]
             return "analytics"
         return None
 
-    def db_for_write(self, model, **hints):  # type: ignore[no-untyped-def]
+    def db_for_write(self, model: Type[Model], **hints: Any) -> Optional[str]:
         """
         Attempts to write analytics models go to 'analytics' database.
         """
@@ -139,7 +149,9 @@ def db_for_write(self, model, **hints):  # type: ignore[no-untyped-def]
             return "analytics"
         return None
 
-    def allow_relation(self, obj1, obj2, **hints):  # type: ignore[no-untyped-def]
+    def allow_relation(
+        self, obj1: Type[Model], obj2: Type[Model], **hints: Any
+    ) -> Optional[bool]:
         """
         Relations between objects are allowed if both objects are in the
         analytics database.
@@ -151,7 +163,9 @@ def allow_relation(self, obj1, obj2, **hints):  # type: ignore[no-untyped-def]
             return True
         return None
 
-    def allow_migrate(self, db, app_label, model_name=None, **hints):  # type: ignore[no-untyped-def]
+    def allow_migrate(
+        self, db: str, app_label: str, model_name: str | None = None, **hints: Any
+    ) -> Optional[bool]:
         """
         Make sure the analytics app only appears in the 'analytics' database
         """
diff --git a/api/poetry.lock b/api/poetry.lock
index afe9a4da0fb9..921007a22dcf 100644
--- a/api/poetry.lock
+++ b/api/poetry.lock
@@ -4115,6 +4115,7 @@ files = [
     {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
     {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
     {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
     {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
     {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
     {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@@ -5191,4 +5192,4 @@ files = [
 [metadata]
 lock-version = "2.1"
 python-versions = ">3.11,<3.13"
-content-hash = "8112081b901ef56f1d604eb67abf751420ddb5fc9a4d10141ee144b62401271b"
+content-hash = "2f88cbe5492964ca9bbf48f75b6570648cf28360296e9c885330449f9b3895c6"
diff --git a/api/pyproject.toml b/api/pyproject.toml
index 10da59d83351..70d31f65432a 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -160,6 +160,7 @@ tzdata = "^2024.1"
 djangorestframework-simplejwt = "^5.3.1"
 structlog = "^24.4.0"
 prometheus-client = "^0.21.1"
+django-stubs-ext = "^5.1.3"
 
 [tool.poetry.group.auth-controller]
 optional = true
diff --git a/api/tests/unit/app/test_unit_app_routers.py b/api/tests/unit/app/test_unit_app_routers.py
index eb45d30d5fe4..b9b9bdfe20ef 100644
--- a/api/tests/unit/app/test_unit_app_routers.py
+++ b/api/tests/unit/app/test_unit_app_routers.py
@@ -39,12 +39,12 @@ def test_replica_router_db_for_read_with_one_offline_replica(
     router = PrimaryReplicaRouter()
 
     # When
-    result = router.db_for_read(FFAdminUser)  # type: ignore[no-untyped-call]
+    result = router.db_for_read(FFAdminUser)
 
     # Then
     # Read strategy DISTRIBUTED is random, so just this is a check
     # against loading the primary or one of the cross region replicas
-    assert result.startswith("replica_")
+    assert result is not None and result.startswith("replica_")
 
     # Check that the number of replica call counts is as expected.
     conn_call_count = 2
@@ -85,12 +85,12 @@ def test_replica_router_db_for_read_with_local_offline_replicas(
     router = PrimaryReplicaRouter()
 
     # When
-    result = router.db_for_read(FFAdminUser)  # type: ignore[no-untyped-call]
+    result = router.db_for_read(FFAdminUser)
 
     # Then
    # Read strategy DISTRIBUTED is random, so just this is a check
     # against loading the primary or one of the cross region replicas
-    assert result.startswith("cross_region_replica_")
+    assert result is not None and result.startswith("cross_region_replica_")
 
     # Check that the number of replica call counts is as expected.
     conn_call_count = 6
@@ -120,7 +120,7 @@ def test_replica_router_db_for_read_with_all_offline_replicas(
     router = PrimaryReplicaRouter()
 
     # When
-    result = router.db_for_read(FFAdminUser)  # type: ignore[no-untyped-call]
+    result = router.db_for_read(FFAdminUser)
 
     # Then
     # Fallback to primary database if all replicas are offline.
@@ -154,7 +154,7 @@ def test_replica_router_db_with_sequential_read(
     router = PrimaryReplicaRouter()
 
     # When
-    result = router.db_for_read(FFAdminUser)  # type: ignore[no-untyped-call]
+    result = router.db_for_read(FFAdminUser)
 
     # Then
     # Fallback from first replica to second one.
@@ -186,7 +186,7 @@ def test_replica_router_db_no_replicas(
    router = PrimaryReplicaRouter()
 
     # When
-    result = router.db_for_read(FFAdminUser)  # type: ignore[no-untyped-call]
+    result = router.db_for_read(FFAdminUser)
 
     # Then
     # Should always use primary database.
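
Reviewer note: the explicit fallthrough added to `PrimaryReplicaRouter._get_replica` is what allows the `# type: ignore[return]` comment to go away, since every code path now returns either a replica name or `None`, and it is also why the updated tests narrow the result with `assert result is not None and ...` before calling string methods on it. Below is a minimal, self-contained sketch of that selection logic. It is illustrative only: `get_replica` is a free function rather than a method, and `connection_check`/`ReplicaReadStrategy` are simplified stand-ins for the real versions, which consult Django settings and the cache.

```python
import random
from enum import Enum
from typing import Optional


class ReplicaReadStrategy(Enum):
    DISTRIBUTED = "DISTRIBUTED"
    SEQUENTIAL = "SEQUENTIAL"


def connection_check(database: str) -> bool:
    # Stand-in for the real health check, which pings the connection
    # and caches the result; here every replica is reported usable.
    return True


def get_replica(replicas: list[str], strategy: ReplicaReadStrategy) -> Optional[str]:
    """Pick a usable replica, or None once every candidate has been tried."""
    candidates = list(replicas)  # work on a copy so the caller's list survives
    while candidates:
        if strategy is ReplicaReadStrategy.DISTRIBUTED:
            database = random.choice(candidates)
        else:  # SEQUENTIAL: try replicas in declaration order
            database = candidates[0]
        candidates.remove(database)
        if connection_check(database):
            return database
    # Explicit fallthrough: mypy now sees Optional[str] without an ignore comment.
    return None
```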
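On the pagination side, `EdgeIdentityPagination.paginate_queryset` base64-encodes DynamoDB's `LastEvaluatedKey` so it can be handed back to clients as an opaque page cursor. A rough sketch of that round trip follows, assuming the key is JSON-serialised before encoding (the module imports both `base64` and `json`, but the exact serialisation is cut off in this hunk); the helper names `encode_cursor`/`decode_cursor` and the example key are hypothetical, not part of the codebase.

```python
import base64
import json
from typing import Any, Optional


def encode_cursor(last_evaluated_key: dict[str, Any]) -> str:
    # Serialise DynamoDB's LastEvaluatedKey into an opaque cursor string.
    return base64.b64encode(json.dumps(last_evaluated_key).encode()).decode()


def decode_cursor(cursor: Optional[str]) -> Optional[dict[str, Any]]:
    # Turn a cursor from a previous response back into an ExclusiveStartKey dict.
    if not cursor:
        return None
    return json.loads(base64.b64decode(cursor))


# Round trip: the key DynamoDB returned becomes the start key of the next query.
key = {"composite_key": "example_environment_example_identity"}  # illustrative shape
assert decode_cursor(encode_cursor(key)) == key
```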