From c035ad583124d093ce363169fce5f49b085f0cc0 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Thu, 19 Sep 2024 11:55:42 +0200 Subject: [PATCH 01/65] handle missing genotypes --- .../services/match_genotype_service/match_genotype.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/genotype_api/services/match_genotype_service/match_genotype.py b/genotype_api/services/match_genotype_service/match_genotype.py index a8ce46f..abc1151 100644 --- a/genotype_api/services/match_genotype_service/match_genotype.py +++ b/genotype_api/services/match_genotype_service/match_genotype.py @@ -3,11 +3,11 @@ from collections import Counter from genotype_api.database.models import Analysis, Sample -from genotype_api.models import MatchResult, MatchCounts, SampleDetail +from genotype_api.models import MatchCounts, MatchResult, SampleDetail from genotype_api.services.match_genotype_service.utils import ( - compare_genotypes, - check_snps, check_sex, + check_snps, + compare_genotypes, ) @@ -16,6 +16,8 @@ class MatchGenotypeService: def get_matches(analyses: list[Analysis], sample_analysis: Analysis) -> list[MatchResult]: match_results = [] for genotype in analyses: + if genotype.genotypes is None: + continue genotype_pairs = zip(genotype.genotypes, sample_analysis.genotypes) results = dict( compare_genotypes(genotype_1, genotype_2) From e244f67ce812ed0c3794b6c079f50076e2dc8ebd Mon Sep 17 00:00:00 2001 From: ahdamin Date: Fri, 20 Sep 2024 16:24:33 +0200 Subject: [PATCH 02/65] add pool_recycle argument --- genotype_api/database/database.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/genotype_api/database/database.py b/genotype_api/database/database.py index 87f28d0..61c851e 100644 --- a/genotype_api/database/database.py +++ b/genotype_api/database/database.py @@ -5,8 +5,8 @@ from sqlalchemy.engine.reflection import Inspector from sqlalchemy.orm import Session, scoped_session, sessionmaker -from genotype_api.exceptions import GenotypeDBError from genotype_api.database.models import Base +from genotype_api.exceptions import GenotypeDBError SESSION: scoped_session | None = None ENGINE: Engine | None = None @@ -16,7 +16,7 @@ def initialise_database(db_uri: str) -> None: """Initialize the SQLAlchemy engine and session for genotype api.""" global SESSION, ENGINE - ENGINE = create_engine(db_uri, pool_pre_ping=True) + ENGINE = create_engine(db_uri, pool_pre_ping=True, pool_recycle=3600) session_factory = sessionmaker(ENGINE) SESSION = scoped_session(session_factory) From a408848996071c45a49f288da59a57e28dcf659e Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 23 Sep 2024 14:24:06 +0200 Subject: [PATCH 03/65] revert changes --- genotype_api/services/match_genotype_service/match_genotype.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/genotype_api/services/match_genotype_service/match_genotype.py b/genotype_api/services/match_genotype_service/match_genotype.py index abc1151..a3a388a 100644 --- a/genotype_api/services/match_genotype_service/match_genotype.py +++ b/genotype_api/services/match_genotype_service/match_genotype.py @@ -16,8 +16,6 @@ class MatchGenotypeService: def get_matches(analyses: list[Analysis], sample_analysis: Analysis) -> list[MatchResult]: match_results = [] for genotype in analyses: - if genotype.genotypes is None: - continue genotype_pairs = zip(genotype.genotypes, sample_analysis.genotypes) results = dict( compare_genotypes(genotype_1, genotype_2) From 4aef04694c1b2727fcedd4d3fed15c86ebe74096 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Tue, 24 Sep 
2024 16:15:35 +0200 Subject: [PATCH 04/65] rollback before closing active session --- genotype_api/database/database.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/genotype_api/database/database.py b/genotype_api/database/database.py index 61c851e..3282f28 100644 --- a/genotype_api/database/database.py +++ b/genotype_api/database/database.py @@ -16,7 +16,7 @@ def initialise_database(db_uri: str) -> None: """Initialize the SQLAlchemy engine and session for genotype api.""" global SESSION, ENGINE - ENGINE = create_engine(db_uri, pool_pre_ping=True, pool_recycle=3600) + ENGINE = create_engine(db_uri, pool_pre_ping=True) session_factory = sessionmaker(ENGINE) SESSION = scoped_session(session_factory) @@ -61,6 +61,10 @@ def get_tables() -> list[str]: return inspector.get_table_names() -def close_session(): +def close_session() -> None: """Close the global database session of the genotype api.""" - SESSION.remove() + if SESSION: + session = SESSION() + if session.is_active: + session.rollback() + SESSION.remove() From eb7aa28e3364091a434d3bb249da126dc62fe2df Mon Sep 17 00:00:00 2001 From: ahdamin Date: Tue, 24 Sep 2024 16:57:16 +0200 Subject: [PATCH 05/65] rollback mw active session before closing --- genotype_api/api/middleware.py | 8 +++++++- genotype_api/database/database.py | 8 ++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/genotype_api/api/middleware.py b/genotype_api/api/middleware.py index 0ef9eaf..1327642 100644 --- a/genotype_api/api/middleware.py +++ b/genotype_api/api/middleware.py @@ -1,6 +1,7 @@ from fastapi import Request from starlette.middleware.base import BaseHTTPMiddleware -from genotype_api.database.database import close_session + +from genotype_api.database.database import close_session, get_session class DBSessionMiddleware(BaseHTTPMiddleware): @@ -10,6 +11,11 @@ def __init__(self, app): async def dispatch(self, request: Request, call_next): try: response = await call_next(request) + except Exception as e: + raise e finally: + session = get_session() + if session.is_active: + session.rollback() close_session() return response diff --git a/genotype_api/database/database.py b/genotype_api/database/database.py index 3282f28..e6ad720 100644 --- a/genotype_api/database/database.py +++ b/genotype_api/database/database.py @@ -61,10 +61,6 @@ def get_tables() -> list[str]: return inspector.get_table_names() -def close_session() -> None: +def close_session(): """Close the global database session of the genotype api.""" - if SESSION: - session = SESSION() - if session.is_active: - session.rollback() - SESSION.remove() + SESSION.remove() From 5d3d73153a66eb4840a7df60f803fb76d4049c67 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Thu, 26 Sep 2024 11:56:18 +0200 Subject: [PATCH 06/65] use GenotypeDBError --- genotype_api/api/middleware.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/genotype_api/api/middleware.py b/genotype_api/api/middleware.py index 1327642..a39b72c 100644 --- a/genotype_api/api/middleware.py +++ b/genotype_api/api/middleware.py @@ -1,7 +1,7 @@ from fastapi import Request from starlette.middleware.base import BaseHTTPMiddleware - -from genotype_api.database.database import close_session, get_session +from genotype_api.exceptions import GenotypeDBError +from genotype_api.database.database import close_session class DBSessionMiddleware(BaseHTTPMiddleware): @@ -11,11 +11,8 @@ def __init__(self, app): async def dispatch(self, request: Request, call_next): try: response = await 
call_next(request) - except Exception as e: - raise e - finally: - session = get_session() - if session.is_active: - session.rollback() + except Exception: + raise GenotypeDBError + finally close_session() return response From 687f30243bfb60391428648d00e332e45f4b05cc Mon Sep 17 00:00:00 2001 From: ahdamin Date: Thu, 26 Sep 2024 11:58:49 +0200 Subject: [PATCH 07/65] fix finally block --- genotype_api/api/middleware.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/genotype_api/api/middleware.py b/genotype_api/api/middleware.py index a39b72c..43510e9 100644 --- a/genotype_api/api/middleware.py +++ b/genotype_api/api/middleware.py @@ -1,7 +1,8 @@ from fastapi import Request from starlette.middleware.base import BaseHTTPMiddleware -from genotype_api.exceptions import GenotypeDBError + from genotype_api.database.database import close_session +from genotype_api.exceptions import GenotypeDBError class DBSessionMiddleware(BaseHTTPMiddleware): @@ -13,6 +14,6 @@ async def dispatch(self, request: Request, call_next): response = await call_next(request) except Exception: raise GenotypeDBError - finally + finally: close_session() return response From ba58eae04b866a2bafdf60ff1b529ebc6b9f83fc Mon Sep 17 00:00:00 2001 From: ahdamin Date: Thu, 26 Sep 2024 14:18:28 +0200 Subject: [PATCH 08/65] check session status when closing --- genotype_api/api/middleware.py | 7 +++++-- genotype_api/database/database.py | 12 +++++++++++- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/genotype_api/api/middleware.py b/genotype_api/api/middleware.py index 43510e9..efa67f5 100644 --- a/genotype_api/api/middleware.py +++ b/genotype_api/api/middleware.py @@ -1,7 +1,7 @@ from fastapi import Request from starlette.middleware.base import BaseHTTPMiddleware -from genotype_api.database.database import close_session +from genotype_api.database.database import close_session, get_session from genotype_api.exceptions import GenotypeDBError @@ -12,8 +12,11 @@ def __init__(self, app): async def dispatch(self, request: Request, call_next): try: response = await call_next(request) + return response except Exception: + session = get_session() + if session.is_active: + session.rollback() raise GenotypeDBError finally: close_session() - return response diff --git a/genotype_api/database/database.py b/genotype_api/database/database.py index e6ad720..dace392 100644 --- a/genotype_api/database/database.py +++ b/genotype_api/database/database.py @@ -63,4 +63,14 @@ def get_tables() -> list[str]: def close_session(): """Close the global database session of the genotype api.""" - SESSION.remove() + if SESSION is None: + return + session = SESSION() + try: + if session.dirty: + session.flush() + except Exception: + session.rollback() + raise GenotypeDBError + finally: + SESSION.remove() From 3f0e23d7291ddcdd733a894f85875a82ac88ab32 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Thu, 26 Sep 2024 16:45:04 +0200 Subject: [PATCH 09/65] Handle null genotypes --- .../services/match_genotype_service/match_genotype.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/genotype_api/services/match_genotype_service/match_genotype.py b/genotype_api/services/match_genotype_service/match_genotype.py index a3a388a..2ab4491 100644 --- a/genotype_api/services/match_genotype_service/match_genotype.py +++ b/genotype_api/services/match_genotype_service/match_genotype.py @@ -14,8 +14,13 @@ class MatchGenotypeService: @staticmethod def get_matches(analyses: list[Analysis], sample_analysis: Analysis) -> list[MatchResult]: + if 
sample_analysis is None or sample_analysis.genotypes is None: + return [] + match_results = [] for genotype in analyses: + if genotype is None or genotype.genotypes is None: + continue genotype_pairs = zip(genotype.genotypes, sample_analysis.genotypes) results = dict( compare_genotypes(genotype_1, genotype_2) From 30ff74270e00eca7d99151d0113ff2aa121c7f0b Mon Sep 17 00:00:00 2001 From: ahdamin Date: Thu, 26 Sep 2024 17:03:50 +0200 Subject: [PATCH 10/65] Add error handling middleware --- genotype_api/api/middleware.py | 41 +++++++++++++++++++++++++++++----- 1 file changed, 35 insertions(+), 6 deletions(-) diff --git a/genotype_api/api/middleware.py b/genotype_api/api/middleware.py index efa67f5..5461ade 100644 --- a/genotype_api/api/middleware.py +++ b/genotype_api/api/middleware.py @@ -1,22 +1,51 @@ +import logging + from fastapi import Request +from fastapi.responses import JSONResponse +from sqlalchemy.exc import OperationalError, PendingRollbackError from starlette.middleware.base import BaseHTTPMiddleware from genotype_api.database.database import close_session, get_session from genotype_api.exceptions import GenotypeDBError +LOG = logging.getLogger(__name__) + class DBSessionMiddleware(BaseHTTPMiddleware): def __init__(self, app): super().__init__(app) async def dispatch(self, request: Request, call_next): + session = None + error_message = JSONResponse( + status_code=500, content={"message": "Internal server error: database session error."} + ) + try: - response = await call_next(request) - return response - except Exception: session = get_session() - if session.is_active: + if session is None: + return error_message + elif session.dirty: + session.flush() + else: + response = await call_next(request) + return response + + except PendingRollbackError as e: + if session and session.is_active: + session.rollback() + LOG.debug(f"DB session error occurred: {e}") + return error_message + + except OperationalError as e: + LOG.debug(f"Database connection lost: {e}") + return error_message + + except Exception as e: + if session and session.is_active: session.rollback() - raise GenotypeDBError + LOG.debug(f"DB session occurred: {e}") + return error_message finally: - close_session() + if session: + close_session() From dba577cfbdd08770d74570bf27f5faf5b0e9bae3 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Thu, 26 Sep 2024 17:05:13 +0200 Subject: [PATCH 11/65] revert --- genotype_api/database/database.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/genotype_api/database/database.py b/genotype_api/database/database.py index dace392..e6ad720 100644 --- a/genotype_api/database/database.py +++ b/genotype_api/database/database.py @@ -63,14 +63,4 @@ def get_tables() -> list[str]: def close_session(): """Close the global database session of the genotype api.""" - if SESSION is None: - return - session = SESSION() - try: - if session.dirty: - session.flush() - except Exception: - session.rollback() - raise GenotypeDBError - finally: - SESSION.remove() + SESSION.remove() From 62141a9d700274a9fd4198a60c9e66c742831558 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Fri, 27 Sep 2024 11:53:54 +0200 Subject: [PATCH 12/65] return response for dirty sessions --- genotype_api/api/middleware.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/genotype_api/api/middleware.py b/genotype_api/api/middleware.py index 5461ade..53aa192 100644 --- a/genotype_api/api/middleware.py +++ b/genotype_api/api/middleware.py @@ -17,16 +17,18 @@ def __init__(self, app): 
5 insertions(+), 5 deletions(-) diff --git a/genotype_api/api/middleware.py b/genotype_api/api/middleware.py index 5461ade..53aa192 100644 --- a/genotype_api/api/middleware.py +++ b/genotype_api/api/middleware.py
async def dispatch(self, request: Request, call_next): session = None - error_message = JSONResponse( - status_code=500, content={"message": "Internal server error: database session error."} - ) + message = "Internal server error: database session error." + error_message = JSONResponse(status_code=500, content={"message": message}) try: session = get_session() if session is None: + LOG.debug(f"No database session found.") return error_message elif session.dirty: session.flush() + response = await call_next(request) + return response else: response = await call_next(request) return response @@ -42,8 +44,6 @@ async def dispatch(self, request: Request, call_next): return error_message except Exception as e: - if session and session.is_active: - session.rollback() LOG.debug(f"DB session occurred: {e}") return error_message finally: From 648b94ef3fd80ca439bcc05b5af3296fd50fe996 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Fri, 27 Sep 2024 14:16:33 +0200 Subject: [PATCH 13/65] Add detailed logging and session checks in DBSessionMiddleware --- genotype_api/api/middleware.py | 54 ++++++++++++++++++++-------------- 1 file changed, 32 insertions(+), 22 deletions(-) diff --git a/genotype_api/api/middleware.py b/genotype_api/api/middleware.py index 53aa192..edb3bb5 100644 --- a/genotype_api/api/middleware.py +++ b/genotype_api/api/middleware.py @@ -16,36 +16,46 @@ def __init__(self, app): super().__init__(app) async def dispatch(self, request: Request, call_next): - session = None - message = "Internal server error: database session error." - error_message = JSONResponse(status_code=500, content={"message": message}) + session = get_session() + if session is None: + LOG.error("No database session found.") + return JSONResponse( + status_code=500, content={"message": "Internal server error: No database session."} + ) try: - session = get_session() - if session is None: - LOG.debug(f"No database session found.") - return error_message - elif session.dirty: + response = await call_next(request) + + if session.dirty: session.flush() - response = await call_next(request) - return response - else: - response = await call_next(request) - return response + + return response except PendingRollbackError as e: - if session and session.is_active: + LOG.error("Pending rollback error, rolling back session", exc_info=True) + if session.is_active: session.rollback() - LOG.debug(f"DB session error occurred: {e}") - return error_message + return JSONResponse( + status_code=500, content={"message": "Internal server error: Pending rollback."} + ) except OperationalError as e: - LOG.debug(f"Database connection lost: {e}") - return error_message + LOG.error("Operational error: database connection lost", exc_info=True) + if session.is_active: + session.rollback() + return JSONResponse( + status_code=500, + content={"message": "Internal server error: Database connection lost."}, + ) except Exception as e: - LOG.debug(f"DB session occurred: {e}") - return error_message + LOG.error(f"Unexpected error occurred: {e}", exc_info=True) + if session.is_active: + session.rollback() + return JSONResponse( + status_code=500, + content={"message": "Internal server error: Unexpected error occurred."}, + ) + finally: - if session: - close_session() + close_session() From 07e66cff15ebc08e7a4ddb223a8fd0ba0f219bf5 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 30 Sep 2024 11:47:50 +0200 Subject: [PATCH 14/65] rollback in case of PendingRollbackError --- genotype_api/api/middleware.py | 33 ++++++++++++++++++--------------- 1 file changed, 
18 insertions(+), 15 deletions(-) diff --git a/genotype_api/api/middleware.py b/genotype_api/api/middleware.py index edb3bb5..de407a0 100644 --- a/genotype_api/api/middleware.py +++ b/genotype_api/api/middleware.py @@ -1,6 +1,6 @@ import logging -from fastapi import Request +from fastapi import Request, status from fastapi.responses import JSONResponse from sqlalchemy.exc import OperationalError, PendingRollbackError from starlette.middleware.base import BaseHTTPMiddleware @@ -8,6 +8,7 @@ from genotype_api.database.database import close_session, get_session from genotype_api.exceptions import GenotypeDBError +logging.basicConfig(level=logging.INFO) LOG = logging.getLogger(__name__) @@ -18,11 +19,16 @@ def __init__(self, app): async def dispatch(self, request: Request, call_next): session = get_session() if session is None: - LOG.error("No database session found.") + LOG.info("No database session found.") return JSONResponse( - status_code=500, content={"message": "Internal server error: No database session."} + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content={"message": "Internal server error: No database session."}, ) + # Ensure session is clean before processing the request + if not session.is_active: + LOG.info("Session not active, rolling back any uncommitted transactions.") + try: response = await call_next(request) @@ -31,29 +37,26 @@ async def dispatch(self, request: Request, call_next): return response - except PendingRollbackError as e: - LOG.error("Pending rollback error, rolling back session", exc_info=True) + except PendingRollbackError: + LOG.info("Pending rollback error, rolling back session", exc_info=True) if session.is_active: session.rollback() return JSONResponse( - status_code=500, content={"message": "Internal server error: Pending rollback."} + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content={"message": "Internal server error: Pending rollback."}, ) - except OperationalError as e: - LOG.error("Operational error: database connection lost", exc_info=True) - if session.is_active: - session.rollback() + except OperationalError: + LOG.info("Operational error: database connection lost", exc_info=True) return JSONResponse( - status_code=500, + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content={"message": "Internal server error: Database connection lost."}, ) except Exception as e: - LOG.error(f"Unexpected error occurred: {e}", exc_info=True) - if session.is_active: - session.rollback() + LOG.info(f"Unexpected error occurred: {e}", exc_info=True) return JSONResponse( - status_code=500, + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content={"message": "Internal server error: Unexpected error occurred."}, ) From a83e8a92770207f11cd31b49ec8cb0adb4ee9f49 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 30 Sep 2024 16:54:11 +0200 Subject: [PATCH 15/65] add pool_recycle argument --- genotype_api/database/database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/genotype_api/database/database.py b/genotype_api/database/database.py index e6ad720..61c851e 100644 --- a/genotype_api/database/database.py +++ b/genotype_api/database/database.py @@ -16,7 +16,7 @@ def initialise_database(db_uri: str) -> None: """Initialize the SQLAlchemy engine and session for genotype api.""" global SESSION, ENGINE - ENGINE = create_engine(db_uri, pool_pre_ping=True) + ENGINE = create_engine(db_uri, pool_pre_ping=True, pool_recycle=3600) session_factory = sessionmaker(ENGINE) SESSION = scoped_session(session_factory) From 
c87fc994d35b767c82284668f167177edc536da2 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Wed, 2 Oct 2024 08:03:07 +0000 Subject: [PATCH 16/65] replace deprecated on_event with lifespan --- genotype_api/api/app.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/genotype_api/api/app.py b/genotype_api/api/app.py index f6630fc..c8be87f 100644 --- a/genotype_api/api/app.py +++ b/genotype_api/api/app.py @@ -2,7 +2,7 @@ Main functions for the genotype api """ - +from contextlib import asynccontextmanager from fastapi import FastAPI, status, Request from fastapi.responses import JSONResponse from fastapi.middleware.cors import CORSMiddleware @@ -13,7 +13,17 @@ from genotype_api.api.endpoints import samples, snps, users, plates, analyses from sqlalchemy.exc import NoResultFound + +@asynccontextmanager +async def lifespan(app: FastAPI): + # Startup logic + initialise_database(settings.db_uri) + create_all_tables() + yield + # Shutdown logic + app = FastAPI( + lifespan=lifespan, root_path=security_settings.api_root_path, root_path_in_servers=True, openapi_prefix=security_settings.api_root_path, @@ -72,9 +82,3 @@ def welcome(): tags=["analyses"], responses={status.HTTP_404_NOT_FOUND: {"description": "Not found"}}, ) - - -@app.on_event("startup") -def on_startup(): - initialise_database(settings.db_uri) - create_all_tables() From cafb1284649c751f6062073c8615426d29779750 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Wed, 2 Oct 2024 08:06:16 +0000 Subject: [PATCH 17/65] remove deprecated openapi_prefix --- genotype_api/api/app.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/genotype_api/api/app.py b/genotype_api/api/app.py index c8be87f..57886d1 100644 --- a/genotype_api/api/app.py +++ b/genotype_api/api/app.py @@ -24,9 +24,7 @@ async def lifespan(app: FastAPI): app = FastAPI( lifespan=lifespan, - root_path=security_settings.api_root_path, - root_path_in_servers=True, - openapi_prefix=security_settings.api_root_path, + root_path=security_settings.api_root_path ) app.add_middleware( CORSMiddleware, From cd3cdf6f2b2feb57a2968a10863c28ef57b6eaf9 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 14:07:20 +0200 Subject: [PATCH 18/65] add packages: aiomysql and pytest-asyncio --- poetry.lock | 159 ++++++++++++++++++++++++++++++------------------- pyproject.toml | 5 +- 2 files changed, 101 insertions(+), 63 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9ad06a7..8b45ff1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "aiofiles" @@ -11,6 +11,24 @@ files = [ {file = "aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a"}, ] +[[package]] +name = "aiomysql" +version = "0.2.0" +description = "MySQL driver for asyncio." 
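A side note on the two packages this patch adds: aiomysql supplies the async MySQL driver that SQLAlchemy's "mysql+aiomysql" dialect string selects, and pytest-asyncio lets the test suite run coroutine tests. A minimal sketch of the test style this enables; the test name and connection URI are illustrative assumptions, not taken from this series:

    import pytest
    from sqlalchemy.ext.asyncio import create_async_engine

    @pytest.mark.asyncio
    async def test_async_engine_uses_aiomysql():
        # Engine creation is lazy, so no MySQL server is contacted here; the
        # URI is a placeholder and aiomysql is selected via the dialect string.
        engine = create_async_engine("mysql+aiomysql://user:pass@localhost/genotype")
        assert engine.dialect.is_async
        await engine.dispose()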
+optional = false +python-versions = ">=3.7" +files = [ + {file = "aiomysql-0.2.0-py3-none-any.whl", hash = "sha256:b7c26da0daf23a5ec5e0b133c03d20657276e4eae9b73e040b72787f6f6ade0a"}, + {file = "aiomysql-0.2.0.tar.gz", hash = "sha256:558b9c26d580d08b8c5fd1be23c5231ce3aeff2dadad989540fee740253deb67"}, +] + +[package.dependencies] +PyMySQL = ">=1.0" + +[package.extras] +rsa = ["PyMySQL[rsa] (>=1.0)"] +sa = ["sqlalchemy (>=1.3,<1.4)"] + [[package]] name = "annotated-types" version = "0.6.0" @@ -979,13 +997,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest- [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -1230,23 +1248,41 @@ files = [ [[package]] name = "pytest" -version = "8.1.1" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, - {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.4,<2.0" +pluggy = ">=1.5,<2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.24.0" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, + {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" [package.extras] -testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" @@ -1341,6 +1377,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = 
"PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -1450,64 +1487,64 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.29" +version = "2.0.35" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, - 
{file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, - {file = 
"SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, - {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, - {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = "sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = 
"sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = 
"sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, + {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, + {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} typing-extensions = ">=4.6.0" [package.extras] @@ -1704,4 +1741,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "af76cfdf0d073897a294a35be05fd1505f51ff8f0ffd537bd1b2ae56f56990d5" +content-hash = "4137cf8d00a4d14d8c728d07f899a30edda86c4c33c7b6252b6e94085bb04b6f" diff --git a/pyproject.toml b/pyproject.toml index f27ded0..8a1494b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,19 +21,20 @@ openpyxl = "^3.1.2" passlib = "^1.7.4" pydantic = "^2.6.4" pydantic-settings = "^2.2.1" -pymysql = "^1.1.0" python = "^3.11" python-dotenv = "^1.0.1" python-jose = {extras = ["cryptography"], version = "^3.3.0"} python-multipart = "^0.0.9" pyyaml = "^6.0.1" requests = "*" -sqlalchemy = "^2.0.29" +sqlalchemy = {extras = ["asyncio"], version = "^2.0.35"} sqlalchemy-utils = "^0.41.2" starlette = "^0.37.2" uvicorn = "^0.29.0" uvloop = "^0.19.0" cryptography = "^42.0.8" +aiomysql = "^0.2.0" +pytest-asyncio = "^0.24.0" [tool.poetry.group.dev.dependencies] From 56cc256577dc1fe5e0e2d824bbc38c5dccdb1ae3 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 20:31:32 +0200 Subject: [PATCH 19/65] Refactor DB to async MySQL with retries --- genotype_api/api/middleware.py | 64 ---------------- genotype_api/config.py | 5 +- 
genotype_api/database/database.py | 120 +++++++++++++++--------------- genotype_api/database/store.py | 33 +++++--- 4 files changed, 84 insertions(+), 138 deletions(-) delete mode 100644 genotype_api/api/middleware.py diff --git a/genotype_api/api/middleware.py b/genotype_api/api/middleware.py deleted file mode 100644 index de407a0..0000000 --- a/genotype_api/api/middleware.py +++ /dev/null @@ -1,64 +0,0 @@ -import logging - -from fastapi import Request, status -from fastapi.responses import JSONResponse -from sqlalchemy.exc import OperationalError, PendingRollbackError -from starlette.middleware.base import BaseHTTPMiddleware - -from genotype_api.database.database import close_session, get_session -from genotype_api.exceptions import GenotypeDBError - -logging.basicConfig(level=logging.INFO) -LOG = logging.getLogger(__name__) - - -class DBSessionMiddleware(BaseHTTPMiddleware): - def __init__(self, app): - super().__init__(app) - - async def dispatch(self, request: Request, call_next): - session = get_session() - if session is None: - LOG.info("No database session found.") - return JSONResponse( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - content={"message": "Internal server error: No database session."}, - ) - - # Ensure session is clean before processing the request - if not session.is_active: - LOG.info("Session not active, rolling back any uncommitted transactions.") - - try: - response = await call_next(request) - - if session.dirty: - session.flush() - - return response - - except PendingRollbackError: - LOG.info("Pending rollback error, rolling back session", exc_info=True) - if session.is_active: - session.rollback() - return JSONResponse( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - content={"message": "Internal server error: Pending rollback."}, - ) - - except OperationalError: - LOG.info("Operational error: database connection lost", exc_info=True) - return JSONResponse( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - content={"message": "Internal server error: Database connection lost."}, - ) - - except Exception as e: - LOG.info(f"Unexpected error occurred: {e}", exc_info=True) - return JSONResponse( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - content={"message": "Internal server error: Unexpected error occurred."}, - ) - - finally: - close_session() diff --git a/genotype_api/config.py b/genotype_api/config.py index 2602846..05f8ccb 100644 --- a/genotype_api/config.py +++ b/genotype_api/config.py @@ -10,10 +10,13 @@ class DBSettings(BaseSettings): """Settings for serving the genotype-api app""" - db_uri: str = "sqlite:///database.db" + db_uri: str = "mysql+aiomysql://username:password@localhost/dbname" db_name: str = "database.db" host: str = "localhost" port: int = 8000 + echo_sql: bool = False + max_retries: int = 5 + retry_delay: int = 120 # 2 minutes class Config: env_file = str(ENV_FILE) diff --git a/genotype_api/database/database.py b/genotype_api/database/database.py index 61c851e..6b17622 100644 --- a/genotype_api/database/database.py +++ b/genotype_api/database/database.py @@ -1,66 +1,62 @@ -"""Hold the database information""" +"""Hold the database information and session manager.""" -from sqlalchemy import create_engine, inspect -from sqlalchemy.engine.base import Engine -from sqlalchemy.engine.reflection import Inspector -from sqlalchemy.orm import Session, scoped_session, sessionmaker +import asyncio +import logging +from contextlib import asynccontextmanager +from typing import AsyncGenerator -from 
genotype_api.database.models import Base -from genotype_api.exceptions import GenotypeDBError - -SESSION: scoped_session | None = None -ENGINE: Engine | None = None - - -def initialise_database(db_uri: str) -> None: - """Initialize the SQLAlchemy engine and session for genotype api.""" - global SESSION, ENGINE - - ENGINE = create_engine(db_uri, pool_pre_ping=True, pool_recycle=3600) - session_factory = sessionmaker(ENGINE) - SESSION = scoped_session(session_factory) - - -def get_session() -> scoped_session: - """Get a SQLAlchemy session with a connection to genotype api.""" - if not SESSION: - raise GenotypeDBError - return SESSION - - -def get_scoped_session_registry() -> scoped_session | None: - """Get the scoped session registry for genotype api.""" - return SESSION - - -def get_engine() -> Engine: - """Get the SQLAlchemy engine with a connection to genotype api.""" - if not ENGINE: - raise GenotypeDBError - return ENGINE - - -def create_all_tables() -> None: - """Create all tables in genotype api.""" - session: Session = get_session() - Base.metadata.create_all(bind=session.get_bind()) - close_session() - - -def drop_all_tables() -> None: - """Drop all tables in genotype api.""" - session: Session = get_session() - Base.metadata.drop_all(bind=session.get_bind()) - close_session() - - -def get_tables() -> list[str]: - """Get a list of all tables in genotype api.""" - engine: Engine = get_engine() - inspector: Inspector = inspect(engine) - return inspector.get_table_names() +from sqlalchemy.exc import OperationalError +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from genotype_api.config import settings +from genotype_api.database.models import Base -def close_session(): - """Close the global database session of the genotype api.""" - SESSION.remove() +LOG = logging.getLogger(__name__) + +engine = create_async_engine( + settings.db_uri, + echo=settings.echo_sql, + future=True, + pool_size=10, + max_overflow=20, +) + +sessionmanager = async_sessionmaker( + engine, + class_=AsyncSession, + expire_on_commit=False, +) + + +@asynccontextmanager +async def get_session() -> AsyncGenerator[AsyncSession, None]: + """Provides an asynchronous session context manager with retry logic.""" + retries = 0 + while retries < settings.max_retries: + async with sessionmanager() as session: + try: + yield session + break + except OperationalError as e: + retries += 1 + LOG.error(f"OperationalError: {e}, retrying {retries}/{settings.max_retries}...") + if retries >= settings.max_retries: + LOG.error("Max retries exceeded. 
Could not connect to the database.") + raise + await session.close() + await asyncio.sleep(settings.retry_delay) + finally: + await session.close() + + +async def create_all_tables(): + """Create all tables in the database.""" + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + +async def drop_all_tables(): + """Drop all tables in the database.""" + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + await conn.run_sync(Base.metadata.drop_all) diff --git a/genotype_api/database/store.py b/genotype_api/database/store.py index 4dba733..379e5f6 100644 --- a/genotype_api/database/store.py +++ b/genotype_api/database/store.py @@ -1,13 +1,12 @@ """Module for the store handler.""" -from sqlalchemy.orm import Session +from sqlalchemy.ext.asyncio import AsyncSession -from genotype_api.config import DBSettings from genotype_api.database.crud.create import CreateHandler from genotype_api.database.crud.delete import DeleteHandler from genotype_api.database.crud.read import ReadHandler from genotype_api.database.crud.update import UpdateHandler -from genotype_api.database.database import get_session, initialise_database +from genotype_api.database.database import get_session class Store( @@ -16,13 +15,25 @@ class Store( ReadHandler, UpdateHandler, ): - def __init__(self): - self.session: Session = get_session() - DeleteHandler(self.session) - ReadHandler(self.session) - UpdateHandler(self.session) + def __init__(self, session: AsyncSession): + """Initialize the Store with an active database session.""" + self.session = session + CreateHandler.__init__(self, session) + DeleteHandler.__init__(self, session) + ReadHandler.__init__(self, session) + UpdateHandler.__init__(self, session) + @classmethod + async def create(cls) -> "Store": + """Asynchronously create and return a Store instance with a session.""" + async with get_session() as session: # Correctly use async context manager + return cls(session) # Return a Store instance with the session -def get_store() -> Store: - """Return a store.""" - return Store() + +async def get_store() -> Store: + """Return a Store instance.""" + store = await Store.create() + try: + yield store # Yield the store for the duration of the request + finally: + await store.session.close() From bbd7b0ac162417ae3de3a7621ac408a94ab66d1e Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 21:37:30 +0200 Subject: [PATCH 20/65] remove deprecated: openapi_prefix --- genotype_api/api/app.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/genotype_api/api/app.py b/genotype_api/api/app.py index 57886d1..96263bc 100644 --- a/genotype_api/api/app.py +++ b/genotype_api/api/app.py @@ -2,30 +2,31 @@ Main functions for the genotype api """ + +import logging from contextlib import asynccontextmanager -from fastapi import FastAPI, status, Request -from fastapi.responses import JSONResponse -from fastapi.middleware.cors import CORSMiddleware -from genotype_api.api.middleware import DBSessionMiddleware -from genotype_api.config import security_settings, settings -from genotype_api.database.database import create_all_tables, initialise_database -from genotype_api.api.endpoints import samples, snps, users, plates, analyses +from fastapi import FastAPI, Request, status +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse from sqlalchemy.exc import NoResultFound +from genotype_api.api.endpoints import analyses, plates, 
samples, snps, users +from genotype_api.config import security_settings + +LOG = logging.getLogger(__name__) + @asynccontextmanager async def lifespan(app: FastAPI): - # Startup logic - initialise_database(settings.db_uri) - create_all_tables() - yield - # Shutdown logic - -app = FastAPI( - lifespan=lifespan, - root_path=security_settings.api_root_path -) + # Startup actions, like connecting to the database + LOG.debug("Starting up...") + yield # This is important, it must yield control + # Shutdown actions, like closing the database connection + LOG.debug("Shutting down...") + + +app = FastAPI(lifespan=lifespan, root_path=security_settings.api_root_path) app.add_middleware( CORSMiddleware, allow_origins=["*"], @@ -33,7 +34,6 @@ async def lifespan(app: FastAPI): allow_methods=["*"], allow_headers=["*"], ) -app.add_middleware(DBSessionMiddleware) @app.exception_handler(NoResultFound) From 485daee57c2f397cb83ff627c3bd69905935a194 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 21:43:23 +0200 Subject: [PATCH 21/65] Add async support to endpoints --- genotype_api/api/endpoints/analyses.py | 26 ++++++------ genotype_api/api/endpoints/plates.py | 28 ++++++------- genotype_api/api/endpoints/samples.py | 58 ++++++++++++++++---------- genotype_api/api/endpoints/snps.py | 15 +++---- genotype_api/api/endpoints/users.py | 31 +++++++------- 5 files changed, 83 insertions(+), 75 deletions(-) diff --git a/genotype_api/api/endpoints/analyses.py b/genotype_api/api/endpoints/analyses.py index 26b9ba7..7b550b5 100644 --- a/genotype_api/api/endpoints/analyses.py +++ b/genotype_api/api/endpoints/analyses.py @@ -2,19 +2,17 @@ from http import HTTPStatus -from fastapi import APIRouter, Depends, File, Query, UploadFile, status, HTTPException +from fastapi import (APIRouter, Depends, File, HTTPException, Query, + UploadFile, status) from fastapi.responses import JSONResponse from genotype_api.database.store import Store, get_store from genotype_api.dto.analysis import AnalysisResponse from genotype_api.dto.user import CurrentUser - from genotype_api.exceptions import AnalysisNotFoundError from genotype_api.security import get_active_user -from genotype_api.services.endpoint_services.analysis_service import ( - AnalysisService, -) - +from genotype_api.services.endpoint_services.analysis_service import \ + AnalysisService router = APIRouter() @@ -24,14 +22,14 @@ def get_analysis_service(store: Store = Depends(get_store)) -> AnalysisService: @router.get("/{analysis_id}", response_model=AnalysisResponse) -def read_analysis( +async def read_analysis( analysis_id: int, analysis_service: AnalysisService = Depends(get_analysis_service), current_user: CurrentUser = Depends(get_active_user), ): """Return analysis.""" try: - return analysis_service.get_analysis(analysis_id) + return await analysis_service.get_analysis(analysis_id) except AnalysisNotFoundError: raise HTTPException( detail=f"Could not find analysis with id: {analysis_id}", @@ -40,7 +38,7 @@ def read_analysis( @router.get("/", response_model=list[AnalysisResponse], response_model_exclude={"genotypes"}) -def read_analyses( +async def read_analyses( skip: int = 0, limit: int = Query(default=100, lte=100), analysis_service: AnalysisService = Depends(get_analysis_service), @@ -48,7 +46,7 @@ def read_analyses( ): """Return all analyses.""" try: - return analysis_service.get_analyses(skip=skip, limit=limit) + return await analysis_service.get_analyses(skip=skip, limit=limit) except AnalysisNotFoundError: raise HTTPException( detail="Could not fetch 
analyses from backend.", @@ -57,14 +55,14 @@ def read_analyses( @router.delete("/{analysis_id}") -def delete_analysis( +async def delete_analysis( analysis_id: int, analysis_service: AnalysisService = Depends(get_analysis_service), current_user: CurrentUser = Depends(get_active_user), ): """Delete analysis based on analysis id.""" try: - analysis_service.delete_analysis(analysis_id) + await analysis_service.delete_analysis(analysis_id) except AnalysisNotFoundError: raise HTTPException( detail=f"Could not find analysis with id: {analysis_id}", @@ -76,12 +74,12 @@ def delete_analysis( @router.post( "/sequence", response_model=list[AnalysisResponse], response_model_exclude={"genotypes"} ) -def upload_sequence_analysis( +async def upload_sequence_analysis( file: UploadFile = File(...), analysis_service: AnalysisService = Depends(get_analysis_service), current_user: CurrentUser = Depends(get_active_user), ): """Reading VCF file, creating and uploading sequence analyses and sample objects to the database.""" - analyses: list[AnalysisResponse] = analysis_service.get_upload_sequence_analyses(file) + analyses: list[AnalysisResponse] = await analysis_service.get_upload_sequence_analyses(file) return analyses diff --git a/genotype_api/api/endpoints/plates.py b/genotype_api/api/endpoints/plates.py index 2faa007..a5d98ac 100644 --- a/genotype_api/api/endpoints/plates.py +++ b/genotype_api/api/endpoints/plates.py @@ -2,19 +2,19 @@ from http import HTTPStatus from typing import Literal -from fastapi import APIRouter, Depends, File, Query, UploadFile, status, HTTPException -from fastapi.responses import JSONResponse -from genotype_api.database.filter_models.plate_models import PlateOrderParams +from fastapi import (APIRouter, Depends, File, HTTPException, Query, + UploadFile, status) +from fastapi.responses import JSONResponse +from genotype_api.database.filter_models.plate_models import PlateOrderParams from genotype_api.database.store import Store, get_store from genotype_api.dto.plate import PlateResponse from genotype_api.dto.user import CurrentUser -from genotype_api.exceptions import PlateNotFoundError, PlateExistsError +from genotype_api.exceptions import PlateExistsError, PlateNotFoundError from genotype_api.security import get_active_user from genotype_api.services.endpoint_services.plate_service import PlateService - router = APIRouter() @@ -25,14 +25,14 @@ def get_plate_service(store: Store = Depends(get_store)) -> PlateService: @router.post( "/plate", ) -def upload_plate( +async def upload_plate( file: UploadFile = File(...), plate_service: PlateService = Depends(get_plate_service), current_user: CurrentUser = Depends(get_active_user), ): try: - plate_service.upload_plate(file) + await plate_service.upload_plate(file) except PlateExistsError: raise HTTPException( detail="Plate already exists in the database.", status_code=HTTPStatus.BAD_REQUEST @@ -45,7 +45,7 @@ def upload_plate( response_model=PlateResponse, response_model_exclude={"analyses", "user", "plate_status_counts"}, ) -def sign_off_plate( +async def sign_off_plate( plate_id: int, method_document: str = Query(...), method_version: str = Query(...), @@ -57,7 +57,7 @@ def sign_off_plate( Add Depends with current user """ - return plate_service.update_plate_sign_off( + return await plate_service.update_plate_sign_off( plate_id=plate_id, user_email=current_user.email, method_version=method_version, @@ -87,14 +87,14 @@ def sign_off_plate( } }, ) -def read_plate( +async def read_plate( plate_id: int, plate_service: PlateService = 
Depends(get_plate_service), current_user: CurrentUser = Depends(get_active_user), ): """Display information about a plate.""" try: - return plate_service.get_plate(plate_id=plate_id) + return await plate_service.get_plate(plate_id=plate_id) except PlateNotFoundError: raise HTTPException( detail=f"Could not find plate with id: {plate_id}", status_code=HTTPStatus.BAD_REQUEST @@ -120,7 +120,7 @@ async def read_plates( order_by=order_by, skip=skip, limit=limit, sort_order=sort_order ) try: - return plate_service.get_plates(order_params=order_params) + return await plate_service.get_plates(order_params=order_params) except PlateNotFoundError: raise HTTPException( detail="Could not fetch plates from backend.", status_code=HTTPStatus.BAD_REQUEST @@ -128,14 +128,14 @@ async def read_plates( @router.delete("/{plate_id}") -def delete_plate( +async def delete_plate( plate_id: int, plate_service: PlateService = Depends(get_plate_service), current_user: CurrentUser = Depends(get_active_user), ): """Delete plate.""" try: - analysis_ids = plate_service.delete_plate(plate_id) + analysis_ids = await plate_service.delete_plate(plate_id) return JSONResponse( f"Deleted plate: {plate_id} and analyses: {analysis_ids}", status_code=status.HTTP_200_OK, diff --git a/genotype_api/api/endpoints/samples.py b/genotype_api/api/endpoints/samples.py index 88858f1..aad03cd 100644 --- a/genotype_api/api/endpoints/samples.py +++ b/genotype_api/api/endpoints/samples.py @@ -8,11 +8,15 @@ from genotype_api.constants import Sexes, Types from genotype_api.database.filter_models.sample_models import SampleFilterParams - from genotype_api.database.store import Store, get_store -from genotype_api.dto.sample import SampleResponse, SampleCreate +from genotype_api.dto.sample import SampleCreate, SampleResponse from genotype_api.dto.user import CurrentUser -from genotype_api.exceptions import SampleNotFoundError, SampleExistsError +from genotype_api.exceptions import ( + GenotypeDBError, + InsufficientAnalysesError, + SampleExistsError, + SampleNotFoundError, +) from genotype_api.models import MatchResult, SampleDetail from genotype_api.security import get_active_user from genotype_api.services.endpoint_services.sample_service import SampleService @@ -28,13 +32,13 @@ def get_sample_service(store: Store = Depends(get_store)) -> SampleService: "/{sample_id}", response_model=SampleResponse, ) -def read_sample( +async def read_sample( sample_id: str, sample_service: SampleService = Depends(get_sample_service), current_user: CurrentUser = Depends(get_active_user), ): try: - return sample_service.get_sample(sample_id) + return await sample_service.get_sample(sample_id) except SampleNotFoundError: return JSONResponse( content=f"Sample with id: {sample_id} not found.", status_code=HTTPStatus.BAD_REQUEST @@ -44,14 +48,14 @@ def read_sample( @router.post( "/", ) -def create_sample( +async def create_sample( sample: SampleCreate, sample_service: SampleService = Depends(get_sample_service), current_user: CurrentUser = Depends(get_active_user), ): try: - sample_service.create_sample(sample_create=sample) - new_sample: SampleResponse = sample_service.get_sample(sample_id=sample.id) + await sample_service.create_sample(sample_create=sample) + new_sample: SampleResponse = await sample_service.get_sample(sample_id=sample.id) if not new_sample: return JSONResponse( content="Failed to create sample.", status_code=HTTPStatus.BAD_REQUEST @@ -78,7 +82,7 @@ def create_sample( }, }, ) -def read_samples( +async def read_samples( skip: int = 0, limit: int = 
Query(default=10, lte=10), sample_id: str | None = None, @@ -100,11 +104,11 @@ def read_samples( limit=limit, ) - return sample_service.get_samples(filter_params) + return await sample_service.get_samples(filter_params) @router.put("/{sample_id}/sex") -def update_sex( +async def update_sex( sample_id: str, sex: Sexes = Query(...), genotype_sex: Sexes | None = None, @@ -114,7 +118,7 @@ def update_sex( ): """Updating sex field on sample and sample analyses.""" try: - sample_service.set_sex( + await sample_service.set_sex( sample_id=sample_id, sex=sex, genotype_sex=genotype_sex, sequence_sex=sequence_sex ) except SampleNotFoundError: @@ -128,7 +132,7 @@ def update_sex( "/{sample_id}/comment", response_model=SampleResponse, ) -def update_comment( +async def update_comment( sample_id: str, comment: str = Query(...), sample_service: SampleService = Depends(get_sample_service), @@ -136,7 +140,7 @@ def update_comment( ): """Updating comment field on sample.""" try: - return sample_service.set_sample_comment(sample_id=sample_id, comment=comment) + return await sample_service.set_sample_comment(sample_id=sample_id, comment=comment) except SampleNotFoundError: return JSONResponse( content=f"Could not find sample with id: {sample_id}", @@ -148,7 +152,7 @@ def update_comment( "/{sample_id}/status", response_model=SampleResponse, ) -def set_sample_status( +async def set_sample_status( sample_id: str, sample_service: SampleService = Depends(get_sample_service), status: Literal["pass", "fail", "cancel"] | None = None, @@ -156,7 +160,7 @@ def set_sample_status( ): """Check sample analyses and update sample status accordingly.""" try: - return sample_service.set_sample_status(sample_id=sample_id, status=status) + return await sample_service.set_sample_status(sample_id=sample_id, status=status) except SampleNotFoundError: return JSONResponse( content=f"Could not find sample with id: {sample_id}", @@ -165,7 +169,7 @@ def set_sample_status( @router.get("/{sample_id}/match", response_model=list[MatchResult]) -def match( +async def match( sample_id: str, analysis_type: Types, comparison_set: Types, @@ -175,7 +179,7 @@ def match( current_user: CurrentUser = Depends(get_active_user), ) -> list[MatchResult]: """Match sample genotype against all other genotypes.""" - return sample_service.get_match_results( + return await sample_service.get_match_results( sample_id=sample_id, analysis_type=analysis_type, comparison_set=comparison_set, @@ -190,27 +194,37 @@ def match( deprecated=True, response_model_include={"sex": True, "nocalls": True, "snps": True}, ) -def get_status_detail( +async def get_status_detail( sample_id: str, sample_service: SampleService = Depends(get_sample_service), current_user: CurrentUser = Depends(get_active_user), ): try: - return sample_service.get_status_detail(sample_id) + return await sample_service.get_status_detail(sample_id) except SampleNotFoundError: return JSONResponse( content=f"Sample with id: {sample_id} not found.", status_code=HTTPStatus.BAD_REQUEST ) + except InsufficientAnalysesError: + return JSONResponse( + content="Insufficient analyses found for the given date range and comparison set.", + status_code=HTTPStatus.BAD_REQUEST, + ) + except GenotypeDBError: + return JSONResponse( + content="Genotypes are missing for the sample analysis.", + status_code=HTTPStatus.BAD_REQUEST, + ) @router.delete("/{sample_id}") -def delete_sample( +async def delete_sample( sample_id: str, sample_service: SampleService = Depends(get_sample_service), current_user: CurrentUser = 
Depends(get_active_user), ): """Delete sample and its Analyses.""" - sample_service.delete_sample(sample_id) + await sample_service.delete_sample(sample_id) return JSONResponse( content=f"Deleted sample with id: {sample_id}", status_code=status.HTTP_200_OK ) diff --git a/genotype_api/api/endpoints/snps.py b/genotype_api/api/endpoints/snps.py index 2e140fb..ee5cbb5 100644 --- a/genotype_api/api/endpoints/snps.py +++ b/genotype_api/api/endpoints/snps.py @@ -1,18 +1,15 @@ """Routes for the snps""" from fastapi import APIRouter, Depends, Query, UploadFile - - from starlette.responses import JSONResponse + from genotype_api.database.store import Store, get_store from genotype_api.dto.snp import SNPResponse from genotype_api.dto.user import CurrentUser from genotype_api.exceptions import SNPExistsError from genotype_api.security import get_active_user - from genotype_api.services.endpoint_services.snp_service import SNPService - router = APIRouter() @@ -21,13 +18,13 @@ def get_snp_service(store: Store = Depends(get_store)) -> SNPService: @router.get("/", response_model=list[SNPResponse]) -def read_snps( +async def read_snps( skip: int = 0, limit: int = Query(default=100, lte=100), snp_service: SNPService = Depends(get_snp_service), current_user: CurrentUser = Depends(get_active_user), ): - return snp_service.get_snps(skip=skip, limit=limit) + return await snp_service.get_snps(skip=skip, limit=limit) @router.post("/", response_model=list[SNPResponse]) @@ -37,17 +34,17 @@ async def upload_snps( current_user: CurrentUser = Depends(get_active_user), ): try: - return snp_service.upload_snps(snps_file) + return await snp_service.upload_snps(snps_file) except SNPExistsError: return JSONResponse(status_code=400, content="SNPs already uploaded") @router.delete("/") -def delete_snps( +async def delete_snps( snp_service: SNPService = Depends(get_snp_service), current_user: CurrentUser = Depends(get_active_user), ): """Delete all SNPs""" - result = snp_service.delete_all_snps() + result = await snp_service.delete_all_snps() return {"message": f"all snps deleted ({result} snps)"} diff --git a/genotype_api/api/endpoints/users.py b/genotype_api/api/endpoints/users.py index 096be0c..9068c03 100644 --- a/genotype_api/api/endpoints/users.py +++ b/genotype_api/api/endpoints/users.py @@ -1,15 +1,14 @@ """Routes for users""" -from fastapi import APIRouter, Depends, Query, HTTPException +from fastapi import APIRouter, Depends, HTTPException, Query from pydantic import EmailStr - from starlette import status - from starlette.responses import JSONResponse -from genotype_api.database.store import get_store, Store -from genotype_api.dto.user import UserRequest, UserResponse, CurrentUser -from genotype_api.exceptions import UserNotFoundError, UserArchiveError, UserExistsError +from genotype_api.database.store import Store, get_store +from genotype_api.dto.user import CurrentUser, UserRequest, UserResponse +from genotype_api.exceptions import (UserArchiveError, UserExistsError, + UserNotFoundError) from genotype_api.security import get_active_user from genotype_api.services.endpoint_services.user_service import UserService @@ -21,25 +20,25 @@ def get_user_service(store: Store = Depends(get_store)) -> UserService: @router.get("/{user_id}", response_model=UserResponse) -def read_user( +async def read_user( user_id: int, user_service: UserService = Depends(get_user_service), current_user: CurrentUser = Depends(get_active_user), ) -> UserResponse: try: - return user_service.get_user(user_id) + return await 
user_service.get_user(user_id) except UserNotFoundError: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") @router.delete("/{user_id}") -def delete_user( +async def delete_user( user_id: int, user_service: UserService = Depends(get_user_service), current_user: CurrentUser = Depends(get_active_user), ) -> JSONResponse: try: - user_service.delete_user(user_id) + await user_service.delete_user(user_id) except UserNotFoundError: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") except UserArchiveError: @@ -51,36 +50,36 @@ def delete_user( @router.put("/{user_id}/email", response_model=UserResponse, response_model_exclude={"plates"}) -def change_user_email( +async def change_user_email( user_id: int, email: EmailStr, user_service: UserService = Depends(get_user_service), current_user: CurrentUser = Depends(get_active_user), ) -> UserResponse: try: - return user_service.update_user_email(user_id=user_id, email=email) + return await user_service.update_user_email(user_id=user_id, email=email) except UserNotFoundError: HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") @router.get("/", response_model=list[UserResponse], response_model_exclude={"plates"}) -def read_users( +async def read_users( skip: int = 0, limit: int = Query(default=100, lte=100), user_service: UserService = Depends(get_user_service), current_user: CurrentUser = Depends(get_active_user), ) -> list[UserResponse]: - return user_service.get_users(skip=skip, limit=limit) + return await user_service.get_users(skip=skip, limit=limit) @router.post("/", response_model=UserResponse, response_model_exclude={"plates"}) -def create_user( +async def create_user( user: UserRequest, user_service: UserService = Depends(get_user_service), current_user: CurrentUser = Depends(get_active_user), ): try: - return user_service.create_user(user) + return await user_service.create_user(user) except UserExistsError: HTTPException(status_code=409, detail="Email already registered.") From ea24e2f972b5261887cd8442cdd6c9d119128a6d Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 21:45:30 +0200 Subject: [PATCH 22/65] Refactor BaseHandler for async queries --- genotype_api/database/base_handler.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/genotype_api/database/base_handler.py b/genotype_api/database/base_handler.py index 6fd06e5..3e3d295 100644 --- a/genotype_api/database/base_handler.py +++ b/genotype_api/database/base_handler.py @@ -1,6 +1,10 @@ from dataclasses import dataclass from typing import Type -from sqlalchemy.orm import Session, Query, DeclarativeBase + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select +from sqlalchemy.orm import DeclarativeBase, Query + from genotype_api.database.models import Analysis, Sample @@ -8,12 +12,12 @@ class BaseHandler: """All queries in one base class.""" - def __init__(self, session: Session): + def __init__(self, session: AsyncSession): self.session = session def _get_query(self, table: Type[DeclarativeBase]) -> Query: """Return a query for the given table.""" - return self.session.query(table) + return select(table) def _get_join_analysis_on_sample(self) -> Query: return self._get_query(table=Sample).join(Analysis) From 5e642f37203ca3113ebefa9d70fa184ec550d7e0 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 21:49:55 +0200 Subject: [PATCH 23/65] Refactor crud handlers for async queries --- genotype_api/database/crud/create.py | 
45 ++---
 genotype_api/database/crud/delete.py |  28 +--
 genotype_api/database/crud/read.py   | 250 +++++++++++++++++++--------
 genotype_api/database/crud/update.py |  71 ++++----
 4 files changed, 255 insertions(+), 139 deletions(-)

diff --git a/genotype_api/database/crud/create.py b/genotype_api/database/crud/create.py
index 9cbd56a..252ee70 100644
--- a/genotype_api/database/crud/create.py
+++ b/genotype_api/database/crud/create.py
@@ -1,8 +1,7 @@
 import logging
-
 from genotype_api.database.base_handler import BaseHandler
-from genotype_api.database.models import Analysis, Plate, Sample, User, SNP, Genotype
+from genotype_api.database.models import SNP, Analysis, Genotype, Plate, Sample, User
 from genotype_api.dto.user import UserRequest
 from genotype_api.exceptions import SampleExistsError

@@ -11,49 +10,51 @@ class CreateHandler(BaseHandler):

-    def create_analysis(self, analysis: Analysis) -> Analysis:
+    async def create_analysis(self, analysis: Analysis) -> Analysis:
         self.session.add(analysis)
-        self.session.commit()
-        self.session.refresh(analysis)
+        await self.session.commit()
+        await self.session.refresh(analysis)
         return analysis

-    def create_plate(self, plate: Plate) -> Plate:
+    async def create_plate(self, plate: Plate) -> Plate:
         self.session.add(plate)
-        self.session.commit()
-        self.session.refresh(plate)
+        await self.session.commit()
+        await self.session.refresh(plate)
         LOG.info(f"Creating plate with id {plate.plate_id}.")
         return plate

-    def create_sample(self, sample: Sample) -> Sample:
+    async def create_sample(self, sample: Sample) -> Sample:
         """Creates a sample in the database."""
-        sample_in_db = self.session.query(Sample).filter(Sample.id == sample.id).one_or_none()
+        result = await self.session.execute(
+            self._get_query(Sample).filter(Sample.id == sample.id)
+        )
+        sample_in_db = result.scalars().one_or_none()
         if sample_in_db:
             raise SampleExistsError
         self.session.add(sample)
-        self.session.commit()
-        self.session.refresh(sample)
+        await self.session.commit()
+        await self.session.refresh(sample)
         return sample

-    def create_analyses_samples(self, analyses: list[Analysis]) -> list[Sample]:
-        """creating samples in an analysis if not already in db."""
-        return [
-            self.create_sample(sample=Sample(id=analysis.sample_id))
-            for analysis in analyses
-            if not self.session.query(Sample).filter(Sample.id == analysis.sample_id).one_or_none()
-        ]
+    async def create_analyses_samples(self, analyses: list[Analysis]) -> list[Sample]:
+        """Create the samples of the given analyses if they are not already in the database."""
+        new_samples: list[Sample] = []
+        for analysis in analyses:
+            result = await self.session.execute(
+                self._get_query(Sample).filter(Sample.id == analysis.sample_id)
+            )
+            if not result.scalars().one_or_none():
+                new_samples.append(await self.create_sample(sample=Sample(id=analysis.sample_id)))
+        return new_samples

-    def create_user(self, user: User) -> User:
+    async def create_user(self, user: User) -> User:
         self.session.add(user)
-        self.session.commit()
-        self.session.refresh(user)
+        await self.session.commit()
+        await self.session.refresh(user)
         return user

-    def create_snps(self, snps: list[SNP]) -> list[SNP]:
+    async def create_snps(self, snps: list[SNP]) -> list[SNP]:
         self.session.add_all(snps)
-        self.session.commit()
+        await self.session.commit()
         return snps

-    def create_genotype(self, genotype: Genotype) -> Genotype:
+    async def create_genotype(self, genotype: Genotype) -> Genotype:
         self.session.add(genotype)
-        self.session.commit()
+        await self.session.commit()
         return genotype
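A note on the pattern the create handler above now relies on: the core SQLAlchemy 2.0 asyncio round trip is to build a Select, await session.execute(), and unwrap the result with .scalars(). AsyncSession has no .query() method, which is why the duplicate-sample checks go through execute(). A minimal, self-contained sketch of that round trip follows; it assumes the aiosqlite driver is available and that a Sample can be constructed from its id alone, so read it as an illustration rather than project code.

    import asyncio

    from sqlalchemy import select
    from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine

    from genotype_api.database.models import Base, Sample


    async def demo() -> None:
        # In-memory database purely for illustration
        engine = create_async_engine("sqlite+aiosqlite:///:memory:")
        async with engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)
        async with AsyncSession(engine) as session:
            session.add(Sample(id="ACC123A1"))
            await session.commit()
            # Build the statement first, then execute and unwrap it
            result = await session.execute(select(Sample).filter(Sample.id == "ACC123A1"))
            assert result.scalars().one_or_none() is not None
        await engine.dispose()


    asyncio.run(demo())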
diff --git a/genotype_api/database/crud/delete.py b/genotype_api/database/crud/delete.py
index 8cc642f..4b8594e 100644
--- a/genotype_api/database/crud/delete.py
+++ b/genotype_api/database/crud/delete.py
@@ -1,35 +1,35 @@
 import logging

-from sqlalchemy import delete
-
 from genotype_api.database.base_handler import BaseHandler
-from genotype_api.database.models import Analysis, Plate, Sample, User, SNP
+from genotype_api.database.models import SNP, Analysis, Plate, Sample, User

 LOG = logging.getLogger(__name__)


 class DeleteHandler(BaseHandler):

-    def delete_analysis(self, analysis: Analysis) -> None:
-        self.session.delete(analysis)
-        self.session.commit()
+    async def delete_analysis(self, analysis: Analysis) -> None:
+        await self.session.delete(analysis)
+        await self.session.commit()

-    def delete_plate(self, plate: Plate) -> None:
-        self.session.delete(plate)
-        self.session.commit()
+    async def delete_plate(self, plate: Plate) -> None:
+        await self.session.delete(plate)
+        await self.session.commit()

-    def delete_sample(self, sample: Sample) -> None:
-        self.session.delete(sample)
-        self.session.commit()
+    async def delete_sample(self, sample: Sample) -> None:
+        await self.session.delete(sample)
+        await self.session.commit()

-    def delete_user(self, user: User) -> None:
-        self.session.delete(user)
-        self.session.commit()
+    async def delete_user(self, user: User) -> None:
+        await self.session.delete(user)
+        await self.session.commit()

-    def delete_snps(self) -> int:
-        snps: list[SNP] = self._get_query(SNP).all()
+    async def delete_snps(self) -> int:
+        query = self._get_query(SNP)
+        result = await self.session.execute(query)
+        snps: list[SNP] = result.scalars().all()
         count: int = len(snps)
         for snp in snps:
-            self.session.delete(snp)
-        self.session.commit()
+            await self.session.delete(snp)
+        await self.session.commit()
         return count
diff --git a/genotype_api/database/crud/read.py b/genotype_api/database/crud/read.py
index d3ad566..906247f 100644
--- a/genotype_api/database/crud/read.py
+++ b/genotype_api/database/crud/read.py
@@ -1,114 +1,154 @@
 import logging
-from datetime import timedelta, date
-from sqlalchemy import func, desc, asc
-from sqlalchemy.orm import Query
+
+from datetime import date, timedelta
+
+from sqlalchemy import asc, desc, func
+from sqlalchemy.future import select
+from sqlalchemy.orm import Query, selectinload
+
 from genotype_api.constants import Types
 from genotype_api.database.base_handler import BaseHandler
 from genotype_api.database.filter_models.plate_models import PlateOrderParams
 from genotype_api.database.filter_models.sample_models import SampleFilterParams
-from genotype_api.database.filters.analysis_filter import apply_analysis_filter, AnalysisFilter
-from genotype_api.database.filters.genotype_filters import apply_genotype_filter, GenotypeFilter
+from genotype_api.database.filters.analysis_filter import AnalysisFilter, apply_analysis_filter
+from genotype_api.database.filters.genotype_filters import GenotypeFilter, apply_genotype_filter
 from genotype_api.database.filters.plate_filters import PlateFilter, apply_plate_filter
-from genotype_api.database.filters.sample_filters import apply_sample_filter, SampleFilter
+from genotype_api.database.filters.sample_filters import SampleFilter, apply_sample_filter
 from genotype_api.database.filters.snp_filters import SNPFilter, apply_snp_filter
-from genotype_api.database.filters.user_filters import apply_user_filter, UserFilter
-from genotype_api.database.models import (
-    Analysis,
-    Plate,
-    Sample,
-    User,
-    SNP,
-    Genotype,
-)
+from genotype_api.database.filters.user_filters import UserFilter, apply_user_filter
+from genotype_api.database.models import SNP, Analysis, Genotype, Plate, Sample, User

 LOG = logging.getLogger(__name__)


 class ReadHandler(BaseHandler):

-    def get_analyses_by_plate_id(self, plate_id: int) -> list[Analysis]:
-        analyses: Query = self._get_query(Analysis)
+    async def get_analyses_by_plate_id(self, plate_id: int) -> list[Analysis]:
+        analyses: Query 
= self._get_query(Analysis).options( + selectinload(Analysis.genotypes), selectinload(Analysis.sample) + ) filter_functions = [AnalysisFilter.BY_PLATE_ID] - return apply_analysis_filter( + filtered_query = apply_analysis_filter( analyses=analyses, filter_functions=filter_functions, plate_id=plate_id - ).all() + ) + result = await self.session.execute(filtered_query) + return result.scalars().all() - def get_analysis_by_id(self, analysis_id: int) -> Analysis: + async def get_analysis_by_id(self, analysis_id: int) -> Analysis: analyses: Query = self._get_query(Analysis) filter_functions = [AnalysisFilter.BY_ID] - return apply_analysis_filter( + filtered_query = apply_analysis_filter( analyses=analyses, filter_functions=filter_functions, analysis_id=analysis_id - ).first() + ) + result = await self.session.execute(filtered_query) + return result.scalars().first() - def get_analyses(self) -> list[Analysis]: - return self._get_query(Analysis).all() + async def get_analyses(self) -> list[Analysis]: + filtered_query = self._get_query(Analysis) + result = await self.session.execute(filtered_query) + return result.scalars().all() - def get_analyses_with_skip_and_limit(self, skip: int, limit: int) -> list[Analysis]: + async def get_analyses_with_skip_and_limit(self, skip: int, limit: int) -> list[Analysis]: analyses: Query = self._get_query(Analysis) filter_functions = [AnalysisFilter.SKIP_AND_LIMIT] - return apply_analysis_filter( + filtered_query = apply_analysis_filter( analyses=analyses, filter_functions=filter_functions, skip=skip, limit=limit - ).all() + ) + result = await self.session.execute(filtered_query) + return result.scalars().all() - def get_analyses_by_type_between_dates( + async def get_analyses_by_type_between_dates( self, analysis_type: Types, date_min: date, date_max: date ) -> list[Analysis]: analyses: Query = self._get_query(Analysis) + filter_functions = [AnalysisFilter.BY_TYPE, AnalysisFilter.BETWEEN_DATES] - return apply_analysis_filter( + filtered_query = apply_analysis_filter( analyses=analyses, filter_functions=filter_functions, date_min=date_min, date_max=date_max, type=analysis_type, - ).all() + ) + + filtered_query = filtered_query.options(selectinload(Analysis.genotypes)) + + # Execute the query asynchronously + result = await self.session.execute(filtered_query) + return result.scalars().all() - def get_analysis_by_type_and_sample_id(self, analysis_type: str, sample_id: str) -> Analysis: + async def get_analysis_by_type_and_sample_id( + self, sample_id: str, analysis_type: Types + ) -> Analysis: analyses: Query = self._get_query(Analysis) filter_functions = [AnalysisFilter.BY_TYPE, AnalysisFilter.BY_SAMPLE_ID] - return apply_analysis_filter( + filtered_query = apply_analysis_filter( analyses=analyses, filter_functions=filter_functions, sample_id=sample_id, type=analysis_type, - ).first() + ) + + # Add selectinload to eagerly load genotypes + filtered_query = filtered_query.options(selectinload(Analysis.genotypes)) + + result = await self.session.execute(filtered_query) + return result.scalars().first() - def get_plate_by_id(self, plate_id: int) -> Plate: - plates: Query = self._get_query(Plate) + async def get_plate_by_id(self, plate_id: int) -> Plate: + plates: Query = self._get_query(Plate).options( + selectinload(Plate.analyses).selectinload( + Analysis.sample + ) # Eager loading of analyses and samples + ) filter_functions = [PlateFilter.BY_ID] - return apply_plate_filter( + filtered_query = apply_plate_filter( plates=plates, filter_functions=filter_functions, 
entry_id=plate_id - ).first() + ) + result = await self.session.execute(filtered_query) + return result.scalars().first() - def get_plate_by_plate_id(self, plate_id: str) -> Plate: - plates: Query = self._get_query(Plate) + async def get_plate_by_plate_id(self, plate_id: str) -> Plate: + plates: Query = self._get_query(Plate).options(selectinload(Plate.analyses)) filter_functions = [PlateFilter.BY_PLATE_ID] - return apply_plate_filter( + filtered_query = apply_plate_filter( plates=plates, filter_functions=filter_functions, plate_id=plate_id - ).first() + ) + result = await self.session.execute(filtered_query) + return result.scalars().first() - def get_ordered_plates(self, order_params: PlateOrderParams) -> list[Plate]: + async def get_ordered_plates(self, order_params: PlateOrderParams) -> list[Plate]: sort_func = desc if order_params.sort_order == "descend" else asc - plates: Query = self._get_query(Plate) + plates: Query = self._get_query(Plate).options( + selectinload(Plate.analyses).selectinload(Analysis.sample) + ) filter_functions = [PlateFilter.ORDER, PlateFilter.SKIP_AND_LIMIT] - return apply_plate_filter( + filtered_query = apply_plate_filter( plates=plates, filter_functions=filter_functions, order_by=order_params.order_by, skip=order_params.skip, limit=order_params.limit, sort_func=sort_func, - ).all() + ) + result = await self.session.execute(filtered_query) + return result.scalars().all() - def get_genotype_by_id(self, entry_id: int) -> Genotype: - genotypes: Query = self._get_query(Genotype) + async def get_genotype_by_id(self, entry_id: int) -> Genotype: + genotypes: Query = self._get_query(Genotype).options(selectinload(Genotype.analysis)) filter_functions = [GenotypeFilter.BY_ID] - return apply_genotype_filter( + filtered_query = apply_genotype_filter( genotypes=genotypes, filter_functions=filter_functions, entry_id=entry_id - ).first() + ) + result = await self.session.execute(filtered_query) + return result.scalars().first() - def get_filtered_samples(self, filter_params: SampleFilterParams) -> list[Sample]: - query = self.session.query(Sample).distinct().join(Analysis) + async def get_filtered_samples(self, filter_params: SampleFilterParams) -> list[Sample]: + query = ( + select(Sample) + .distinct() + .options(selectinload(Sample.analyses).selectinload(Analysis.genotypes)) + .join(Analysis) + ) if filter_params.sample_id: query = self._get_samples(query, filter_params.sample_id) if filter_params.plate_id: @@ -119,12 +159,13 @@ def get_filtered_samples(self, filter_params: SampleFilterParams) -> list[Sample query = self._get_commented_samples(query) if filter_params.is_missing: query = self._get_status_missing_samples(query) - return ( + filtered_query = ( query.order_by(Sample.created_at.desc()) .offset(filter_params.skip) .limit(filter_params.limit) - .all() ) + result = await self.session.execute(filtered_query) + return result.scalars().all() @staticmethod def _get_incomplete_samples(query: Query) -> Query: @@ -155,50 +196,111 @@ def _get_samples(query: Query, sample_id: str) -> Query: """Returns a query for samples containing the given sample_id.""" return query.filter(Sample.id.contains(sample_id)) - def get_sample_by_id(self, sample_id: str) -> Sample: + async def get_sample_by_id(self, sample_id: str) -> Sample: + # Start by getting a base query for Sample samples: Query = self._get_query(Sample) + + # Define the filter functions for filtering by Sample ID filter_functions = [SampleFilter.BY_ID] - return apply_sample_filter( + + # Apply the filters using 
apply_sample_filter
+        filtered_query = apply_sample_filter(
+            samples=samples, filter_functions=filter_functions, sample_id=sample_id
+        )
+
+        # Eagerly load related analyses and genotypes to avoid async lazy loading
+        filtered_query = filtered_query.options(
+            selectinload(Sample.analyses).selectinload(Analysis.genotypes)
+        )

-    def get_user_by_id(self, user_id: int) -> User:
+        result = await self.session.execute(filtered_query)
+        return result.scalars().first()
+
+    async def get_user_by_id(self, user_id: int) -> User:
         users: Query = self._get_query(User)
         filter_functions = [UserFilter.BY_ID]
-        return apply_user_filter(
+        filtered_query = apply_user_filter(
             users=users, filter_functions=filter_functions, user_id=user_id
-        ).first()
+        )
+        result = await self.session.execute(filtered_query)
+        return result.scalars().first()

-    def get_user_by_email(self, email: str) -> User | None:
+    async def get_user_by_email(self, email: str) -> User | None:
         users: Query = self._get_query(User)
         filter_functions = [UserFilter.BY_EMAIL]
-        return apply_user_filter(
+        filtered_query = apply_user_filter(
             users=users, filter_functions=filter_functions, email=email
-        ).first()
+        )
+        result = await self.session.execute(filtered_query)
+        return result.scalars().first()

-    def get_users_with_skip_and_limit(self, skip: int, limit: int) -> list[User]:
-        users: Query = self._get_query(User)
+    async def get_users_with_skip_and_limit(self, skip: int, limit: int) -> list[User]:
+        users: Query = self._get_query(User).options(selectinload(User.plates))
         filter_functions = [UserFilter.SKIP_AND_LIMIT]
-        return apply_user_filter(
+        filtered_query = apply_user_filter(
             users=users, filter_functions=filter_functions, skip=skip, limit=limit
-        ).all()
+        )
+        result = await self.session.execute(filtered_query)
+        return result.scalars().all()

-    def check_analyses_objects(self, analyses: list[Analysis], analysis_type: Types) -> None:
+    async def check_analyses_objects(self, analyses: list[Analysis], analysis_type: Types) -> None:
         """Raising 400 if any analysis in the list already exist in the database"""
         for analysis_obj in analyses:
-            existing_analysis = self.get_analysis_by_type_and_sample_id(
+            existing_analysis = await self.get_analysis_by_type_and_sample_id(
                 sample_id=analysis_obj.sample_id,
                 analysis_type=analysis_type,
             )
             if existing_analysis:
-                self.session.delete(existing_analysis)
+                await self.session.delete(existing_analysis)
+                await self.session.commit()

-    def get_snps(self) -> list[SNP]:
-        return self._get_query(SNP).all()
+    async def get_snps(self) -> list[SNP]:
+        filtered_query = self._get_query(SNP)
+        result = await self.session.execute(filtered_query)
+        return result.scalars().all()

-    def get_snps_by_limit_and_skip(self, skip: int, limit: int) -> list[SNP]:
+    async def get_snps_by_limit_and_skip(self, skip: int, limit: int) -> list[SNP]:
         snps: Query = self._get_query(SNP)
         filter_functions = [SNPFilter.SKIP_AND_LIMIT]
-        return apply_snp_filter(
+        filtered_query = apply_snp_filter(
             snps=snps, filter_functions=filter_functions, skip=skip, limit=limit
-        ).all()
+        )
+        result = await self.session.execute(filtered_query)
+        return result.scalars().all()
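Worth calling out before the update handler: the selectinload() options added throughout the read handler above are not cosmetic. Once the session is async, touching an unloaded relationship would trigger a synchronous lazy load, and SQLAlchemy raises MissingGreenlet instead of loading. A short sketch of the eager-loading statement shape used above, with relationship names taken from the models in this series; treat it as a hedged illustration:

    from sqlalchemy import select
    from sqlalchemy.orm import selectinload

    from genotype_api.database.models import Analysis, Sample


    def sample_with_genotypes_stmt(sample_id: str):
        """Select a sample with its analyses and their genotypes loaded up front."""
        return (
            select(Sample)
            .options(selectinload(Sample.analyses).selectinload(Analysis.genotypes))
            .filter(Sample.id == sample_id)
        )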
diff --git a/genotype_api/database/crud/update.py b/genotype_api/database/crud/update.py
index df36d30..b64be46 100644
--- a/genotype_api/database/crud/update.py
+++ b/genotype_api/database/crud/update.py
@@ -1,18 +1,19 @@
 from pydantic import EmailStr
-
+from sqlalchemy.future import select
+from sqlalchemy.orm import Query, selectinload

 from genotype_api.constants import Types
 from genotype_api.database.base_handler import BaseHandler
 from genotype_api.database.filter_models.plate_models import PlateSignOff
 from genotype_api.database.filter_models.sample_models import SampleSexesUpdate
-from genotype_api.database.models import Sample, Plate, User
+from genotype_api.database.models import Analysis, Plate, Sample, User
 from genotype_api.exceptions import SampleNotFoundError
 from genotype_api.services.match_genotype_service.match_genotype import MatchGenotypeService


 class UpdateHandler(BaseHandler):

-    def refresh_sample_status(
+    async def refresh_sample_status(
         self,
         sample: Sample,
     ) -> Sample:
@@ -20,49 +21,60 @@ class UpdateHandler(BaseHandler):
             sample.status = None
         else:
             results = MatchGenotypeService.check_sample(sample=sample)
-            sample.status = "fail" if "fail" in results.dict().values() else "pass"
+            sample.status = "fail" if "fail" in results.model_dump().values() else "pass"
         self.session.add(sample)
-        self.session.commit()
-        self.session.refresh(sample)
+        await self.session.commit()
+        await self.session.refresh(sample)
         return sample

-    def update_sample_comment(self, sample_id: str, comment: str) -> Sample:
-        sample: Sample = self.get_sample_by_id(sample_id=sample_id)
+    async def update_sample_comment(self, sample_id: str, comment: str) -> Sample:
+        query: Query = select(Sample).distinct().filter(Sample.id == sample_id)
+        result = await self.session.execute(query)
+        sample: Sample = result.scalars().one_or_none()
         if not sample:
             raise SampleNotFoundError
         sample.comment = comment
         self.session.add(sample)
-        self.session.commit()
-        self.session.refresh(sample)
+        await self.session.commit()
+        await self.session.refresh(sample)
         return sample

-    def update_sample_status(self, sample_id: str, status: str | None) -> Sample:
-        sample: Sample = self.get_sample_by_id(sample_id=sample_id)
+    async def update_sample_status(self, sample_id: str, status: str | None) -> Sample:
+        query: Query = select(Sample).distinct().filter(Sample.id == sample_id)
+        result = await self.session.execute(query)
+        sample: Sample = result.scalars().one_or_none()
         if not sample:
             raise SampleNotFoundError
         sample.status = status
         self.session.add(sample)
-        self.session.commit()
-        self.session.refresh(sample)
+        await self.session.commit()
+        await self.session.refresh(sample)
         return sample

-    def refresh_plate(self, plate: Plate) -> None:
-        self.session.refresh(plate)
+    async def refresh_plate(self, plate: Plate) -> None:
+        await self.session.refresh(plate)

-    def update_plate_sign_off(self, plate: Plate, plate_sign_off: PlateSignOff) -> Plate:
+    async def update_plate_sign_off(self, plate: Plate, plate_sign_off: PlateSignOff) -> Plate:
         plate.signed_by = plate_sign_off.user_id
         plate.signed_at = plate_sign_off.signed_at
         plate.method_document = plate_sign_off.method_document
         plate.method_version = plate_sign_off.method_version
-        self.session.commit()
-        self.session.refresh(plate)
+        await self.session.commit()
+        await self.session.refresh(plate)
         return plate

-    def update_sample_sex(self, sexes_update: SampleSexesUpdate) -> Sample:
-        sample = (
-            self.session.query(Sample).filter(Sample.id == sexes_update.sample_id).one_or_none()
-        )
+    async def update_sample_sex(self, sexes_update: SampleSexesUpdate) -> Sample:
+        query: Query = (
+            select(Sample)
+            .distinct()
+            .options(selectinload(Sample.analyses).selectinload(Analysis.genotypes))
+            .join(Analysis)
+            .filter(Sample.id == sexes_update.sample_id)
+        )
+
+        result = await self.session.execute(query)
+        sample = result.scalars().one_or_none()
         if not sample:
             raise SampleNotFoundError
         sample.sex = sexes_update.sex
         for analysis in sample.analyses:
             if sexes_update.genotype_sex and analysis.type == Types.GENOTYPE:
                 analysis.sex = sexes_update.genotype_sex
             elif sexes_update.sequence_sex and analysis.type == Types.SEQUENCE:
                 analysis.sex = sexes_update.sequence_sex
             self.session.add(analysis)
         self.session.add(sample)
-        self.session.commit()
-        self.session.refresh(sample)
-        sample = self.refresh_sample_status(sample)
+        await self.session.commit()
+        await self.session.refresh(sample)
+        sample = await self.refresh_sample_status(sample)
         return sample

-    def update_user_email(self, user: User, email: EmailStr) -> User:
+    async def update_user_email(self, user: User, email: EmailStr) -> User:
         user.email = email
         self.session.add(user)
-        self.session.commit()
-        self.session.refresh(user)
+        await self.session.commit()
+        await self.session.refresh(user)
         return user
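One pattern the update handler repeats everywhere is add, commit, then an explicit refresh. With the session default of expire_on_commit=True, commit() expires the instance's loaded attributes, and under asyncio the implicit reload on the next attribute access is not allowed, so the reload has to be awaited up front. A minimal sketch of that round trip; rename_user is a hypothetical helper, not part of this codebase:

    from sqlalchemy.ext.asyncio import AsyncSession

    from genotype_api.database.models import User


    async def rename_user(session: AsyncSession, user: User, name: str) -> User:
        user.name = name
        session.add(user)
        await session.commit()  # expires the instance's loaded attributes
        await session.refresh(user)  # reload them explicitly on the event loop
        return user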
From 1efeb54ff4a78fd0ca952dd30e80745844bb9219 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Mon, 7 Oct 2024 21:58:29 +0200
Subject: [PATCH 24/65] Add async support to services

---
 .../endpoint_services/analysis_service.py     | 33 ++++---
 .../endpoint_services/plate_service.py        | 72 +++++++--------
 .../endpoint_services/sample_service.py       | 90 ++++++++++++-------
 .../services/endpoint_services/snp_service.py | 17 ++--
 .../endpoint_services/user_service.py         | 32 +++----
 .../services/snp_reader_service/snp_reader.py |  4 +-
 6 files changed, 137 insertions(+), 111 deletions(-)

diff --git a/genotype_api/services/endpoint_services/analysis_service.py b/genotype_api/services/endpoint_services/analysis_service.py
index 4c8d2d5..1b32e5c 100644
--- a/genotype_api/services/endpoint_services/analysis_service.py
+++ b/genotype_api/services/endpoint_services/analysis_service.py
@@ -4,13 +4,10 @@

 from fastapi import UploadFile

-from genotype_api.constants import Types, FileExtension
+from genotype_api.constants import FileExtension, Types
 from genotype_api.database.models import Analysis
-
-
 from genotype_api.dto.analysis import AnalysisResponse
 from genotype_api.exceptions import AnalysisNotFoundError
-
 from genotype_api.file_parsing.files import check_file
 from genotype_api.file_parsing.vcf import SequenceAnalysis
 from genotype_api.services.endpoint_services.base_service import BaseService
@@ -32,38 +29,38 @@ class AnalysisService(BaseService):
             genotypes=analysis.genotypes,
         )

-    def get_analysis(self, analysis_id: int) -> AnalysisResponse:
-        analysis: Analysis = self.store.get_analysis_by_id(analysis_id=analysis_id)
+    async def get_analysis(self, analysis_id: int) -> AnalysisResponse:
+        analysis: Analysis = await self.store.get_analysis_by_id(analysis_id=analysis_id)
         if not analysis:
             raise AnalysisNotFoundError
         return self._create_analysis_response(analysis)

-    def get_analyses(self, skip: int, limit: int) -> list[AnalysisResponse]:
-        analyses: list[Analysis] = self.store.get_analyses_with_skip_and_limit(
+    async def get_analyses(self, skip: int, limit: int) -> list[AnalysisResponse]:
+        analyses: list[Analysis] = await self.store.get_analyses_with_skip_and_limit(
             skip=skip, limit=limit
         )
         if not analyses:
             raise AnalysisNotFoundError
         return [self._create_analysis_response(analysis) for analysis in analyses]

-    def get_upload_sequence_analyses(self, file: UploadFile) -> list[AnalysisResponse]:
+    async def get_upload_sequence_analyses(self, file: UploadFile) -> list[AnalysisResponse]:
         """
         Reading VCF file, creating and uploading sequence analyses
         and sample objects to the database.
         """
         file_name: Path = check_file(file_path=file.filename, extension=FileExtension.VCF)
-        content = file.file.read().decode("utf-8")
-        sequence_analysis = SequenceAnalysis(vcf_file=content, source=str(file_name))
+        content = await file.read()
+        sequence_analysis = SequenceAnalysis(
+            vcf_file=content.decode("utf-8"), source=str(file_name)
+        )
         analyses: list[Analysis] = list(sequence_analysis.generate_analyses())
-        self.store.check_analyses_objects(analyses=analyses, analysis_type=Types.SEQUENCE)
-        self.store.create_analyses_samples(analyses=analyses)
+        await self.store.check_analyses_objects(analyses=analyses, analysis_type=Types.SEQUENCE)
+        await self.store.create_analyses_samples(analyses=analyses)
         for analysis in analyses:
-            analysis: Analysis = self.store.create_analysis(analysis=analysis)
-            self.store.refresh_sample_status(sample=analysis.sample)
+            analysis: Analysis = await self.store.create_analysis(analysis=analysis)
+            await self.store.refresh_sample_status(sample=analysis.sample)
         return [self._create_analysis_response(analysis) for analysis in analyses]

-    def delete_analysis(self, analysis_id: int) -> None:
-        analysis: Analysis = self.store.get_analysis_by_id(analysis_id=analysis_id)
+    async def delete_analysis(self, analysis_id: int) -> None:
+        analysis: Analysis = await self.store.get_analysis_by_id(analysis_id=analysis_id)
         if not analysis:
             raise AnalysisNotFoundError
-        self.store.delete_analysis(analysis=analysis)
+        await self.store.delete_analysis(analysis=analysis)
diff --git a/genotype_api/services/endpoint_services/plate_service.py b/genotype_api/services/endpoint_services/plate_service.py
index 9739620..7c1f1ee 100644
--- a/genotype_api/services/endpoint_services/plate_service.py
+++ b/genotype_api/services/endpoint_services/plate_service.py
@@ -2,19 +2,17 @@

 import logging
 from datetime import datetime
-
 from io import BytesIO
 from pathlib import Path
-
 from fastapi import UploadFile
 from pydantic import EmailStr

 from genotype_api.constants import Types
-from
genotype_api.database.filter_models.plate_models import PlateSignOff, PlateOrderParams -from genotype_api.database.models import Plate, Analysis, User, Sample -from genotype_api.dto.plate import PlateResponse, UserOnPlate, AnalysisOnPlate, SampleStatus -from genotype_api.exceptions import PlateNotFoundError, UserNotFoundError, PlateExistsError +from genotype_api.database.filter_models.plate_models import PlateOrderParams, PlateSignOff +from genotype_api.database.models import Analysis, Plate, Sample, User +from genotype_api.dto.plate import AnalysisOnPlate, PlateResponse, SampleStatus, UserOnPlate +from genotype_api.exceptions import PlateExistsError, PlateNotFoundError, UserNotFoundError from genotype_api.file_parsing.excel import GenotypeAnalysis from genotype_api.file_parsing.files import check_file from genotype_api.services.endpoint_services.base_service import BaseService @@ -43,15 +41,15 @@ def _get_analyses_on_plate(plate: Plate) -> list[AnalysisOnPlate] | None: analyses_response.append(analysis_response) return analyses_response if analyses_response else None - def _get_plate_user(self, plate: Plate) -> UserOnPlate | None: + async def _get_plate_user(self, plate: Plate) -> UserOnPlate | None: if plate.signed_by: - user: User = self.store.get_user_by_id(user_id=plate.signed_by) + user: User = await self.store.get_user_by_id(user_id=plate.signed_by) return UserOnPlate(email=user.email, name=user.name, id=user.id) return None - def _create_plate_response(self, plate: Plate) -> PlateResponse: + async def _create_plate_response(self, plate: Plate) -> PlateResponse: analyses_response: list[AnalysisOnPlate] = self._get_analyses_on_plate(plate) - user: UserOnPlate = self._get_plate_user(plate) + user: UserOnPlate = await self._get_plate_user(plate) return PlateResponse( created_at=plate.created_at, plate_id=plate.plate_id, @@ -69,10 +67,10 @@ def _get_plate_id_from_file(file_name: Path) -> str: # Get the plate id from the standardized name of the plate return file_name.name.split("_", 1)[0] - def upload_plate(self, file: UploadFile) -> None: + async def upload_plate(self, file: UploadFile) -> None: file_name: Path = check_file(file_path=file.filename, extension=".xlsx") plate_id: str = self._get_plate_id_from_file(file_name) - db_plate = self.store.get_plate_by_plate_id(plate_id) + db_plate = await self.store.get_plate_by_plate_id(plate_id) if db_plate: raise PlateExistsError @@ -83,26 +81,26 @@ def upload_plate(self, file: UploadFile) -> None: ) plate_obj = Plate(plate_id=plate_id) - plate: Plate = self.store.create_plate(plate=plate_obj) - new_plate: Plate = self.store.get_plate_by_plate_id(plate_id=plate_id) + plate: Plate = await self.store.create_plate(plate=plate_obj) + new_plate: Plate = await self.store.get_plate_by_plate_id(plate_id=plate_id) analyses: list[Analysis] = list(excel_parser.generate_analyses(plate_id=new_plate.id)) - self.store.check_analyses_objects(analyses=analyses, analysis_type=Types.GENOTYPE) - self.store.create_analyses_samples(analyses=analyses) + await self.store.check_analyses_objects(analyses=analyses, analysis_type=Types.GENOTYPE) + await self.store.create_analyses_samples(analyses=analyses) for analysis in analyses: - self.store.create_analysis(analysis=analysis) + await self.store.create_analysis(analysis=analysis) plate_obj.analyses = analyses for analysis in analyses: - sample: Sample = self.store.get_sample_by_id(sample_id=analysis.sample_id) - self.store.refresh_sample_status(sample=sample) - self.store.refresh_plate(plate=plate) + sample: Sample = 
await self.store.get_sample_by_id(sample_id=analysis.sample_id) + await self.store.refresh_sample_status(sample=sample) + await self.store.refresh_plate(plate=plate) - def update_plate_sign_off( + async def update_plate_sign_off( self, plate_id: int, user_email: EmailStr, method_document: str, method_version: str ) -> PlateResponse: - plate: Plate = self.store.get_plate_by_id(plate_id=plate_id) + plate: Plate = await self.store.get_plate_by_id(plate_id=plate_id) if not plate: raise PlateNotFoundError - user: User = self.store.get_user_by_email(email=user_email) + user: User = await self.store.get_user_by_email(email=user_email) if not user: raise UserNotFoundError plate_sign_off = PlateSignOff( @@ -111,29 +109,31 @@ def update_plate_sign_off( method_document=method_document, method_version=method_version, ) - self.store.update_plate_sign_off(plate=plate, plate_sign_off=plate_sign_off) - return self._create_plate_response(plate) + await self.store.update_plate_sign_off(plate=plate, plate_sign_off=plate_sign_off) + return await self._create_plate_response(plate) + + async def get_plate(self, plate_id: int) -> PlateResponse: + plate = await self.store.get_plate_by_id(plate_id) - def get_plate(self, plate_id: int) -> PlateResponse: - plate: Plate = self.store.get_plate_by_id(plate_id=plate_id) if not plate: raise PlateNotFoundError - return self._create_plate_response(plate) - def get_plates(self, order_params: PlateOrderParams) -> list[PlateResponse]: - plates: list[Plate] = self.store.get_ordered_plates(order_params=order_params) + return await self._create_plate_response(plate) + + async def get_plates(self, order_params: PlateOrderParams) -> list[PlateResponse]: + plates: list[Plate] = await self.store.get_ordered_plates(order_params=order_params) if not plates: raise PlateNotFoundError - return [self._create_plate_response(plate) for plate in plates] + return [await self._create_plate_response(plate) for plate in plates] - def delete_plate(self, plate_id) -> list[int]: + async def delete_plate(self, plate_id) -> list[int]: """Delete a plate with the given plate id and return associated analysis ids.""" - plate = self.store.get_plate_by_id(plate_id=plate_id) + plate = await self.store.get_plate_by_id(plate_id=plate_id) if not plate: raise PlateNotFoundError - analyses: list[Analysis] = self.store.get_analyses_by_plate_id(plate_id=plate_id) + analyses: list[Analysis] = await self.store.get_analyses_by_plate_id(plate_id=plate_id) analysis_ids: list[int] = [analyse.id for analyse in analyses] for analysis in analyses: - self.store.delete_analysis(analysis=analysis) - self.store.delete_plate(plate=plate) + await self.store.delete_analysis(analysis=analysis) + await self.store.delete_plate(plate=plate) return analysis_ids diff --git a/genotype_api/services/endpoint_services/sample_service.py b/genotype_api/services/endpoint_services/sample_service.py index c80dd83..6acb0d4 100644 --- a/genotype_api/services/endpoint_services/sample_service.py +++ b/genotype_api/services/endpoint_services/sample_service.py @@ -2,14 +2,14 @@ from datetime import date from typing import Literal -from genotype_api.constants import Types, Sexes -from genotype_api.database.filter_models.sample_models import SampleFilterParams, SampleSexesUpdate -from genotype_api.database.models import Sample, Analysis +from genotype_api.constants import Sexes, Types +from genotype_api.database.filter_models.sample_models import SampleFilterParams, SampleSexesUpdate +from genotype_api.database.models import Analysis, Sample from 
genotype_api.dto.genotype import GenotypeResponse -from genotype_api.dto.sample import AnalysisOnSample, SampleResponse, SampleCreate -from genotype_api.exceptions import SampleNotFoundError -from genotype_api.models import SampleDetail, MatchResult +from genotype_api.dto.sample import AnalysisOnSample, SampleCreate, SampleResponse +from genotype_api.exceptions import GenotypeDBError, InsufficientAnalysesError, SampleNotFoundError +from genotype_api.models import MatchResult, SampleDetail from genotype_api.services.endpoint_services.base_service import BaseService from genotype_api.services.match_genotype_service.match_genotype import MatchGenotypeService @@ -59,19 +59,24 @@ def _get_sample_response(self, sample: Sample) -> SampleResponse: analyses=analyses, ) - def get_sample(self, sample_id: str) -> SampleResponse: - sample: Sample = self.store.get_sample_by_id(sample_id=sample_id) + async def get_sample(self, sample_id: str) -> SampleResponse: + # Use the ReadHandler to fetch the sample + sample: Sample = await self.store.get_sample_by_id(sample_id) + if not sample: raise SampleNotFoundError + + # If sample has two analyses and no status, refresh its status if len(sample.analyses) == 2 and not sample.status: - sample: Sample = self.store.refresh_sample_status(sample=sample) + sample: Sample = await self.store.refresh_sample_status(sample=sample) + return self._get_sample_response(sample) - def get_samples(self, filter_params: SampleFilterParams) -> list[SampleResponse]: - samples: list[Sample] = self.store.get_filtered_samples(filter_params=filter_params) + async def get_samples(self, filter_params: SampleFilterParams) -> list[SampleResponse]: + samples: list[Sample] = await self.store.get_filtered_samples(filter_params=filter_params) return [self._get_sample_response(sample) for sample in samples] - def create_sample(self, sample_create: SampleCreate) -> None: + async def create_sample(self, sample_create: SampleCreate) -> None: sample = Sample( id=sample_create.id, status=sample_create.status, @@ -79,21 +84,21 @@ def create_sample(self, sample_create: SampleCreate) -> None: sex=sample_create.sex, created_at=sample_create.created_at, ) - self.store.create_sample(sample=sample) + await self.store.create_sample(sample=sample) - def delete_sample(self, sample_id: str) -> None: - sample: Sample = self.store.get_sample_by_id(sample_id=sample_id) + async def delete_sample(self, sample_id: str) -> None: + sample: Sample = await self.store.get_sample_by_id(sample_id=sample_id) for analysis in sample.analyses: - self.store.delete_analysis(analysis=analysis) - self.store.delete_sample(sample=sample) + await self.store.delete_analysis(analysis=analysis) + await self.store.delete_sample(sample=sample) - def get_status_detail(self, sample_id: str) -> SampleDetail: - sample: Sample = self.store.get_sample_by_id(sample_id=sample_id) + async def get_status_detail(self, sample_id: str) -> SampleDetail: + sample: Sample = await self.store.get_sample_by_id(sample_id=sample_id) if len(sample.analyses) != 2: return SampleDetail() return MatchGenotypeService.check_sample(sample=sample) - def get_match_results( + async def get_match_results( self, sample_id: str, analysis_type: Types, @@ -101,30 +106,53 @@ def get_match_results( date_min: date, date_max: date, ) -> list[MatchResult]: - """Get the match results for an analysis type and the comparison type in a given time frame.""" - analyses: list[Analysis] = self.store.get_analyses_by_type_between_dates( - analysis_type=comparison_set, date_max=date_max, 
date_min=date_min
-        )
+        """
+        Get the match results for a specific analysis type and comparison set within a date range.
+        """
+        # Fetch the analyses with genotypes eagerly loaded by the read handler
+        analyses = await self.store.get_analyses_by_type_between_dates(
+            analysis_type=comparison_set, date_min=date_min, date_max=date_max
         )
-        sample_analysis: Analysis = self.store.get_analysis_by_type_and_sample_id(
-            analysis_type=analysis_type, sample_id=sample_id
+        if not analyses:
+            raise InsufficientAnalysesError
+
+        sample_analysis = await self.store.get_analysis_by_type_and_sample_id(
+            sample_id=sample_id, analysis_type=analysis_type
         )
+        if sample_analysis is None:
+            raise SampleNotFoundError
+        # Genotypes must be eagerly loaded upstream to avoid async lazy loading here
+        if not sample_analysis.genotypes:
+            raise GenotypeDBError
+
         matches: list[MatchResult] = MatchGenotypeService.get_matches(
             analyses=analyses, sample_analysis=sample_analysis
         )
         return matches

-    def set_sample_status(
+    async def set_sample_status(
         self, sample_id: str, status: Literal["pass", "fail", "cancel"] | None
     ) -> SampleResponse:
-        sample: Sample = self.store.update_sample_status(sample_id=sample_id, status=status)
+        sample: Sample = await self.store.update_sample_status(sample_id=sample_id, status=status)
         return self._get_sample_response(sample)

-    def set_sample_comment(self, sample_id: str, comment: str) -> SampleResponse:
-        sample: Sample = self.store.update_sample_comment(sample_id=sample_id, comment=comment)
+    async def set_sample_comment(self, sample_id: str, comment: str) -> SampleResponse:
+        sample: Sample = await self.store.update_sample_comment(
+            sample_id=sample_id, comment=comment
+        )
         return self._get_sample_response(sample)

-    def set_sex(self, sample_id: str, sex: Sexes, genotype_sex: Sexes, sequence_sex: Sexes) -> None:
+    async def set_sex(
+        self, sample_id: str, sex: Sexes, genotype_sex: Sexes, sequence_sex: Sexes
+    ) -> None:
         sexes_update = SampleSexesUpdate(
             sample_id=sample_id, sex=sex, genotype_sex=genotype_sex, sequence_sex=sequence_sex
         )
-        self.store.update_sample_sex(sexes_update=sexes_update)
+        await self.store.update_sample_sex(sexes_update=sexes_update)
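The guards restored in get_match_results above raise the same exceptions that the samples router already translates into HTTP 400 responses for the status-detail route. If the match route were wired the same way, a handler along the following lines would do it; this is a sketch of one possible wiring with a hypothetical match_error_response helper, not something this patch series itself adds:

    from http import HTTPStatus

    from fastapi.responses import JSONResponse

    from genotype_api.exceptions import GenotypeDBError, InsufficientAnalysesError


    def match_error_response(error: Exception) -> JSONResponse:
        """Translate known match errors into 400 responses; re-raise anything else."""
        if isinstance(error, InsufficientAnalysesError):
            content = "Insufficient analyses found for the given date range and comparison set."
        elif isinstance(error, GenotypeDBError):
            content = "Genotypes are missing for the sample analysis."
        else:
            raise error
        return JSONResponse(content=content, status_code=HTTPStatus.BAD_REQUEST)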
error when SNPs already exist.""" - existing_snps: list[SNP] = self.store.get_snps() + existing_snps: list[SNP] = await self.store.get_snps() if existing_snps: raise SNPExistsError - snps: list[SNP] = SNPReaderService.read_snps_from_file(snps_file) - new_snps: list[SNP] = self.store.create_snps(snps=snps) + snps: list[SNP] = await SNPReaderService.read_snps_from_file(snps_file) + new_snps: list[SNP] = await self.store.create_snps(snps=snps) return [self._get_snp_response(new_snp) for new_snp in new_snps] - def delete_all_snps(self) -> int: - result = self.store.delete_snps() + async def delete_all_snps(self) -> int: + result = await self.store.delete_snps() return result.rowcount diff --git a/genotype_api/services/endpoint_services/user_service.py b/genotype_api/services/endpoint_services/user_service.py index e335ab4..94d6133 100644 --- a/genotype_api/services/endpoint_services/user_service.py +++ b/genotype_api/services/endpoint_services/user_service.py @@ -1,10 +1,12 @@ """Module to holds the user service.""" from pydantic import EmailStr + from genotype_api.database.models import User from genotype_api.database.store import Store -from genotype_api.dto.user import UserResponse, UserRequest, PlateOnUser -from genotype_api.exceptions import UserNotFoundError, UserArchiveError, UserExistsError +from genotype_api.dto.user import PlateOnUser, UserRequest, UserResponse +from genotype_api.exceptions import (UserArchiveError, UserExistsError, + UserNotFoundError) from genotype_api.services.endpoint_services.base_service import BaseService @@ -30,35 +32,35 @@ def _create_user_response(self, user: User) -> UserResponse: plates: list[PlateOnUser] = self._get_plates_on_user(user) return UserResponse(email=user.email, name=user.name, id=user.id, plates=plates) - def create_user(self, user: UserRequest): - existing_user: User = self.store.get_user_by_email(email=user.email) + async def create_user(self, user: UserRequest): + existing_user: User = await self.store.get_user_by_email(email=user.email) if existing_user: raise UserExistsError db_user = User(email=user.email, name=user.name) - new_user: User = self.store.create_user(user=db_user) + new_user: User = await self.store.create_user(user=db_user) return self._create_user_response(new_user) - def get_users(self, skip: int, limit: int) -> list[UserResponse]: - users: list[User] = self.store.get_users_with_skip_and_limit(skip=skip, limit=limit) + async def get_users(self, skip: int, limit: int) -> list[UserResponse]: + users: list[User] = await self.store.get_users_with_skip_and_limit(skip=skip, limit=limit) return [self._create_user_response(user) for user in users] - def get_user(self, user_id: int) -> UserResponse: - user: User = self.store.get_user_by_id(user_id=user_id) + async def get_user(self, user_id: int) -> UserResponse: + user: User = await self.store.get_user_by_id(user_id=user_id) if not user: raise UserNotFoundError return self._create_user_response(user) - def delete_user(self, user_id: int): - user: User = self.store.get_user_by_id(user_id=user_id) + async def delete_user(self, user_id: int): + user: User = await self.store.get_user_by_id(user_id=user_id) if not user: raise UserNotFoundError if user.plates: raise UserArchiveError - self.store.delete_user(user=user) + await self.store.delete_user(user=user) - def update_user_email(self, user_id: int, email: EmailStr): - user: User = self.store.get_user_by_id(user_id=user_id) + async def update_user_email(self, user_id: int, email: EmailStr): + user: User = await 
self.store.get_user_by_id(user_id=user_id) if not user: raise UserNotFoundError - user: User = self.store.update_user_email(user=user, email=email) + user: User = await self.store.update_user_email(user=user, email=email) return self._create_user_response(user) diff --git a/genotype_api/services/snp_reader_service/snp_reader.py b/genotype_api/services/snp_reader_service/snp_reader.py index 8e7b4e5..ba703ff 100644 --- a/genotype_api/services/snp_reader_service/snp_reader.py +++ b/genotype_api/services/snp_reader_service/snp_reader.py @@ -9,9 +9,9 @@ class SNPReaderService: @staticmethod - def read_snps_from_file(snps_file: UploadFile) -> list[SNP]: + async def read_snps_from_file(snps_file: UploadFile) -> list[SNP]: snps: list[SNP] = [] - content = snps_file.read() + content = await snps_file.read() header = SNP_HEADER for line in content.decode().split("\n"): if len(line) <= 10: From a2d96d492db0bb0d2a99cffbe82559cb2440ea75 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 22:22:34 +0200 Subject: [PATCH 25/65] Refactor test fixtures, helpers, and tests for async support --- tests/conftest.py | 32 +++--- tests/database/crud/test_create.py | 99 +++++++++++------ tests/database/crud/test_delete.py | 51 +++++---- tests/database/crud/test_read.py | 90 ++++++++-------- tests/database/crud/test_update.py | 51 ++++----- .../database/filters/test_analysis_filters.py | 37 +++---- .../database/filters/test_genotype_filters.py | 7 +- tests/database/filters/test_plate_filters.py | 25 +++-- tests/database/filters/test_sample_filters.py | 102 ++++++++++++------ tests/database/filters/test_snp_filters.py | 22 +++- tests/database/filters/test_user_filters.py | 30 ++++-- tests/store_helpers.py | 42 ++++---- tests/test_store_helpers.py | 58 +++++----- 13 files changed, 391 insertions(+), 255 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 168d9a2..0b1a37c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,14 +2,14 @@ import datetime from pathlib import Path -from typing import Generator +from typing import AsyncGenerator import pytest -from genotype_api.database.database import initialise_database, create_all_tables, drop_all_tables +from genotype_api.database.database import create_all_tables, drop_all_tables, get_session from genotype_api.database.filter_models.plate_models import PlateSignOff from genotype_api.database.filter_models.sample_models import SampleSexesUpdate -from genotype_api.database.models import User, Plate, SNP, Sample, Genotype, Analysis +from genotype_api.database.models import SNP, Analysis, Genotype, Plate, Sample, User from genotype_api.database.store import Store from tests.store_helpers import StoreHelpers @@ -34,14 +34,14 @@ def date_tomorrow() -> datetime: return datetime.date.today() + datetime.timedelta(days=1) -@pytest.fixture -def store() -> Generator[Store, None, None]: +@pytest.fixture(scope="session") +async def store() -> AsyncGenerator[Store, None]: """Return a CG store.""" - initialise_database("sqlite:///") - _store = Store() - create_all_tables() + session = get_session() + _store = Store(session) + await create_all_tables() yield _store - drop_all_tables() + await drop_all_tables() @pytest.fixture @@ -226,7 +226,7 @@ def test_analyses( @pytest.fixture -def base_store( +async def base_store( store: Store, helpers: StoreHelpers, test_snps: list[SNP], @@ -237,17 +237,17 @@ def base_store( test_analyses: list[Analysis], ): for snp in test_snps: - helpers.ensure_snp(store=store, snp=snp) + await helpers.ensure_snp(store=store, 
snp=snp) for genotype in test_genotypes: - helpers.ensure_genotype(store=store, genotype=genotype) + await helpers.ensure_genotype(store=store, genotype=genotype) for plate in test_plates: - helpers.ensure_plate(store=store, plate=plate) + await helpers.ensure_plate(store=store, plate=plate) for user in test_users: - helpers.ensure_user(store=store, user=user) + await helpers.ensure_user(store=store, user=user) for sample in test_samples: - helpers.ensure_sample(store=store, sample=sample) + await helpers.ensure_sample(store=store, sample=sample) for analysis in test_analyses: - helpers.ensure_analysis(store=store, analysis=analysis) + await helpers.ensure_analysis(store=store, analysis=analysis) return store diff --git a/tests/database/crud/test_create.py b/tests/database/crud/test_create.py index 25522ec..0001dbf 100644 --- a/tests/database/crud/test_create.py +++ b/tests/database/crud/test_create.py @@ -1,88 +1,119 @@ """Module to test the create functionality of the genotype API CRUD.""" -from genotype_api.database.models import Analysis, SNP, User, Genotype, Sample, Plate +from sqlalchemy.orm import Query + +from genotype_api.database.models import SNP, Analysis, Genotype, Plate, Sample, User from genotype_api.database.store import Store -def test_create_analysis(store: Store, test_analysis: Analysis): +async def test_create_analysis(store: Store, test_analysis: Analysis): # GIVEN an analysis and an empty store - - assert not store._get_query(Analysis).all() + analyses_query: Query = store._get_query(Analysis) + result = await store.session.execute(analyses_query) + assert not result.scalars().all() # WHEN creating the analysis - store.create_analysis(analysis=test_analysis) + await store.create_analysis(analysis=test_analysis) # THEN the analysis is created - assert store._get_query(Analysis).all()[0].id == test_analysis.id + result = await store.session.execute(analyses_query) + analyses = result.scalars().all() + assert analyses[0].id == test_analysis.id -def test_create_genotype(store: Store, test_genotype: Genotype): +async def test_create_genotype(store: Store, test_genotype: Genotype): # GIVEN a genotype and an empty store - - assert not store._get_query(Genotype).all() + genotypes_query: Query = store._get_query(Genotype) + result = await store.session.execute(genotypes_query) + assert not result.scalars().all() # WHEN creating the genotype - store.create_genotype(genotype=test_genotype) + await store.create_genotype(genotype=test_genotype) # THEN the genotype is created - assert store._get_query(Genotype).all()[0].id == test_genotype.id + result = await store.session.execute(genotypes_query) + genotypes = result.scalars().all() + assert genotypes[0].id == test_genotype.id -def test_create_snp(store: Store, test_snp: SNP): +async def test_create_snp(store: Store, test_snp: SNP): # GIVEN a SNP and an empty store - assert not store._get_query(SNP).all() + snps_query: Query = store._get_query(SNP) + result = await store.session.execute(snps_query) + assert not result.scalars().all() # WHEN creating the SNP - store.create_snps(snps=[test_snp]) + await store.create_snps(snps=[test_snp]) # THEN the SNP is created - assert store._get_query(SNP).all()[0].id == test_snp.id + result = await store.session.execute(snps_query) + snps = result.scalars().all() + assert snps[0].id == test_snp.id -def test_create_user(store: Store, test_user: User): +async def test_create_user(store: Store, test_user: User): # GIVEN a user and an empty store - assert not store._get_query(User).all() + 
users_query: Query = store._get_query(User) + result = await store.session.execute(users_query) + assert not result.scalars().all() # WHEN creating the user - store.create_user(user=test_user) + await store.create_user(user=test_user) # THEN the user is created - assert store._get_query(User).all()[0].id == test_user.id + result = await store.session.execute(users_query) + users = result.scalars().all() + assert users[0].id == test_user.id -def test_create_sample(store: Store, test_sample: Sample): +async def test_create_sample(store: Store, test_sample: Sample): # GIVEN a sample and an empty store - assert not store._get_query(Sample).all() + samples_query: Query = store._get_query(Sample) + result = await store.session.execute(samples_query) + assert not result.scalars().all() # WHEN creating the sample - store.create_sample(sample=test_sample) + await store.create_sample(sample=test_sample) # THEN the sample is created - assert store._get_query(Sample).all()[0].id == test_sample.id + result = await store.session.execute(samples_query) + samples = result.scalars().all() + assert samples[0].id == test_sample.id -def test_create_plate(store: Store, test_plate: Plate): +async def test_create_plate(store: Store, test_plate: Plate): # GIVEN a plate and an empty store - assert not store._get_query(Plate).all() + plates_query: Query = store._get_query(Plate) + result = await store.session.execute(plates_query) + assert not result.scalars().all() # WHEN creating the plate - store.create_plate(plate=test_plate) + await store.create_plate(plate=test_plate) # THEN the plate is created - assert store._get_query(Plate).all()[0].id == test_plate.id + result = await store.session.execute(plates_query) + plates = result.scalars().all() + assert plates[0].id == test_plate.id -def test_create_analyses_samples(store: Store, test_analysis: Analysis): +async def test_create_analyses_samples(store: Store, test_analysis: Analysis): # GIVEN an analysis in a store - assert not store._get_query(Sample).all() - assert not store._get_query(Analysis).all() + samples_query: Query = store._get_query(Sample) + analyses_query: Query = store._get_query(Analysis) + + result = await store.session.execute(samples_query) + assert not result.scalars().all() + + result = await store.session.execute(analyses_query) + assert not result.scalars().all() - store.create_analysis(test_analysis) + await store.create_analysis(test_analysis) - # WHEN creating the analyses - store.create_analyses_samples(analyses=[test_analysis]) + # WHEN creating the analyses samples + await store.create_analyses_samples(analyses=[test_analysis]) # THEN the samples are created - sample: Sample = store._get_query(Sample).all()[0] + result = await store.session.execute(samples_query) + sample: Sample = result.scalars().all()[0] assert sample assert sample.id == test_analysis.sample_id diff --git a/tests/database/crud/test_delete.py b/tests/database/crud/test_delete.py index ebfc32f..b7f9c40 100644 --- a/tests/database/crud/test_delete.py +++ b/tests/database/crud/test_delete.py @@ -1,59 +1,72 @@ """Module to test the delete functionality of the genotype API CRUD.""" -from genotype_api.database.models import Analysis, Sample, User, Plate, SNP +from sqlalchemy.orm import Query + +from genotype_api.database.models import SNP, Analysis, Plate, Sample, User from genotype_api.database.store import Store -def test_delete_analysis(base_store: Store, test_analysis: Analysis): +async def test_delete_analysis(base_store: Store, test_analysis: Analysis): # GIVEN an 
analysis and a store with the analysis
-    assert test_analysis in base_store._get_query(Analysis).all()
+    analyses = await base_store.get_analyses()
+    assert test_analysis in analyses
 
     # WHEN deleting the analysis
-    base_store.delete_analysis(analysis=test_analysis)
+    await base_store.delete_analysis(analysis=test_analysis)
 
     # THEN the analysis is deleted
-    assert test_analysis not in base_store._get_query(Analysis).all()
+    analyses = await base_store.get_analyses()
+    assert test_analysis not in analyses
 
 
-def test_delete_sample(base_store: Store, test_sample: Sample):
+async def test_delete_sample(base_store: Store, test_sample: Sample):
     # GIVEN a sample and a store with the sample
-    assert test_sample in base_store._get_query(Sample).all()
+    query: Query = base_store._get_query(Sample)
+    result = await base_store.session.execute(query)
+    assert test_sample in result.scalars().all()
 
     # WHEN deleting the sample
-    base_store.delete_sample(sample=test_sample)
+    await base_store.delete_sample(sample=test_sample)
 
     # THEN the sample is deleted
-    assert test_sample not in base_store._get_query(Sample).all()
+    result = await base_store.session.execute(base_store._get_query(Sample))
+    assert test_sample not in result.scalars().all()
 
 
-def test_delete_plate(base_store: Store, test_plate: Plate):
+async def test_delete_plate(base_store: Store, test_plate: Plate):
     # GIVEN a plate and a store with the plate
-    assert test_plate in base_store._get_query(Plate).all()
+    result = await base_store.session.execute(base_store._get_query(Plate))
+    assert test_plate in result.scalars().all()
 
     # WHEN deleting the plate
-    base_store.delete_plate(plate=test_plate)
+    await base_store.delete_plate(plate=test_plate)
 
     # THEN the plate is deleted
-    assert test_plate not in base_store._get_query(Plate).all()
+    result = await base_store.session.execute(base_store._get_query(Plate))
+    assert test_plate not in result.scalars().all()
 
 
-def test_delete_user(base_store: Store, test_user: User):
+async def test_delete_user(base_store: Store, test_user: User):
     # GIVEN a user and a store with the user
-    assert test_user in base_store._get_query(User).all()
+    result = await base_store.session.execute(base_store._get_query(User))
+    assert test_user in result.scalars().all()
 
     # WHEN deleting the user
-    base_store.delete_user(user=test_user)
+    await base_store.delete_user(user=test_user)
 
     # THEN the user is deleted
-    assert test_user not in base_store._get_query(User).all()
+    result = await base_store.session.execute(base_store._get_query(User))
+    assert test_user not in result.scalars().all()
 
 
-def test_delete_snps(base_store: Store, test_snp: SNP):
+async def test_delete_snps(base_store: Store, test_snp: SNP):
     # GIVEN an SNP and a store with the SNP
-    assert base_store._get_query(SNP).all()
+    result = await base_store.session.execute(base_store._get_query(SNP))
+    assert result.scalars().all()
 
     # WHEN deleting the SNP
-    base_store.delete_snps()
+    await base_store.delete_snps()
 
     # THEN all SNPs are deleted
-    assert not base_store._get_query(SNP).all()
+    result = await base_store.session.execute(base_store._get_query(SNP))
+    assert not result.scalars().all()
diff --git a/tests/database/crud/test_read.py b/tests/database/crud/test_read.py
index fa55a34..b305502 100644
--- a/tests/database/crud/test_read.py
+++ b/tests/database/crud/test_read.py
@@ -2,29 +2,30 @@
 
 from datetime import date
 
-
 from genotype_api.database.filter_models.plate_models import PlateOrderParams
-from genotype_api.database.models import Analysis, Plate, SNP, User, Genotype
+from genotype_api.database.models import SNP, 
Analysis, Genotype, Plate, User from genotype_api.database.store import Store from tests.store_helpers import StoreHelpers -def test_get_analysis_by_plate_id(base_store: Store, test_analysis: Analysis): +async def test_get_analysis_by_plate_id(base_store: Store, test_analysis: Analysis): # GIVEN an analysis and a store with the analysis # WHEN getting the analysis by plate id - analyses: list[Analysis] = base_store.get_analyses_by_plate_id(plate_id=test_analysis.plate_id) + analyses: list[Analysis] = await base_store.get_analyses_by_plate_id( + plate_id=test_analysis.plate_id + ) # THEN the analysis is returned for analysis in analyses: assert analysis.plate_id == test_analysis.plate_id -def test_get_analysis_by_type_and_sample_id(base_store: Store, test_analysis: Analysis): +async def test_get_analysis_by_type_and_sample_id(base_store: Store, test_analysis: Analysis): # GIVEN an analysis and a store with the analysis # WHEN getting the analysis by type and sample id - analysis: Analysis = base_store.get_analysis_by_type_and_sample_id( + analysis: Analysis = await base_store.get_analysis_by_type_and_sample_id( analysis_type=test_analysis.type, sample_id=test_analysis.sample_id ) @@ -33,37 +34,37 @@ def test_get_analysis_by_type_and_sample_id(base_store: Store, test_analysis: An assert analysis.type == test_analysis.type -def test_get_analysis_by_id(base_store: Store, test_analysis: Analysis): +async def test_get_analysis_by_id(base_store: Store, test_analysis: Analysis): # GIVEN an analysis and a store with the analysis # WHEN getting the analysis by id - analysis: Analysis = base_store.get_analysis_by_id(analysis_id=test_analysis.id) + analysis: Analysis = await base_store.get_analysis_by_id(analysis_id=test_analysis.id) # THEN the analysis is returned assert analysis.id == test_analysis.id -def test_get_analyses(base_store: Store, test_analyses: list[Analysis]): +async def test_get_analyses(base_store: Store, test_analyses: list[Analysis]): # GIVEN an analysis and a store with the analysis # WHEN getting the analyses - analyses: list[Analysis] = base_store.get_analyses() + analyses: list[Analysis] = await base_store.get_analyses() # THEN the analyses are returned assert analyses == test_analyses -def test_get_analyses_with_skip_and_limit(base_store: Store, test_analyses: list[Analysis]): +async def test_get_analyses_with_skip_and_limit(base_store: Store, test_analyses: list[Analysis]): # GIVEN an analysis and a store with the analysis # WHEN getting the analyses with skip and limit - analyses: list[Analysis] = base_store.get_analyses_with_skip_and_limit(skip=0, limit=2) + analyses: list[Analysis] = await base_store.get_analyses_with_skip_and_limit(skip=0, limit=2) # THEN the analyses are returned assert analyses == test_analyses[:2] -def test_get_analyses_by_type_between_dates( +async def test_get_analyses_by_type_between_dates( base_store: Store, test_analysis: Analysis, date_tomorrow: date, @@ -75,10 +76,10 @@ def test_get_analyses_by_type_between_dates( future_analysis: Analysis = test_analysis future_analysis.created_at = date_two_weeks_future - helpers.ensure_analysis(store=base_store, analysis=future_analysis) + await helpers.ensure_analysis(store=base_store, analysis=future_analysis) # WHEN getting the analyses by type between dates excluding one of the analyses - analyses: list[Analysis] = base_store.get_analyses_by_type_between_dates( + analyses: list[Analysis] = await base_store.get_analyses_by_type_between_dates( analysis_type=test_analysis.type, date_min=date_yesterday, 
date_max=date_tomorrow ) @@ -88,101 +89,106 @@ def test_get_analyses_by_type_between_dates( assert analysis.created_at != date_two_weeks_future -def test_get_plate_by_id(base_store: Store, test_plate: Plate): +async def test_get_plate_by_id(base_store: Store, test_plate: Plate): # GIVEN a store with a plate # WHEN getting the plate by id - plate: Plate = base_store.get_plate_by_id(plate_id=test_plate.id) + plate: Plate = await base_store.get_plate_by_id(plate_id=test_plate.id) # THEN the plate is returned assert plate.id == test_plate.id -def test_get_plate_by_plate_id(base_store: Store, test_plate: Plate): +async def test_get_plate_by_plate_id(base_store: Store, test_plate: Plate): # GIVEN a store with a plate # WHEN getting the plate by plate id - plate: Plate = base_store.get_plate_by_plate_id(plate_id=test_plate.plate_id) + plate: Plate = await base_store.get_plate_by_plate_id(plate_id=test_plate.plate_id) # THEN the plate is returned assert plate.plate_id == test_plate.plate_id -def get_user_by_id(base_store: Store, test_user: User): +async def test_get_user_by_id(base_store: Store, test_user: User): # GIVEN a store with a user # WHEN getting the user by id - user: User = base_store.get_user_by_id(user_id=test_user.id) + user: User = await base_store.get_user_by_id(user_id=test_user.id) # THEN the user is returned assert user.id == test_user.id -def get_user_by_email(base_store: Store, test_user: User): +async def test_get_user_by_email(base_store: Store, test_user: User): # GIVEN a store with a user # WHEN getting the user by email - user: User = base_store.get_user_by_email(email=test_user.email) + user: User = await base_store.get_user_by_email(email=test_user.email) # THEN the user is returned assert user.email == test_user.email -def get_user_with_skip_and_limit(base_store: Store, test_users: list[User], helpers: StoreHelpers): - # GIVEN store with a user +async def get_user_with_skip_and_limit( + base_store: Store, test_users: list[User], helpers: StoreHelpers +): + # GIVEN a store with a user out_of_limit_user: User = test_users[0] out_of_limit_user.id = 3 - helpers.ensure_user(store=base_store, user=out_of_limit_user) + await helpers.ensure_user(store=base_store, user=out_of_limit_user) - # WHEN getting the user with skip and limit - users: list[User] = base_store.get_users_with_skip_and_limit(skip=0, limit=2) + # WHEN getting the users with skip and limit + users: list[User] = await base_store.get_users_with_skip_and_limit(skip=0, limit=2) - # THEN the user is returned + # THEN the users are returned assert users == test_users -def test_get_genotype_by_id(base_store: Store, test_genotype: Genotype): - # GIVEN store with a genotype +async def test_get_genotype_by_id(base_store: Store, test_genotype: Genotype): + # GIVEN a store with a genotype # WHEN getting the genotype by id - genotype: Genotype = base_store.get_genotype_by_id(entry_id=test_genotype.id) + genotype: Genotype = await base_store.get_genotype_by_id(entry_id=test_genotype.id) # THEN the genotype is returned assert genotype.id == test_genotype.id -def test_get_snps(base_store: Store, test_snps: list[SNP]): +async def test_get_snps(base_store: Store, test_snps: list[SNP]): # GIVEN a store with a SNP # WHEN getting the SNPs - snps: list[SNP] = base_store.get_snps() + snps: list[SNP] = await base_store.get_snps() # THEN the SNPs are returned assert len(snps) == len(test_snps) -def test_get_snps_by_limit_and_skip(base_store: Store, test_snps: list[SNP]): - # GIVEN store with SNPs +async def 
test_get_snps_by_limit_and_skip(base_store: Store, test_snps: list[SNP]): + # GIVEN a store with SNPs out_of_limit_snp: SNP = test_snps[0] out_of_limit_snp.id = 3 - base_store.create_snps(snps=[out_of_limit_snp]) + await base_store.create_snps(snps=[out_of_limit_snp]) + # WHEN getting the SNPs - snps: list[SNP] = base_store.get_snps_by_limit_and_skip(skip=0, limit=2) + snps: list[SNP] = await base_store.get_snps_by_limit_and_skip(skip=0, limit=2) # THEN the SNPs are returned assert len(snps) == len(test_snps) -def test_get_ordered_plates(base_store: Store, test_plates: list[Plate], helpers: StoreHelpers): +async def test_get_ordered_plates( + base_store: Store, test_plates: list[Plate], helpers: StoreHelpers +): # GIVEN a store with the plates and plate not fulfilling the limit - plate_order_params = PlateOrderParams(sort_order="acs", order_by="plate_id", skip=0, limit=2) + plate_order_params = PlateOrderParams(sort_order="asc", order_by="plate_id", skip=0, limit=2) out_of_limit_plate: Plate = test_plates[0] out_of_limit_plate.plate_id = "ID3" out_of_limit_plate.id = 3 - helpers.ensure_plate(store=base_store, plate=out_of_limit_plate) + await helpers.ensure_plate(store=base_store, plate=out_of_limit_plate) # WHEN getting the ordered plates - plates: list[Plate] = base_store.get_ordered_plates(order_params=plate_order_params) + plates: list[Plate] = await base_store.get_ordered_plates(order_params=plate_order_params) # THEN the plates are returned assert len(plates) == len(test_plates) diff --git a/tests/database/crud/test_update.py b/tests/database/crud/test_update.py index 84fc000..3a8fe0b 100644 --- a/tests/database/crud/test_update.py +++ b/tests/database/crud/test_update.py @@ -2,95 +2,96 @@ from genotype_api.database.filter_models.plate_models import PlateSignOff from genotype_api.database.filter_models.sample_models import SampleSexesUpdate -from genotype_api.database.models import Sample, Plate +from genotype_api.database.models import Plate, Sample, User from genotype_api.database.store import Store from tests.store_helpers import StoreHelpers -def test_refresh_sample_status(store: Store, test_sample: Sample, helpers: StoreHelpers): +async def test_refresh_sample_status(store: Store, test_sample: Sample, helpers: StoreHelpers): # GIVEN a store with a sample with an initial status initial_status: str = "initial_status" test_sample.status = initial_status - helpers.ensure_sample(store=store, sample=test_sample) + await helpers.ensure_sample(store=store, sample=test_sample) # WHEN updating the sample status - store.refresh_sample_status(sample=test_sample) + await store.refresh_sample_status(sample=test_sample) # THEN the sample status is updated - updated_sample = store.get_sample_by_id(sample_id=test_sample.id) + updated_sample = await store.get_sample_by_id(sample_id=test_sample.id) assert updated_sample.status != initial_status -def test_update_sample_comment(store: Store, test_sample: Sample, helpers: StoreHelpers): +async def test_update_sample_comment(store: Store, test_sample: Sample, helpers: StoreHelpers): # GIVEN a sample and a store with the sample initial_comment: str = "initial_comment" test_sample.comment = initial_comment - helpers.ensure_sample(store=store, sample=test_sample) + await helpers.ensure_sample(store=store, sample=test_sample) # WHEN updating the sample comment new_comment: str = "new_comment" - store.update_sample_comment(sample_id=test_sample.id, comment=new_comment) + await store.update_sample_comment(sample_id=test_sample.id, comment=new_comment) # 
THEN the sample comment is updated - updated_sample = store.get_sample_by_id(sample_id=test_sample.id) + updated_sample = await store.get_sample_by_id(sample_id=test_sample.id) assert updated_sample.comment == new_comment -def test_update_sample_status(store: Store, test_sample: Sample, helpers: StoreHelpers): +async def test_update_sample_status(store: Store, test_sample: Sample, helpers: StoreHelpers): # GIVEN a sample and a store with the sample initial_status: str = "initial_status" test_sample.status = initial_status - helpers.ensure_sample(store=store, sample=test_sample) + await helpers.ensure_sample(store=store, sample=test_sample) # WHEN updating the sample status new_status: str = "new_status" - store.update_sample_status(sample_id=test_sample.id, status=new_status) + await store.update_sample_status(sample_id=test_sample.id, status=new_status) # THEN the sample status is updated - updated_sample = store.get_sample_by_id(sample_id=test_sample.id) + updated_sample = await store.get_sample_by_id(sample_id=test_sample.id) assert updated_sample.status == new_status -def test_update_user_email(store: Store, test_user, helpers: StoreHelpers): +async def test_update_user_email(store: Store, test_user: User, helpers: StoreHelpers): # GIVEN a user and a store with the user initial_email: str = "initial_email" test_user.email = initial_email - helpers.ensure_user(store=store, user=test_user) + await helpers.ensure_user(store=store, user=test_user) # WHEN updating the user email new_email: str = "new_email" - store.update_user_email(user=test_user, email=new_email) + await store.update_user_email(user=test_user, email=new_email) # THEN the user email is updated - updated_user = store.get_user_by_id(user_id=test_user.id) + updated_user = await store.get_user_by_id(user_id=test_user.id) assert updated_user.email == new_email -def test_update_plate_sign_off( +async def test_update_plate_sign_off( store: Store, unsigned_plate: Plate, plate_sign_off: PlateSignOff, helpers: StoreHelpers ): # GIVEN a plate and a store with the plate - helpers.ensure_plate(store=store, plate=unsigned_plate) + await helpers.ensure_plate(store=store, plate=unsigned_plate) # WHEN updating the plate sign off - store.update_plate_sign_off(plate=unsigned_plate, plate_sign_off=plate_sign_off) + await store.update_plate_sign_off(plate=unsigned_plate, plate_sign_off=plate_sign_off) # THEN the plate sign off is updated - updated_plate = store.get_plate_by_id(plate_id=unsigned_plate.id) + updated_plate = await store.get_plate_by_id(plate_id=unsigned_plate.id) assert updated_plate.signed_by == plate_sign_off.user_id assert updated_plate.signed_at == plate_sign_off.signed_at assert updated_plate.method_document == plate_sign_off.method_document assert updated_plate.method_version == plate_sign_off.method_version -def test_update_sample_sex(base_store: Store, sample_sex_update: SampleSexesUpdate): +async def test_update_sample_sex(base_store: Store, sample_sex_update: SampleSexesUpdate): # GIVEN a store with a sample, analysis # WHEN updating the sex of the sample - base_store.update_sample_sex(sample_sex_update) + await base_store.update_sample_sex(sample_sex_update) - # THEN the sex of the sample and analysis - updated_sample = base_store.get_sample_by_id(sample_id=sample_sex_update.sample_id) + # THEN the sex of the sample and analysis is updated + updated_sample = await base_store.get_sample_by_id(sample_id=sample_sex_update.sample_id) assert updated_sample.sex == sample_sex_update.sex for analysis in 
updated_sample.analyses:
         assert analysis.sex == sample_sex_update.genotype_sex
diff --git a/tests/database/filters/test_analysis_filters.py b/tests/database/filters/test_analysis_filters.py
index f0e1917..7f2dd69 100644
--- a/tests/database/filters/test_analysis_filters.py
+++ b/tests/database/filters/test_analysis_filters.py
@@ -4,68 +4,69 @@
 
 from genotype_api.database.filters.analysis_filter import (
     filter_analyses_by_id,
-    filter_analyses_by_type,
     filter_analyses_by_plate_id,
     filter_analyses_by_sample_id,
+    filter_analyses_by_type,
 )
 from genotype_api.database.models import Analysis
 from genotype_api.database.store import Store
 from tests.store_helpers import StoreHelpers
 
 
-def test_filter_analyses_by_id(base_store: Store, test_analysis, helpers):
+async def test_filter_analyses_by_id(base_store: Store, test_analysis, helpers):
     # GIVEN an analysis
 
     # WHEN filtering analyses by id
     query: Query = base_store._get_query(Analysis)
-    analyses: list[Analysis] = filter_analyses_by_id(
-        analysis_id=test_analysis.id, analyses=query
-    ).all()
+    filtered_query = filter_analyses_by_id(analysis_id=test_analysis.id, analyses=query)
+    result = await base_store.session.execute(filtered_query)
+    analyses: list[Analysis] = result.scalars().all()
 
     # THEN assert the analysis is returned
     assert analyses
     assert analyses[0].id == test_analysis.id
 
 
-def test_filter_analyses_by_type(base_store: Store, test_analysis: Analysis, helpers: StoreHelpers):
+async def test_filter_analyses_by_type(
+    base_store: Store, test_analysis: Analysis, helpers: StoreHelpers
+):
     # GIVEN an analysis
 
     # WHEN filtering analyses by type
     query: Query = base_store._get_query(Analysis)
-
-    analyses: list[Analysis] = filter_analyses_by_type(
-        type=test_analysis.type, analyses=query
-    ).all()
+    filtered_query = filter_analyses_by_type(type=test_analysis.type, analyses=query)
+    result = await base_store.session.execute(filtered_query)
+    analyses: list[Analysis] = result.scalars().all()
 
     # THEN assert the analysis is returned
     assert analyses
     assert analyses[0].type == test_analysis.type
 
 
-def test_filter_analyses_by_plate_id(
+async def test_filter_analyses_by_plate_id(
     base_store: Store, test_analysis: Analysis, helpers: StoreHelpers
 ):
     # GIVEN an analysis
 
     # WHEN filtering analyses by plate id
     query: Query = base_store._get_query(Analysis)
-    analyses: list[Analysis] = filter_analyses_by_plate_id(
-        plate_id=test_analysis.plate_id, analyses=query
-    ).all()
+    filtered_query = filter_analyses_by_plate_id(plate_id=test_analysis.plate_id, analyses=query)
+    result = await base_store.session.execute(filtered_query)
+    analyses: list[Analysis] = result.scalars().all()
 
     # THEN assert the analysis is returned
     assert analyses
     assert analyses[0].plate_id == test_analysis.plate_id
 
 
-def test_filter_analyses_by_sample_id(base_store: Store, test_analysis, helpers):
+async def test_filter_analyses_by_sample_id(base_store: Store, test_analysis, helpers):
     # GIVEN an analysis
 
     # WHEN filtering analyses by sample id
     query: Query = base_store._get_query(Analysis)
-    analyses: list[Analysis] = filter_analyses_by_sample_id(
-        sample_id=test_analysis.sample_id, analyses=query
-    ).all()
+    filtered_query = filter_analyses_by_sample_id(sample_id=test_analysis.sample_id, analyses=query)
+    result = await base_store.session.execute(filtered_query)
+    analyses: list[Analysis] = result.scalars().all()
 
     # THEN assert the analysis is returned
     assert analyses
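
The analysis-filter tests above establish the async pattern that the remaining filter-test files in this patch follow: build a Select, narrow it with a filter function, execute it on the async session, and unpack the ORM objects from the Result. A minimal self-contained sketch of that pattern, assuming a plain SQLAlchemy 2.0 async setup (the filter function body and the session are illustrative stand-ins for the real fixtures and helpers):

from sqlalchemy import Select, select
from sqlalchemy.ext.asyncio import AsyncSession

from genotype_api.database.models import Analysis


def filter_analyses_by_plate_id(plate_id: int, analyses: Select) -> Select:
    # Assumed shape of the filter functions: take a Select, return a narrowed Select.
    return analyses.where(Analysis.plate_id == plate_id)


async def analyses_on_plate(session: AsyncSession, plate_id: int) -> list[Analysis]:
    query: Select = select(Analysis)                  # what _get_query(Analysis) builds
    filtered_query = filter_analyses_by_plate_id(plate_id=plate_id, analyses=query)
    result = await session.execute(filtered_query)    # single async round trip
    return result.scalars().all()                     # ORM objects, not Row tuples
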
diff --git a/tests/database/filters/test_genotype_filters.py b/tests/database/filters/test_genotype_filters.py
index c304e80..53093b8 100644
--- a/tests/database/filters/test_genotype_filters.py
+++ b/tests/database/filters/test_genotype_filters.py
@@ -8,14 +8,15 @@
 from tests.store_helpers import StoreHelpers
 
 
-def test_filter_genotypes_by_id(store: Store, test_genotype: Genotype, helpers: StoreHelpers):
+async def test_filter_genotypes_by_id(store: Store, test_genotype: Genotype, helpers: StoreHelpers):
     # GIVEN a genotype
-    helpers.ensure_genotype(store=store, genotype=test_genotype)
+    await helpers.ensure_genotype(store=store, genotype=test_genotype)
 
     # WHEN filtering genotypes by id
     query: Query = store._get_query(Genotype)
-    genotypes: list[Genotype] = filter_genotypes_by_id(
-        entry_id=test_genotype.id, genotypes=query
-    ).all()
+    filtered_query = filter_genotypes_by_id(entry_id=test_genotype.id, genotypes=query)
+    result = await store.session.execute(filtered_query)
+    genotypes: list[Genotype] = result.scalars().all()
 
     # THEN assert the genotype is returned
diff --git a/tests/database/filters/test_plate_filters.py b/tests/database/filters/test_plate_filters.py
index 08a4974..9434568 100644
--- a/tests/database/filters/test_plate_filters.py
+++ b/tests/database/filters/test_plate_filters.py
@@ -1,6 +1,9 @@
 """Module to test the plate filters."""
 
+from sqlalchemy.orm import Query
+
 from genotype_api.database.filters.plate_filters import (
     filter_plates_by_id,
     filter_plates_by_plate_id,
 )
@@ -8,26 +11,36 @@
 from genotype_api.database.store import Store
 
 
-def test_filter_plates_by_id(base_store: Store, test_plate: Plate):
+async def test_filter_plates_by_id(base_store: Store, test_plate: Plate):
     """Test filtering plates by id."""
     # GIVEN a store with a plate
 
     # WHEN filtering plates by id
-    query = base_store._get_query(Plate)
-    plate = filter_plates_by_id(entry_id=test_plate.id, plates=query).first()
+    query: Query = base_store._get_query(Plate)
+    filtered_query = filter_plates_by_id(entry_id=test_plate.id, plates=query)
+    result = await base_store.session.execute(filtered_query)
+    plate: Plate = result.scalars().first()
 
     # THEN the plate is returned
     assert plate
     assert plate.id == test_plate.id
 
 
-def test_filter_plates_by_plate_id(base_store: Store, test_plate: Plate):
+async def test_filter_plates_by_plate_id(base_store: Store, test_plate: Plate):
     """Test filtering plates by plate id."""
     # GIVEN a store with a plate
 
     # WHEN filtering plates by plate id
-    query = base_store._get_query(Plate)
-    plate = filter_plates_by_plate_id(plate_id=test_plate.plate_id, plates=query).first()
+    query: Query = base_store._get_query(Plate)
+    filtered_query = filter_plates_by_plate_id(plate_id=test_plate.plate_id, plates=query)
+    result = await base_store.session.execute(filtered_query)
+    plate: Plate = result.scalars().first()
 
     # THEN the plate is returned
     assert plate
diff --git a/tests/database/filters/test_sample_filters.py b/tests/database/filters/test_sample_filters.py
index 1b8dfc1..dd95566 100644
--- a/tests/database/filters/test_sample_filters.py
+++ b/tests/database/filters/test_sample_filters.py
@@ -3,71 +3,90 @@
 from sqlalchemy.orm import Query
 
 from genotype_api.database.filters.sample_filters import (
+    add_skip_and_limit,
+    filter_samples_analysed_on_plate,
     filter_samples_by_id,
     filter_samples_contain_id,
     filter_samples_having_comment,
     filter_samples_without_status,
-    filter_samples_analysed_on_plate,
-    add_skip_and_limit,
 )
-from genotype_api.database.models import Sample, Plate
+from genotype_api.database.models import Plate, Sample
 from genotype_api.database.store import Store
 from tests.store_helpers import StoreHelpers
 
 
-def test_filter_samples_by_id(base_store: Store, test_sample: Sample):
+async def test_filter_samples_by_id(base_store: Store, test_sample: Sample):
    """Test filtering samples by id."""
     # GIVEN a store with a sample
 
     # WHEN filtering samples by id
     query: Query = base_store._get_query(Sample)
-    sample: Sample = filter_samples_by_id(sample_id=test_sample.id, samples=query).first()
+    filtered_query = filter_samples_by_id(sample_id=test_sample.id, samples=query)
+    result = await base_store.session.execute(filtered_query)
+    sample: Sample = result.scalars().first()
 
     # THEN the sample is returned
     assert sample
     assert sample.id == test_sample.id
 
 
-def test_filter_samples_contain_id(base_store: Store, test_sample: Sample):
+async def test_filter_samples_contain_id(base_store: Store, test_sample: Sample):
     """Test filtering samples by id."""
     # GIVEN a store with a sample
 
     # WHEN filtering samples by id
     query: Query = base_store._get_query(Sample)
-    sample: Sample = filter_samples_contain_id(sample_id=test_sample.id, samples=query).first()
+    filtered_query = filter_samples_contain_id(sample_id=test_sample.id, samples=query)
+    result = await base_store.session.execute(filtered_query)
+    sample: Sample = result.scalars().first()
 
     # THEN the sample is returned
     assert sample
     assert sample.id == test_sample.id
 
 
-def test_filter_samples_contain_id_when_no_id(base_store: Store, test_sample: Sample):
+async def test_filter_samples_contain_id_when_no_id(base_store: Store, test_sample: Sample):
     """Test filtering samples by id when no id is provided."""
     # GIVEN a store with two samples
-    assert len(base_store._get_query(Sample).all()) == 2
+    result = await base_store.session.execute(base_store._get_query(Sample))
+    assert len(result.scalars().all()) == 2
 
     # WHEN filtering samples by id
     query: Query = base_store._get_query(Sample)
-    samples: list[Sample] = filter_samples_contain_id(sample_id=None, samples=query).all()
+    filtered_query = filter_samples_contain_id(sample_id=None, samples=query)
+    result = await base_store.session.execute(filtered_query)
+    samples: list[Sample] = result.scalars().all()
 
     # THEN all samples are returned
     assert len(samples) == 2
 
 
-def test_filter_samples_having_comment(
+async def test_filter_samples_having_comment(
     base_store: Store, test_sample: Sample, helpers: StoreHelpers
 ):
     """Test filtering samples by having comment."""
     # GIVEN a store with samples having a comment and one without
-    assert len(base_store._get_query(Sample).all()) == 2
+    result = await base_store.session.execute(base_store._get_query(Sample))
+    assert len(result.scalars().all()) == 2
 
     sample_without_comment: Sample = test_sample
     sample_without_comment.comment = None
     sample_without_comment.id = "sample_without_status"
-    helpers.ensure_sample(store=base_store, sample=sample_without_comment)
+    await helpers.ensure_sample(store=base_store, sample=sample_without_comment)
 
     # WHEN filtering samples by having comment
     query: Query = base_store._get_query(Sample)
-    samples: list[Sample] = filter_samples_having_comment(samples=query, is_commented=True).all()
+    filtered_query = filter_samples_having_comment(samples=query, is_commented=True)
+    result = await base_store.session.execute(filtered_query)
+    samples: list[Sample] = result.scalars().all()
 
     # THEN samples with comments are returned
     assert samples
@@ -75,20 +94,23 @@ def test_filter_samples_having_comment(
     for sample in samples:
         assert sample.comment
 
 
-def test_filter_samples_having_comment_none_provided(base_store: Store, test_sample: Sample):
+async def test_filter_samples_having_comment_none_provided(base_store: Store, test_sample: Sample):
     """Test filtering samples by having comment."""
     # GIVEN a store with samples having a comment and one without
-    assert len(base_store._get_query(Sample).all()) == 2
+    result = await base_store.session.execute(base_store._get_query(Sample))
+    assert len(result.scalars().all()) == 2
 
     # WHEN filtering samples by having comment
     query: Query = base_store._get_query(Sample)
-    samples: list[Sample] = filter_samples_having_comment(samples=query, is_commented=None).all()
+    filtered_query = filter_samples_having_comment(samples=query, is_commented=None)
+    result = await base_store.session.execute(filtered_query)
+    samples: list[Sample] = result.scalars().all()
 
-    # THEN the sample is returned
+    # THEN both samples are returned
     assert len(samples) == 2
 
 
-def test_filter_samples_without_status(
+async def test_filter_samples_without_status(
     base_store: Store, test_sample: Sample, helpers: StoreHelpers
 ):
     """Test filtering samples by having status."""
@@ -96,11 +118,14 @@
     sample_without_status: Sample = test_sample
     sample_without_status.status = None
     sample_without_status.id = "sample_without_status"
-    helpers.ensure_sample(store=base_store, sample=sample_without_status)
+    await helpers.ensure_sample(store=base_store, sample=sample_without_status)
 
     # WHEN filtering samples by having a status
     query: Query = base_store._get_query(Sample)
-    samples: list[Sample] = filter_samples_without_status(samples=query, is_missing=True).all()
+    filtered_query = filter_samples_without_status(samples=query, is_missing=True)
+    result = await base_store.session.execute(filtered_query)
+    samples: list[Sample] = result.scalars().all()
 
-    # THEN no sample is returned
+    # THEN only samples without a status are returned
     assert samples
@@ -108,20 +133,23 @@
     for sample in samples:
         assert not sample.status
 
 
-def test_filter_samples_without_status_none_provided(base_store: Store, test_sample: Sample):
+async def test_filter_samples_without_status_none_provided(base_store: Store, test_sample: Sample):
     """Test filtering samples by having status."""
     # GIVEN a store with a sample that has a status
-    assert len(base_store._get_query(Sample).all()) == 2
+    result = await base_store.session.execute(base_store._get_query(Sample))
+    assert len(result.scalars().all()) == 2
 
     # WHEN filtering samples by having a status
     query: Query = base_store._get_query(Sample)
-    samples: list[Sample] = filter_samples_without_status(samples=query, is_missing=None).all()
+    filtered_query = filter_samples_without_status(samples=query, is_missing=None)
+    result = await base_store.session.execute(filtered_query)
+    samples: list[Sample] = result.scalars().all()
 
     # THEN all samples are returned
     assert len(samples) == 2
 
 
-def test_filter_samples_analysed_on_plate(
+async def test_filter_samples_analysed_on_plate(
     base_store: Store, test_sample: Sample, test_plate: Plate
 ):
-    """Test filtering samples by having comment."""
+    """Test filtering samples analysed on a plate."""
@@ -129,36 +157,48 @@
 
     # WHEN filtering samples analysed on a plate
     query: Query = base_store._get_join_analysis_on_sample()
-    sample: Sample = filter_samples_analysed_on_plate(samples=query, plate_id=test_plate.id).first()
+    filtered_query = filter_samples_analysed_on_plate(samples=query, plate_id=test_plate.id)
+    result = await base_store.session.execute(filtered_query)
+    sample: Sample = result.scalars().first()
 
     # THEN one sample is returned
     assert sample.analyses[0].plate_id == test_plate.id
 
 
-def test_filter_samples_analysed_on_plate_none_provided(
+async def test_filter_samples_analysed_on_plate_none_provided(
     base_store: Store,
     test_sample: Sample,
 ):
-    """Test filtering samples by having comment."""
+    """Test filtering samples analysed on a plate when no plate id is provided."""
     # GIVEN a store with analysed samples
-    assert len(base_store._get_query(Sample).all()) == 2
+    result = await base_store.session.execute(base_store._get_query(Sample))
+    assert len(result.scalars().all()) == 2
 
     # WHEN filtering samples analysed on a plate
     query: Query = base_store._get_join_analysis_on_sample()
-    samples: list[Sample] = filter_samples_analysed_on_plate(samples=query, plate_id=None).all()
+    filtered_query = filter_samples_analysed_on_plate(samples=query, plate_id=None)
+    result = await base_store.session.execute(filtered_query)
+    samples: list[Sample] = result.scalars().all()
 
     # THEN all samples are returned
     assert len(samples) == 2
 
 
-def test_add_skip_and_limit(base_store: Store, test_sample: Sample):
+async def test_add_skip_and_limit(base_store: Store, test_sample: Sample):
     """Test add_skip_and_limit function."""
     # GIVEN a store with two samples
-    assert len(base_store._get_query(Sample).all()) == 2
+    result = await base_store.session.execute(base_store._get_query(Sample))
+    assert len(result.scalars().all()) == 2
 
     # WHEN adding skip and limit to the query
     query: Query = base_store._get_query(Sample)
-    samples: list[Sample] = add_skip_and_limit(query, skip=0, limit=1).all()
+    filtered_query = add_skip_and_limit(query, skip=0, limit=1)
+    result = await base_store.session.execute(filtered_query)
+    samples: list[Sample] = result.scalars().all()
 
     # THEN one sample is returned
     assert samples
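
The analysed-on-plate tests above go through _get_join_analysis_on_sample(), which, judging by the assertions on sample.analyses[0].plate_id, presumably joins Sample against Analysis so the plate filter can constrain Analysis columns while still selecting Sample rows. A sketch of what such a query builder could look like, offered as an assumption rather than the store's actual code:

from sqlalchemy import Select, select

from genotype_api.database.models import Analysis, Sample


def get_join_analysis_on_sample() -> Select:
    # Assumed shape: join samples to their analyses so filters can reference
    # Analysis.plate_id while the query still yields Sample objects.
    return select(Sample).join(Analysis, Analysis.sample_id == Sample.id)
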
diff --git a/tests/database/filters/test_snp_filters.py b/tests/database/filters/test_snp_filters.py
index 98e04b4..f09b952 100644
--- a/tests/database/filters/test_snp_filters.py
+++ b/tests/database/filters/test_snp_filters.py
@@ -2,33 +2,45 @@
 
 from sqlalchemy.orm import Query
 
-from genotype_api.database.filters.snp_filters import filter_snps_by_id, add_skip_and_limit
+from genotype_api.database.filters.snp_filters import (
+    add_skip_and_limit,
+    filter_snps_by_id,
+)
 from genotype_api.database.models import SNP
 from genotype_api.database.store import Store
 
 
-def test_filter_snps_by_id(base_store: Store, test_snp: SNP):
+async def test_filter_snps_by_id(base_store: Store, test_snp: SNP):
     """Test filter_snps_by_id function."""
     # GIVEN a store with a SNP
 
     # WHEN filtering a SNP by id
     query: Query = base_store._get_query(SNP)
-    snp: SNP = filter_snps_by_id(snp_id=test_snp.id, snps=query).first()
+    filtered_query = filter_snps_by_id(snp_id=test_snp.id, snps=query)
+    result = await base_store.session.execute(filtered_query)
+    snp: SNP = result.scalars().first()
 
     # THEN the SNP is returned
     assert snp
     assert snp.id == test_snp.id
 
 
-def test_add_skip_and_limit(base_store: Store, test_snp: SNP):
+async def test_add_skip_and_limit(base_store: Store, test_snp: SNP):
     """Test add_skip_and_limit function."""
     # GIVEN a store with two SNPs
+    result = await base_store.session.execute(base_store._get_query(SNP))
+    assert len(result.scalars().all()) == 2
 
     # WHEN adding skip and limit to the query
     query: Query = base_store._get_query(SNP)
-    snps: list[SNP] = add_skip_and_limit(query, skip=0, limit=1).all()
+    filtered_query = add_skip_and_limit(query, skip=0, limit=1)
+    result = await base_store.session.execute(filtered_query)
+    snps: list[SNP] = result.scalars().all()
 
     # THEN one SNP is returned
     assert snps
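
These tests call the filter helpers directly, while the store's read layer (see the crud/read.py hunk in patch 28 below) goes through apply_snp_filter with a list of SNPFilter members, as in filter_functions = [SNPFilter.SKIP_AND_LIMIT]. A sketch of how such an enum-dispatched apply helper is typically wired; only the call signature is taken from this series, the rest is illustrative:

import functools
from enum import Enum

from sqlalchemy import Select


def add_skip_and_limit(snps: Select, skip: int = 0, limit: int | None = None, **_) -> Select:
    # Page the query; unrelated kwargs forwarded by apply_snp_filter are ignored via **_.
    return snps.offset(skip).limit(limit)


class SNPFilter(Enum):
    # functools.partial keeps the callable from being bound as a method,
    # so it survives as a proper enum value.
    SKIP_AND_LIMIT = functools.partial(add_skip_and_limit)


def apply_snp_filter(snps: Select, filter_functions: list[SNPFilter], **kwargs) -> Select:
    # Fold each selected filter over the query and return the final Select.
    for filter_function in filter_functions:
        snps = filter_function.value(snps, **kwargs)
    return snps

With that shape, the call in crud/read.py, apply_snp_filter(snps=snps, filter_functions=[SNPFilter.SKIP_AND_LIMIT], skip=skip, limit=limit), reduces to query.offset(skip).limit(limit).
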
diff --git a/tests/database/filters/test_user_filters.py b/tests/database/filters/test_user_filters.py
index 7718d00..8eea428 100644
--- a/tests/database/filters/test_user_filters.py
+++ b/tests/database/filters/test_user_filters.py
@@ -3,47 +3,63 @@
 
 from sqlalchemy.orm import Query
 
 from genotype_api.database.filters.user_filters import (
-    filter_users_by_id,
     filter_users_by_email,
+    filter_users_by_id,
     filter_users_by_name,
 )
 from genotype_api.database.models import User
 from genotype_api.database.store import Store
 
 
-def test_filter_users_by_id(base_store: Store, test_user: User):
+async def test_filter_users_by_id(base_store: Store, test_user: User):
     """Test filtering users by id."""
     # GIVEN a store with a user
 
     # WHEN filtering users by id
     query: Query = base_store._get_query(User)
-    user: User = filter_users_by_id(user_id=test_user.id, users=query).first()
+    filtered_query = filter_users_by_id(user_id=test_user.id, users=query)
+    result = await base_store.session.execute(filtered_query)
+    user: User = result.scalars().first()
 
     # THEN the user is returned
     assert user
     assert user.id == test_user.id
 
 
-def test_filter_users_by_email(base_store: Store, test_user: User):
+async def test_filter_users_by_email(base_store: Store, test_user: User):
     """Test filtering users by email."""
     # GIVEN a store with a user
 
     # WHEN filtering users by email
     query: Query = base_store._get_query(User)
-    user: User = filter_users_by_email(email=test_user.email, users=query).first()
+    filtered_query = filter_users_by_email(email=test_user.email, users=query)
+    result = await base_store.session.execute(filtered_query)
+    user: User = result.scalars().first()
 
     # THEN the user is returned
     assert user
     assert user.email == test_user.email
 
 
-def test_filter_users_by_name(base_store: Store, test_user: User):
+async def test_filter_users_by_name(base_store: Store, test_user: User):
     """Test filtering users by name."""
     # GIVEN a store with a user
 
     # WHEN filtering users by name
     query: Query = base_store._get_query(User)
-    user: User = filter_users_by_name(name=test_user.name, users=query).first()
+    filtered_query = filter_users_by_name(name=test_user.name, users=query)
+    result = await base_store.session.execute(filtered_query)
+    user: User = result.scalars().first()
 
     # THEN the user is returned
     assert user
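
One practical caveat for this whole patch: every test is now a bare async def test_* without a @pytest.mark.asyncio marker, and conftest.py exposes async fixtures (including the session-scoped store fixture). That only works if pytest is configured to collect async tests automatically, presumably via pytest-asyncio in auto mode; the series does not show that configuration, so the snippet below is an assumption about what pyproject.toml would need:

[tool.pytest.ini_options]
# Assumed pytest-asyncio setting: run bare `async def` tests and async
# fixtures without per-test markers.
asyncio_mode = "auto"
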
diff --git a/tests/store_helpers.py b/tests/store_helpers.py
index bc19e2d..9d685c3 100644
--- 
a/tests/store_helpers.py +++ b/tests/store_helpers.py @@ -1,6 +1,6 @@ """Module that holds the store helper to be used to setup the testing environment.""" -from genotype_api.database.models import Sample, Analysis, User, SNP, Plate, Genotype +from genotype_api.database.models import SNP, Analysis, Genotype, Plate, Sample, User from genotype_api.database.store import Store @@ -11,42 +11,42 @@ class StoreHelpers: def add_entity(cls, store: Store, entity: Sample | Analysis | SNP | Plate | User | Genotype): store.session.add(entity) - def ensure_sample(self, store: Store, sample: Sample, analyses: list[Analysis] = None): + async def ensure_sample(self, store: Store, sample: Sample, analyses: list[Analysis] = None): """Add a sample to the store. Ensure its analyses are present.""" if analyses: for analysis in analyses: - if not store.get_analysis_by_id(analysis.id): + if not await store.get_analysis_by_id(analysis.id): self.add_entity(store=store, entity=analysis) self.add_entity(store=store, entity=sample) - store.session.commit() + await store.session.commit() - def ensure_snp(self, store: Store, snp: SNP): + async def ensure_snp(self, store: Store, snp: SNP): self.add_entity(store=store, entity=snp) - store.session.commit() + await store.session.commit() - def ensure_plate( + async def ensure_plate( self, store: Store, plate: Plate, analyses: list[Analysis] = None, user: User = None ): """Add a plate to the store ensure the associated user and analyses are present.""" - if user and not store.get_user_by_email(user.email): + if user and not await store.get_user_by_email(user.email): self.add_entity(store=store, entity=user) if analyses: for analysis in analyses: - if not store.get_analysis_by_id(analysis.id): + if not await store.get_analysis_by_id(analysis.id): self.add_entity(store=store, entity=analysis) self.add_entity(store=store, entity=plate) - store.session.commit() + await store.session.commit() - def ensure_user(self, store: Store, user: User, plates: list[Plate] = None): + async def ensure_user(self, store: Store, user: User, plates: list[Plate] = None): """Add a user to the store and ensure the associated plates are present.""" if plates: for plate in plates: - if not store.get_plate_by_id(plate.id): + if not await store.get_plate_by_id(plate.id): self.add_entity(store=store, entity=plate) self.add_entity(store=store, entity=user) - store.session.commit() + await store.session.commit() - def ensure_analysis( + async def ensure_analysis( self, store: Store, analysis: Analysis, @@ -55,20 +55,20 @@ def ensure_analysis( genotypes: list[Genotype] = None, ): """Add an analysis to the store and ensure the associated sample, plate and genotypes are present.""" - if sample and not store.get_sample_by_id(sample.id): + if sample and not await store.get_sample_by_id(sample.id): self.add_entity(store=store, entity=sample) - if plate and not store.get_plate_by_id(plate.id): + if plate and not await store.get_plate_by_id(plate.id): self.add_entity(store=store, entity=plate) if genotypes: for genotype in genotypes: - if not store.get_genotype_by_id(genotype.id): + if not await store.get_genotype_by_id(genotype.id): self.add_entity(store=store, entity=genotype) self.add_entity(store=store, entity=analysis) - store.session.commit() + await store.session.commit() - def ensure_genotype(self, store: Store, genotype: Genotype, analysis: Analysis = None): + async def ensure_genotype(self, store: Store, genotype: Genotype, analysis: Analysis = None): """Add a genotype to the database and ensure the 
associated analysis is present.""" - if analysis and not store.get_analysis_by_id(analysis.id): + if analysis and not await store.get_analysis_by_id(analysis.id): self.add_entity(store=store, entity=analysis) self.add_entity(store=store, entity=genotype) - store.session.commit() + await store.session.commit() diff --git a/tests/test_store_helpers.py b/tests/test_store_helpers.py index 2cb654f..22ed12a 100644 --- a/tests/test_store_helpers.py +++ b/tests/test_store_helpers.py @@ -1,39 +1,39 @@ """Module to test the store helpers.""" -from genotype_api.database.models import Plate, User, SNP, Analysis, Genotype, Sample +from genotype_api.database.models import SNP, Analysis, Genotype, Plate, Sample, User from genotype_api.database.store import Store from tests.store_helpers import StoreHelpers -def test_ensure_user(helpers: StoreHelpers, store: Store, test_user: User, test_plate: Plate): - +async def test_ensure_user(helpers: StoreHelpers, store: Store, test_user: User, test_plate: Plate): # GIVEN a user and plates # WHEN ensuring a user - helpers.ensure_user(store=store, user=test_user, plates=[test_plate]) + await helpers.ensure_user(store=store, user=test_user, plates=[test_plate]) # THEN a user and the associated plates are added - added_user: User = store.get_user_by_email(test_user.email) + added_user: User = await store.get_user_by_email(test_user.email) assert added_user - added_plate: Plate = store.get_plate_by_id(test_plate.id) + added_plate: Plate = await store.get_plate_by_id(test_plate.id) assert added_plate assert added_plate.signed_by == test_user.id -def test_ensure_snp(store: Store, helpers: StoreHelpers, test_snp: SNP): +async def test_ensure_snp(store: Store, helpers: StoreHelpers, test_snp: SNP): # GIVEN a snp # WHEN adding a snp to the store - helpers.ensure_snp(store=store, snp=test_snp) + await helpers.ensure_snp(store=store, snp=test_snp) # THEN a snp is added - snp: SNP = store.get_snps()[0] + snps = await store.get_snps() + snp: SNP = snps[0] assert snp assert snp.id == test_snp.id -def test_ensure_plate( +async def test_ensure_plate( store: Store, helpers: StoreHelpers, test_plate: Plate, @@ -43,36 +43,38 @@ def test_ensure_plate( # GIVEN plates, a user and analyses # WHEN adding a plate to the store - helpers.ensure_plate(store=store, plate=test_plate, analyses=[test_analysis], user=test_user) + await helpers.ensure_plate( + store=store, plate=test_plate, analyses=[test_analysis], user=test_user + ) # THEN a plate and associated user and analyses are added - added_user: User = store.get_user_by_email(test_user.email) + added_user: User = await store.get_user_by_email(test_user.email) assert added_user - added_analysis: Analysis = store.get_analysis_by_id(test_analysis.id) + added_analysis: Analysis = await store.get_analysis_by_id(test_analysis.id) assert added_analysis - added_plate: Plate = store.get_plate_by_id(test_plate.id) + added_plate: Plate = await store.get_plate_by_id(test_plate.id) assert added_plate -def test_ensure_genotype( +async def test_ensure_genotype( store: Store, helpers: StoreHelpers, test_genotype: Genotype, test_analysis: Analysis ): # GIVEN a genotype and an associated analysis # WHEN adding a genotype and analysis to the store - helpers.ensure_genotype(store=store, genotype=test_genotype, analysis=test_analysis) + await helpers.ensure_genotype(store=store, genotype=test_genotype, analysis=test_analysis) - # THEN a genotype and analysis has been added - added_genotype: Genotype = store.get_genotype_by_id(test_genotype.id) + # THEN a 
genotype and analysis have been added + added_genotype: Genotype = await store.get_genotype_by_id(test_genotype.id) assert added_genotype - added_analysis: Analysis = store.get_analysis_by_id(test_analysis.id) + added_analysis: Analysis = await store.get_analysis_by_id(test_analysis.id) assert added_analysis -def test_ensure_analysis( +async def test_ensure_analysis( store: Store, helpers: StoreHelpers, test_analysis: Analysis, @@ -83,7 +85,7 @@ def test_ensure_analysis( # GIVEN an analysis, sample, plate and genotypes # WHEN adding an analysis to the store - helpers.ensure_analysis( + await helpers.ensure_analysis( store=store, analysis=test_analysis, sample=test_sample, @@ -92,25 +94,25 @@ def test_ensure_analysis( ) # THEN an analysis and associated sample, plate and genotypes are added - added_analysis: Analysis = store.get_analysis_by_id(test_analysis.id) + added_analysis: Analysis = await store.get_analysis_by_id(test_analysis.id) assert added_analysis - added_sample: Sample = store.get_sample_by_id(test_sample.id) + added_sample: Sample = await store.get_sample_by_id(test_sample.id) assert added_sample - added_plate: Plate = store.get_plate_by_id(test_plate.id) + added_plate: Plate = await store.get_plate_by_id(test_plate.id) assert added_plate - added_genotype: Genotype = store.get_genotype_by_id(test_genotype.id) + added_genotype: Genotype = await store.get_genotype_by_id(test_genotype.id) assert added_genotype -def test_ensure_sample(store: Store, test_sample: Sample, helpers: StoreHelpers): +async def test_ensure_sample(store: Store, test_sample: Sample, helpers: StoreHelpers): # GIVEN a sample # WHEN adding a sample to the store - helpers.ensure_sample(store=store, sample=test_sample) + await helpers.ensure_sample(store=store, sample=test_sample) # THEN a sample is added - added_sample: Sample = store.get_sample_by_id(test_sample.id) + added_sample: Sample = await store.get_sample_by_id(test_sample.id) assert added_sample From 5df584cfe9c6c4dfa777fa35c1472ac6f08e2ff8 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 22:23:23 +0200 Subject: [PATCH 26/65] Add async handling to user authentication --- genotype_api/security.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/genotype_api/security.py b/genotype_api/security.py index b30b479..58636d1 100644 --- a/genotype_api/security.py +++ b/genotype_api/security.py @@ -7,7 +7,7 @@ from genotype_api.config import security_settings from genotype_api.database.models import User -from genotype_api.database.store import get_store, Store +from genotype_api.database.store import Store, get_store from genotype_api.dto.user import CurrentUser @@ -69,7 +69,7 @@ async def get_active_user( ) -> CurrentUser: """Dependency for secure endpoints""" user_email = token_info["payload"]["email"] - db_user: User = store.get_user_by_email(email=user_email) + db_user: User = await store.get_user_by_email(email=user_email) if not db_user: raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not in DB") return CurrentUser( From f246d4978144d767408abd85549ff954bd410f84 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 22:23:57 +0200 Subject: [PATCH 27/65] Consolidate SQLAlchemy model imports --- genotype_api/database/models.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/genotype_api/database/models.py b/genotype_api/database/models.py index e7dba9c..5e582f2 100644 --- a/genotype_api/database/models.py +++ b/genotype_api/database/models.py @@ -1,13 +1,8 @@ from 
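The patches above move the store helpers, their tests, and the auth dependency from blocking session calls to awaited ones. A minimal, self-contained sketch of that conversion pattern, assuming SQLAlchemy 2.x's asyncio extension and the aiosqlite driver; the Sample model and ensure_sample helper below are toy stand-ins for illustration, not the project's real classes:

import asyncio

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Sample(Base):
    """Toy stand-in for the real Sample model."""

    __tablename__ = "sample"
    id: Mapped[str] = mapped_column(primary_key=True)


async def ensure_sample(session: AsyncSession, sample: Sample) -> None:
    """Add a sample and commit, awaiting only the calls that do I/O."""
    session.add(sample)  # add() is in-memory bookkeeping, so it is not awaited
    await session.commit()  # the commit flushes to the database and must be awaited


async def main() -> None:
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")
    async with engine.begin() as connection:
        await connection.run_sync(Base.metadata.create_all)
    async with async_sessionmaker(engine)() as session:
        await ensure_sample(session=session, sample=Sample(id="sample_1"))
        result = await session.execute(select(Sample))
        assert result.scalars().one().id == "sample_1"


asyncio.run(main())

The same split runs through the whole diff above: entity staging stays synchronous, while every commit, query, and lookup gains an await.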
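For the security patch in particular, the whole dependency becomes a coroutine so the user lookup inside it can be awaited; FastAPI awaits async dependencies transparently. A hedged sketch of that shape, where lookup_user is an illustrative stand-in for the store call, not the project's API:

from fastapi import Depends, FastAPI, HTTPException, status

app = FastAPI()


async def lookup_user(email: str) -> dict | None:
    """Illustrative stand-in for an awaited store lookup such as get_user_by_email."""
    known = {"user@example.com": {"email": "user@example.com"}}
    return known.get(email)


async def get_active_user(email: str) -> dict:
    """Dependency for secure endpoints: resolve the caller or reject the request."""
    db_user = await lookup_user(email)  # awaited, as in the patch
    if not db_user:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not in DB")
    return db_user


@app.get("/me")
async def read_me(current_user: dict = Depends(get_active_user)) -> dict:
    return current_user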
collections import Counter from datetime import datetime -from sqlalchemy import Integer, DateTime -from sqlalchemy.orm import relationship -from sqlalchemy import ( - Column, - ForeignKey, - String, -) -from sqlalchemy.orm import DeclarativeBase + +from sqlalchemy import Column, DateTime, ForeignKey, Integer, String +from sqlalchemy.orm import DeclarativeBase, relationship from sqlalchemy_utils import EmailType From 27a792a817e91aa336c2601178b2b47715aa11ef Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 22:35:38 +0200 Subject: [PATCH 28/65] remove duplicated functions --- genotype_api/database/crud/read.py | 36 ------------------------------ 1 file changed, 36 deletions(-) diff --git a/genotype_api/database/crud/read.py b/genotype_api/database/crud/read.py index 906247f..51cee52 100644 --- a/genotype_api/database/crud/read.py +++ b/genotype_api/database/crud/read.py @@ -268,39 +268,3 @@ async def get_snps_by_limit_and_skip(self, skip: int, limit: int) -> list[SNP]: ) result = await self.session.execute(filtered_query) return result.scalars().all() - - async def get_snps_by_limit_and_skip(self, skip: int, limit: int) -> list[SNP]: - snps: Query = self._get_query(SNP) - filter_functions = [SNPFilter.SKIP_AND_LIMIT] - filtered_query = apply_snp_filter( - snps=snps, filter_functions=filter_functions, skip=skip, limit=limit - ) - result = await self.session.execute(filtered_query) - return result.scalars().all() - - async def get_snps_by_limit_and_skip(self, skip: int, limit: int) -> list[SNP]: - snps: Query = self._get_query(SNP) - filter_functions = [SNPFilter.SKIP_AND_LIMIT] - filtered_query = apply_snp_filter( - snps=snps, filter_functions=filter_functions, skip=skip, limit=limit - ) - result = await self.session.execute(filtered_query) - return result.scalars().all() - - async def get_snps_by_limit_and_skip(self, skip: int, limit: int) -> list[SNP]: - snps: Query = self._get_query(SNP) - filter_functions = [SNPFilter.SKIP_AND_LIMIT] - filtered_query = apply_snp_filter( - snps=snps, filter_functions=filter_functions, skip=skip, limit=limit - ) - result = await self.session.execute(filtered_query) - return result.scalars().all() - - async def get_snps_by_limit_and_skip(self, skip: int, limit: int) -> list[SNP]: - snps: Query = self._get_query(SNP) - filter_functions = [SNPFilter.SKIP_AND_LIMIT] - filtered_query = apply_snp_filter( - snps=snps, filter_functions=filter_functions, skip=skip, limit=limit - ) - result = await self.session.execute(filtered_query) - return result.scalars().all() From 422737cfbed698aadd5c0b56168f8c9f44ca0255 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 22:39:13 +0200 Subject: [PATCH 29/65] Delete this unreachable code --- genotype_api/database/crud/update.py | 1 - 1 file changed, 1 deletion(-) diff --git a/genotype_api/database/crud/update.py b/genotype_api/database/crud/update.py index b64be46..57f065b 100644 --- a/genotype_api/database/crud/update.py +++ b/genotype_api/database/crud/update.py @@ -96,4 +96,3 @@ async def update_user_email(self, user: User, email: EmailStr) -> User: await self.session.commit() await self.session.refresh(user) return user - return user From f34c650770647fa78271c5ada4df9773062f5c08 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 22:49:05 +0200 Subject: [PATCH 30/65] Fix poetry conflict --- poetry.lock | 868 +++++++++++++++++++++++++------------------------ pyproject.toml | 3 - 2 files changed, 452 insertions(+), 419 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8b45ff1..4435757 
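A note on the duplicated get_snps_by_limit_and_skip methods removed in patch 28: the extra copies were inert rather than harmful, because a name repeated in a class body simply rebinds the attribute, leaving only the last definition reachable. A toy demonstration (not the project's classes):

class Reader:
    def get(self) -> str:
        return "first"

    def get(self) -> str:  # rebinds Reader.get; the definition above is now unreachable
        return "second"


assert Reader().get() == "second"

The same rebinding rule is why deleting the duplicates changes no behavior.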
100644 --- a/poetry.lock +++ b/poetry.lock @@ -31,24 +31,24 @@ sa = ["sqlalchemy (>=1.3,<1.4)"] [[package]] name = "annotated-types" -version = "0.6.0" +version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] [[package]] name = "anyio" -version = "4.3.0" +version = "4.6.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, + {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, ] [package.dependencies] @@ -56,55 +56,55 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "astroid" -version = "3.1.0" +version = "3.3.5" description = "An abstract syntax tree for Python with inference support." 
optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" files = [ - {file = "astroid-3.1.0-py3-none-any.whl", hash = "sha256:951798f922990137ac090c53af473db7ab4e70c770e6d7fae0cec59f74411819"}, - {file = "astroid-3.1.0.tar.gz", hash = "sha256:ac248253bfa4bd924a0de213707e7ebeeb3138abeb48d798784ead1e56d419d4"}, + {file = "astroid-3.3.5-py3-none-any.whl", hash = "sha256:a9d1c946ada25098d790e079ba2a1b112157278f3fb7e718ae6a9252f5835dc8"}, + {file = "astroid-3.3.5.tar.gz", hash = "sha256:5cfc40ae9f68311075d27ef68a4841bdc5cc7f6cf86671b49f00607d30188e2d"}, ] [[package]] name = "bcrypt" -version = "4.1.2" +version = "4.2.0" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.7" files = [ - {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, - {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, - {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, - {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, - {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, - {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, - {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, - {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, - {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, - {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, - {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, - {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, - {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, - {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, - {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, - {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, - {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, - {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, - {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = 
"sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, - {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, - {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, - {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, - {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, - {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, - {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, - {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, - {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, + {file = "bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7"}, + {file = "bcrypt-4.2.0-cp37-abi3-win32.whl", hash = "sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458"}, + {file = "bcrypt-4.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5"}, + {file = "bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8"}, + {file = "bcrypt-4.2.0-cp39-abi3-win32.whl", hash = "sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34"}, + {file = "bcrypt-4.2.0-cp39-abi3-win_amd64.whl", hash = "sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184"}, + {file = "bcrypt-4.2.0.tar.gz", hash = "sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221"}, ] [package.extras] @@ -124,85 +124,100 @@ files = [ [[package]] name = "cachetools" -version = "5.3.3" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = 
"cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -497,13 +512,13 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "dill" -version = "0.3.8" +version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, ] [package.extras] @@ -523,21 +538,21 @@ files = [ [[package]] name = "dnspython" -version = "2.6.1" +version = "2.7.0" description = "DNS toolkit" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, ] [package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] trio = ["trio (>=0.23)"] wmi = ["wmi (>=1.5.1)"] @@ -553,13 +568,13 @@ files = [ [[package]] name = "ecdsa" -version = "0.18.0" +version = "0.19.0" description = "ECDSA cryptographic signature library (pure python)" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.6" files = [ - {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, - 
{file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, + {file = "ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a"}, + {file = "ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8"}, ] [package.dependencies] @@ -571,13 +586,13 @@ gmpy2 = ["gmpy2"] [[package]] name = "email-validator" -version = "2.1.1" +version = "2.2.0" description = "A robust email address syntax and deliverability validation library." optional = false python-versions = ">=3.8" files = [ - {file = "email_validator-2.1.1-py3-none-any.whl", hash = "sha256:97d882d174e2a65732fb43bfce81a3a834cbc1bde8bf419e30ef5ea976370a05"}, - {file = "email_validator-2.1.1.tar.gz", hash = "sha256:200a70680ba08904be6d1eef729205cc0d687634399a5924d842533efb824b84"}, + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, ] [package.dependencies] @@ -597,48 +612,49 @@ files = [ [[package]] name = "fastapi" -version = "0.110.1" +version = "0.115.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.110.1-py3-none-any.whl", hash = "sha256:5df913203c482f820d31f48e635e022f8cbfe7350e4830ef05a3163925b1addc"}, - {file = "fastapi-0.110.1.tar.gz", hash = "sha256:6feac43ec359dfe4f45b2c18ec8c94edb8dc2dfc461d417d9e626590c071baad"}, + {file = "fastapi-0.115.0-py3-none-any.whl", hash = "sha256:17ea427674467486e997206a5ab25760f6b09e069f099b96f5b55a32fb6f1631"}, + {file = "fastapi-0.115.0.tar.gz", hash = "sha256:f93b4ca3529a8ebc6fc3fcf710e5efa8de3df9b41570958abf1d97d843138004"}, ] [package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.37.2,<0.38.0" +starlette = ">=0.37.2,<0.39.0" typing-extensions = ">=4.8.0" [package.extras] -all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=2.11.2)", "python-multipart (>=0.0.7)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "filelock" -version = "3.13.3" +version = "3.16.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.3-py3-none-any.whl", hash = "sha256:5ffa845303983e7a0b7ae17636509bc97997d58afeafa72fb141a17b152284cb"}, - {file = "filelock-3.13.3.tar.gz", hash = "sha256:a79895a25bbefdf55d1a2a0a80968f7dbb28edcd6d4234a0afb3f37ecde4b546"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "google-auth" -version = "2.29.0" +version = "2.35.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, - {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, + {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"}, + {file = "google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"}, ] [package.dependencies] @@ -648,76 +664,91 @@ rsa = ">=3.1.4,<5" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "greenlet" -version = "3.0.3" +version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = 
"greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = 
"greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] @@ -726,13 +757,13 @@ test = ["objgraph", "psutil"] [[package]] name = "gunicorn" -version = "22.0.0" +version = "23.0.0" description = "WSGI HTTP Server for UNIX" optional = false python-versions = ">=3.7" files = [ - {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, - {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, + {file = "gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"}, + {file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"}, ] [package.dependencies] @@ -820,13 +851,13 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve [[package]] name = "identify" -version = "2.5.35" +version = "2.6.1" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, - {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, + {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, + {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, ] 
[package.extras] @@ -834,15 +865,18 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -881,18 +915,15 @@ files = [ [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[package.dependencies] -setuptools = "*" - [[package]] name = "numpy" version = "1.26.4" @@ -940,13 +971,13 @@ files = [ [[package]] name = "openpyxl" -version = "3.1.2" +version = "3.1.5" description = "A Python library to read/write Excel 2010 xlsx/xlsm files" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, - {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, + {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, + {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, ] [package.dependencies] @@ -954,13 +985,13 @@ et-xmlfile = "*" [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -982,18 +1013,19 @@ totp = ["cryptography"] [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. 
a \"user data dir\"." +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -1012,13 +1044,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.7.0" +version = "3.8.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.9" files = [ - {file = "pre_commit-3.7.0-py2.py3-none-any.whl", hash = "sha256:5eae9e10c2b5ac51577c3452ec0a490455c45a0533f7960f993a0d01e59decab"}, - {file = "pre_commit-3.7.0.tar.gz", hash = "sha256:e209d61b8acdcf742404408531f0c37d49d2c734fd7cff2d6076083d191cb060"}, + {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, + {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, ] [package.dependencies] @@ -1030,24 +1062,24 @@ virtualenv = ">=20.10.0" [[package]] name = "pyasn1" -version = "0.6.0" +version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] [[package]] name = "pyasn1-modules" -version = "0.4.0" +version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, - {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, + {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] [package.dependencies] @@ -1066,109 
+1098,123 @@ files = [ [[package]] name = "pydantic" -version = "2.6.4" +version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, - {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.16.3" -typing-extensions = ">=4.6.1" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.4" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.16.3" -description = "" +version = "2.23.4" +description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, - {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, - {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, - {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, - {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, - {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, - {file = 
"pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, - {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, - {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, - {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, - {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, - {file = 
"pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, - {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, - {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, - {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, + {file = 
"pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = 
"pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = 
"pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] @@ -1176,36 +1222,37 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.2.1" +version = "2.5.2" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.2.1-py3-none-any.whl", hash = "sha256:0235391d26db4d2190cb9b31051c4b46882d28a51533f97440867f012d4da091"}, - {file = "pydantic_settings-2.2.1.tar.gz", hash = "sha256:00b9f6a5e95553590434c0fa01ead0b216c3e10bc54ae02e37f359948643c5ed"}, + {file = "pydantic_settings-2.5.2-py3-none-any.whl", hash = "sha256:2c912e55fd5794a59bf8c832b9de832dcfdf4778d79ff79b708744eed499a907"}, + {file = "pydantic_settings-2.5.2.tar.gz", hash = "sha256:f90b139682bee4d2065273d5185d71d37ea46cfe57e1b5ae184fc6a0b2484ca0"}, ] [package.dependencies] -pydantic = ">=2.3.0" +pydantic = ">=2.7.0" python-dotenv = ">=0.21.0" [package.extras] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] toml = ["tomli (>=2.0.1)"] yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "pylint" -version = "3.1.0" +version = "3.3.1" description = "python code static checker" optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" files = [ - {file = "pylint-3.1.0-py3-none-any.whl", hash = "sha256:507a5b60953874766d8a366e8e8c7af63e058b26345cfcb5f91f89d987fd6b74"}, - {file = "pylint-3.1.0.tar.gz", hash = "sha256:6a69beb4a6f63debebaab0a3477ecd0f559aa726af4954fc948c51f7a2549e23"}, + {file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"}, + {file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"}, ] [package.dependencies] -astroid = ">=3.1.0,<=3.2.0-dev0" +astroid = ">=3.3.4,<=3.4.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, @@ -1237,15 +1284,18 @@ rsa = ["cryptography"] [[package]] name = "pyreadline3" -version = "3.4.1" +version = "3.5.4" description = "A python implementation of GNU readline." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, - {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, + {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, + {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, ] +[package.extras] +dev = ["build", "flake8", "mypy", "pytest", "twine"] + [[package]] name = "pytest" version = "8.3.3" @@ -1354,62 +1404,64 @@ dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatc [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = 
"PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] @@ -1447,22 +1499,6 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" -[[package]] -name = "setuptools" -version = "69.2.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, - {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", 
"packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "six" version = "1.16.0" @@ -1619,35 +1655,35 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7 [[package]] name = "tomlkit" -version = "0.12.4" +version = "0.13.2" description = "Style preserving TOML library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"}, - {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"}, + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -1720,13 +1756,13 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)" [[package]] name = "virtualenv" -version = "20.25.1" +version = "20.26.6" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, - {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, + {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, + {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, ] [package.dependencies] @@ -1735,7 +1771,7 @@ filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [metadata] diff --git a/pyproject.toml b/pyproject.toml index d6a7f4c..3a9d2cf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,12 +33,9 @@ sqlalchemy-utils = "^0.41.2" starlette = "^0.37.2" uvicorn = "^0.29.0" uvloop = "^0.19.0" -cryptography = "^42.0.8" aiomysql = "^0.2.0" pytest-asyncio = "^0.24.0" - - [tool.poetry.group.dev.dependencies] bump2version = "^1.0.1" coveralls = "^3.3.1" From 3bb85efa755b9b6bfd996ca6002776ebcc3e9d42 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 22:52:36 +0200 Subject: [PATCH 31/65] Reformat --- genotype_api/api/endpoints/analyses.py | 6 ++---- genotype_api/api/endpoints/users.py | 3 +-- genotype_api/dto/plate.py | 4 ++-- genotype_api/services/endpoint_services/analysis_service.py | 4 +++- genotype_api/services/endpoint_services/user_service.py | 3 +-- 5 files changed, 9 insertions(+), 11 deletions(-) diff --git a/genotype_api/api/endpoints/analyses.py b/genotype_api/api/endpoints/analyses.py index 7b550b5..7146ae7 100644 --- a/genotype_api/api/endpoints/analyses.py +++ b/genotype_api/api/endpoints/analyses.py @@ -2,8 +2,7 @@ from http import HTTPStatus -from fastapi import (APIRouter, Depends, File, HTTPException, Query, - UploadFile, status) +from fastapi import APIRouter, Depends, File, HTTPException, Query, UploadFile, status from fastapi.responses import JSONResponse from genotype_api.database.store import Store, get_store @@ -11,8 +10,7 @@ from 
genotype_api.dto.user import CurrentUser from genotype_api.exceptions import AnalysisNotFoundError from genotype_api.security import get_active_user -from genotype_api.services.endpoint_services.analysis_service import \ - AnalysisService +from genotype_api.services.endpoint_services.analysis_service import AnalysisService router = APIRouter() diff --git a/genotype_api/api/endpoints/users.py b/genotype_api/api/endpoints/users.py index 9068c03..8fd34a6 100644 --- a/genotype_api/api/endpoints/users.py +++ b/genotype_api/api/endpoints/users.py @@ -7,8 +7,7 @@ from genotype_api.database.store import Store, get_store from genotype_api.dto.user import CurrentUser, UserRequest, UserResponse -from genotype_api.exceptions import (UserArchiveError, UserExistsError, - UserNotFoundError) +from genotype_api.exceptions import UserArchiveError, UserExistsError, UserNotFoundError from genotype_api.security import get_active_user from genotype_api.services.endpoint_services.user_service import UserService diff --git a/genotype_api/dto/plate.py b/genotype_api/dto/plate.py index b2a0d1e..e2ca8c8 100644 --- a/genotype_api/dto/plate.py +++ b/genotype_api/dto/plate.py @@ -3,9 +3,9 @@ from collections import Counter from datetime import datetime -from pydantic import BaseModel, validator, Field, EmailStr +from pydantic import BaseModel, EmailStr, Field, validator -from genotype_api.constants import Types, Sexes, Status +from genotype_api.constants import Sexes, Status, Types from genotype_api.database.models import Analysis diff --git a/genotype_api/services/endpoint_services/analysis_service.py b/genotype_api/services/endpoint_services/analysis_service.py index 1b32e5c..2e92647 100644 --- a/genotype_api/services/endpoint_services/analysis_service.py +++ b/genotype_api/services/endpoint_services/analysis_service.py @@ -49,7 +49,9 @@ async def get_upload_sequence_analyses(self, file: UploadFile) -> list[AnalysisR """ file_name: Path = check_file(file_path=file.filename, extension=FileExtension.VCF) content = await file.file.read() - sequence_analysis = SequenceAnalysis(vcf_file=content.decode("utf-8"), source=str(file_name)) + sequence_analysis = SequenceAnalysis( + vcf_file=content.decode("utf-8"), source=str(file_name) + ) analyses: list[Analysis] = list(sequence_analysis.generate_analyses()) await self.store.check_analyses_objects(analyses=analyses, analysis_type=Types.SEQUENCE) await self.store.create_analyses_samples(analyses=analyses) diff --git a/genotype_api/services/endpoint_services/user_service.py b/genotype_api/services/endpoint_services/user_service.py index 94d6133..36d62e6 100644 --- a/genotype_api/services/endpoint_services/user_service.py +++ b/genotype_api/services/endpoint_services/user_service.py @@ -5,8 +5,7 @@ from genotype_api.database.models import User from genotype_api.database.store import Store from genotype_api.dto.user import PlateOnUser, UserRequest, UserResponse -from genotype_api.exceptions import (UserArchiveError, UserExistsError, - UserNotFoundError) +from genotype_api.exceptions import UserArchiveError, UserExistsError, UserNotFoundError from genotype_api.services.endpoint_services.base_service import BaseService From 34476ff1dcaf18758287b40d2cb5b6f8a720d84b Mon Sep 17 00:00:00 2001 From: ahdamin Date: Mon, 7 Oct 2024 22:53:39 +0200 Subject: [PATCH 32/65] Reformat --- genotype_api/api/endpoints/plates.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/genotype_api/api/endpoints/plates.py b/genotype_api/api/endpoints/plates.py index a5d98ac..f218541 100644 
--- a/genotype_api/api/endpoints/plates.py
+++ b/genotype_api/api/endpoints/plates.py
@@ -3,8 +3,7 @@
 from http import HTTPStatus
 from typing import Literal
 
-from fastapi import (APIRouter, Depends, File, HTTPException, Query,
-                     UploadFile, status)
+from fastapi import APIRouter, Depends, File, HTTPException, Query, UploadFile, status
 from fastapi.responses import JSONResponse
 
 from genotype_api.database.filter_models.plate_models import PlateOrderParams

From 8066632040123eb36abbfd8194fc8b4ca81e66e5 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Tue, 8 Oct 2024 00:17:13 +0200
Subject: [PATCH 33/65] Add eager loading for user plates in get_user_by_id

---
 genotype_api/database/crud/read.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/genotype_api/database/crud/read.py b/genotype_api/database/crud/read.py
index 51cee52..44d2257 100644
--- a/genotype_api/database/crud/read.py
+++ b/genotype_api/database/crud/read.py
@@ -218,7 +218,7 @@ async def get_sample_by_id(self, sample_id: str) -> Sample:
         return result.scalars().first()
 
     async def get_user_by_id(self, user_id: int) -> User:
-        users: Query = self._get_query(User)
+        users: Query = self._get_query(User).options(selectinload(User.plates))
        filter_functions = [UserFilter.BY_ID]
         filtered_query = apply_user_filter(
             users=users, filter_functions=filter_functions, user_id=user_id
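
With SQLAlchemy's AsyncSession, relationships cannot be lazy-loaded after the query returns: touching an unloaded User.plates outside the query typically fails with a MissingGreenlet error, which is why the relationship has to be loaded eagerly at query time as in the patch above. A minimal sketch of the pattern, assuming User exposes an "id" column and a "plates" relationship as the diff implies:

    from sqlalchemy import select
    from sqlalchemy.ext.asyncio import AsyncSession
    from sqlalchemy.orm import selectinload

    from genotype_api.database.models import User

    async def load_user_with_plates(session: AsyncSession, user_id: int):
        # selectinload issues one extra SELECT for the related plates up
        # front, so user.plates is already populated when control returns.
        query = select(User).options(selectinload(User.plates)).filter(User.id == user_id)
        result = await session.execute(query)
        return result.scalars().first()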
From 3d09f8ec46825c8e7835ef809b1a9f9e9d87edd4 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Tue, 8 Oct 2024 11:07:12 +0200
Subject: [PATCH 34/65] Add exception handler for OperationalError

---
 genotype_api/api/app.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/genotype_api/api/app.py b/genotype_api/api/app.py
index 96263bc..dfd877a 100644
--- a/genotype_api/api/app.py
+++ b/genotype_api/api/app.py
@@ -9,7 +9,7 @@
 from fastapi import FastAPI, Request, status
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import JSONResponse
-from sqlalchemy.exc import NoResultFound
+from sqlalchemy.exc import NoResultFound, OperationalError
 
 from genotype_api.api.endpoints import analyses, plates, samples, snps, users
 from genotype_api.config import security_settings
@@ -36,6 +36,15 @@ async def lifespan(app: FastAPI):
 )
 
 
+@app.exception_handler(OperationalError)
+async def db_connection_exception_handler(request: Request, exc: OperationalError):
+    LOG.error(f"Database connection error: {exc}")
+    return JSONResponse(
+        content={"detail": "Database connection error. Please try again later."},
+        status_code=status.HTTP_503_SERVICE_UNAVAILABLE,  # 503 indicates a service is unavailable
+    )
+
+
 @app.exception_handler(NoResultFound)
 async def not_found_exception_handler(request: Request, exc: NoResultFound):
     return JSONResponse("Document not found", status_code=status.HTTP_404_NOT_FOUND)

From 4149b3b15feec2596c344758600cd1760b9e40e7 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Tue, 8 Oct 2024 11:15:55 +0200
Subject: [PATCH 35/65] Remove comment

---
 genotype_api/api/app.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/genotype_api/api/app.py b/genotype_api/api/app.py
index dfd877a..2cbe7c9 100644
--- a/genotype_api/api/app.py
+++ b/genotype_api/api/app.py
@@ -41,7 +41,7 @@ async def db_connection_exception_handler(request: Request, exc: OperationalErro
     LOG.error(f"Database connection error: {exc}")
     return JSONResponse(
         content={"detail": "Database connection error. Please try again later."},
-        status_code=status.HTTP_503_SERVICE_UNAVAILABLE,  # 503 indicates a service is unavailable
+        status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
     )

From 9348379e7ac6acbe1644a9d36c7d8ff983e3bfad Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Tue, 8 Oct 2024 11:24:54 +0200
Subject: [PATCH 36/65] Add retry logic with tenacity

---
 genotype_api/database/base_handler.py | 9 +++++++++
 genotype_api/database/store.py        | 9 +++++++++
 poetry.lock                           | 17 ++++++++++++++++-
 pyproject.toml                        | 1 +
 4 files changed, 35 insertions(+), 1 deletion(-)

diff --git a/genotype_api/database/base_handler.py b/genotype_api/database/base_handler.py
index 3e3d295..7c0e48f 100644
--- a/genotype_api/database/base_handler.py
+++ b/genotype_api/database/base_handler.py
@@ -1,10 +1,13 @@
 from dataclasses import dataclass
 from typing import Type
 
+from sqlalchemy.exc import OperationalError
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.future import select
 from sqlalchemy.orm import DeclarativeBase, Query
+from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
 
+from genotype_api.config import settings
 from genotype_api.database.models import Analysis, Sample
 
 
@@ -15,6 +18,12 @@ class BaseHandler:
     def __init__(self, session: AsyncSession):
         self.session = session
 
+    @retry(
+        stop=stop_after_attempt(settings.max_retries),
+        wait=wait_fixed(settings.retry_delay),
+        retry=retry_if_exception_type(OperationalError),
+        reraise=True,
+    )
     def _get_query(self, table: Type[DeclarativeBase]) -> Query:
         """Return a query for the given table."""
         return select(table)

diff --git a/genotype_api/database/store.py b/genotype_api/database/store.py
index 379e5f6..23eaf70 100644
--- a/genotype_api/database/store.py
+++ b/genotype_api/database/store.py
@@ -1,7 +1,10 @@
 """Module for the store handler."""
 
+from sqlalchemy.exc import OperationalError
 from sqlalchemy.ext.asyncio import AsyncSession
+from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
 
+from genotype_api.config import settings
 from genotype_api.database.crud.create import CreateHandler
 from genotype_api.database.crud.delete import DeleteHandler
 from genotype_api.database.crud.read import ReadHandler
@@ -24,6 +27,12 @@ def __init__(self, session: AsyncSession):
         UpdateHandler.__init__(self, session)
 
     @classmethod
+    @retry(
+        stop=stop_after_attempt(settings.max_retries),
+        wait=wait_fixed(settings.retry_delay),
+        retry=retry_if_exception_type(OperationalError),
+        reraise=True,
+    )
     async def create(cls) -> "Store":
         """Asynchronously create and return a Store instance with a session."""
         async with get_session() as session:  # Correctly use async context manager

diff --git a/poetry.lock b/poetry.lock
index 4435757..238f05f 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1653,6 +1653,21 @@ anyio = ">=3.4.0,<5"
 [package.extras]
 full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"]
 
+[[package]]
+name = "tenacity"
+version = "9.0.0"
+description = "Retry code until it succeeds"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"},
+    {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"},
+]
+
+[package.extras]
+doc = ["reno", "sphinx"]
+test = ["pytest", "tornado (>=4.5)", "typeguard"]
+
 [[package]]
 name = "tomlkit"
 version = "0.13.2"
@@ -1777,4 +1792,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "4137cf8d00a4d14d8c728d07f899a30edda86c4c33c7b6252b6e94085bb04b6f"
+content-hash = "78c8f76d8fe31061c6c1261dcb06819e4534eaae54d6d524ef339d5e188c500a"

diff --git a/pyproject.toml b/pyproject.toml
index 3a9d2cf..4963809 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -35,6 +35,7 @@ uvicorn = "^0.29.0"
 uvloop = "^0.19.0"
 aiomysql = "^0.2.0"
 pytest-asyncio = "^0.24.0"
+tenacity = "^9.0.0"
 
 [tool.poetry.group.dev.dependencies]
 bump2version = "^1.0.1"
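
One detail worth noting about the patch above: tenacity re-invokes only the function it wraps, and _get_query merely builds a select statement, so the decorator there never re-runs the network I/O where an OperationalError actually surfaces. That happens at the awaited session.execute call. A hedged sketch of retrying the round-trip itself, reusing the same settings fields the patch assumes (settings.max_retries, settings.retry_delay):

    from sqlalchemy.exc import OperationalError
    from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed

    from genotype_api.config import settings

    @retry(
        stop=stop_after_attempt(settings.max_retries),
        wait=wait_fixed(settings.retry_delay),
        retry=retry_if_exception_type(OperationalError),
        reraise=True,
    )
    async def execute_with_retry(session, query):
        # The retry wraps the awaited call, so transient OperationalErrors
        # from dropped connections are retried instead of failing the request.
        return await session.execute(query)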
(>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "4137cf8d00a4d14d8c728d07f899a30edda86c4c33c7b6252b6e94085bb04b6f" +content-hash = "78c8f76d8fe31061c6c1261dcb06819e4534eaae54d6d524ef339d5e188c500a" diff --git a/pyproject.toml b/pyproject.toml index 3a9d2cf..4963809 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ uvicorn = "^0.29.0" uvloop = "^0.19.0" aiomysql = "^0.2.0" pytest-asyncio = "^0.24.0" +tenacity = "^9.0.0" [tool.poetry.group.dev.dependencies] bump2version = "^1.0.1" From 42fcf3e9f2c639591cac1e477a68ca3e5d057f03 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Tue, 8 Oct 2024 11:30:00 +0200 Subject: [PATCH 37/65] Remove commented-out code --- .../endpoint_services/sample_service.py | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/genotype_api/services/endpoint_services/sample_service.py b/genotype_api/services/endpoint_services/sample_service.py index 6acb0d4..025c350 100644 --- a/genotype_api/services/endpoint_services/sample_service.py +++ b/genotype_api/services/endpoint_services/sample_service.py @@ -4,14 +4,23 @@ from typing import Literal from genotype_api.constants import Sexes, Types -from genotype_api.database.filter_models.sample_models import SampleFilterParams, SampleSexesUpdate +from genotype_api.database.filter_models.sample_models import ( + SampleFilterParams, + SampleSexesUpdate, +) from genotype_api.database.models import Analysis, Sample from genotype_api.dto.genotype import GenotypeResponse from genotype_api.dto.sample import AnalysisOnSample, SampleCreate, SampleResponse -from genotype_api.exceptions import GenotypeDBError, InsufficientAnalysesError, SampleNotFoundError +from genotype_api.exceptions import ( + GenotypeDBError, + InsufficientAnalysesError, + SampleNotFoundError, +) from genotype_api.models import MatchResult, SampleDetail from genotype_api.services.endpoint_services.base_service import BaseService -from genotype_api.services.match_genotype_service.match_genotype import MatchGenotypeService +from genotype_api.services.match_genotype_service.match_genotype import ( + MatchGenotypeService, +) class SampleService(BaseService): @@ -115,21 +124,11 @@ async def get_match_results( analysis_type=comparison_set, date_min=date_min, date_max=date_max ) - # if not analyses: - # raise InsufficientAnalysesError - # Fetch the sample analysis with eager loading sample_analysis = await self.store.get_analysis_by_type_and_sample_id( sample_id=sample_id, analysis_type=analysis_type ) - # if sample_analysis is None: - # raise SampleNotFoundError - - # # Ensure that genotypes are eagerly loaded to avoid lazy loading - # if not sample_analysis.genotypes: - # raise GenotypeDBError - # Perform matching using the MatchGenotypeService matches: list[MatchResult] = MatchGenotypeService.get_matches( analyses=analyses, sample_analysis=sample_analysis From 993ff00bc83fe171452bab76725cdebf407f40fe Mon Sep 17 00:00:00 2001 From: ahdamin Date: Tue, 8 Oct 2024 13:54:12 +0200 Subject: [PATCH 38/65] Update to pydantic v2 field validators --- genotype_api/dto/plate.py | 7 +++---- genotype_api/models.py | 25 ++++++++++++------------- 2 files changed, 15 insertions(+), 17 deletions(-) diff --git a/genotype_api/dto/plate.py b/genotype_api/dto/plate.py index e2ca8c8..859fcaf 100644 --- a/genotype_api/dto/plate.py +++ b/genotype_api/dto/plate.py @@ -3,7 +3,7 @@ from collections import Counter from datetime import datetime -from pydantic import 
BaseModel, EmailStr, Field, validator +from pydantic import BaseModel, EmailStr, Field, field_validator from genotype_api.constants import Sexes, Status, Types from genotype_api.database.models import Analysis @@ -55,7 +55,7 @@ class PlateResponse(BaseModel): analyses: list[AnalysisOnPlate] | None = None plate_status_counts: PlateStatusCounts | None = None - @validator("plate_status_counts") + @field_validator("plate_status_counts", mode="before") def check_detail(cls, value, values): analyses = values.get("analyses") if not analyses: @@ -65,5 +65,4 @@ def check_detail(cls, value, values): status_counts = Counter(statuses) return PlateStatusCounts(**status_counts, total=len(analyses), commented=commented) - class Config: - validate_default = True + model_config = {"validate_assignment": True} diff --git a/genotype_api/models.py b/genotype_api/models.py index b1a1f1d..6f8f5de 100644 --- a/genotype_api/models.py +++ b/genotype_api/models.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, validator +from pydantic import BaseModel, field_validator class SampleDetailStats(BaseModel): @@ -25,22 +25,21 @@ class SampleDetail(BaseModel): stats: SampleDetailStats | None = None status: SampleDetailStatus | None = None - @validator("stats") - def validate_stats(cls, value, values) -> SampleDetailStats: - matches = values.get("matches") - mismatches = values.get("mismatches") - unknown = values.get("unknown") + @field_validator("stats", mode="before") + def validate_stats(cls, value, data) -> SampleDetailStats: + matches = data.get("matches") + mismatches = data.get("mismatches") + unknown = data.get("unknown") return SampleDetailStats(matches=matches, mismatches=mismatches, unknown=unknown) - @validator("status") - def validate_status(cls, value, values) -> SampleDetailStatus: - sex = values.get("sex") - snps = values.get("snps") - nocalls = values.get("nocalls") + @field_validator("status", mode="before") + def validate_status(cls, value, data) -> SampleDetailStatus: + sex = data.get("sex") + snps = data.get("snps") + nocalls = data.get("nocalls") return SampleDetailStatus(sex=sex, snps=snps, nocalls=nocalls) - class Config: - validate_default = True + model_config = {"validate_assignment": True} class MatchCounts(BaseModel): From 6866fa317a0e554ad750dc83c823a4fa06099885 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Tue, 8 Oct 2024 15:42:09 +0200 Subject: [PATCH 39/65] revert pydantic updates --- genotype_api/dto/plate.py | 7 ++++--- genotype_api/models.py | 25 +++++++++++++------------ 2 files changed, 17 insertions(+), 15 deletions(-) diff --git a/genotype_api/dto/plate.py b/genotype_api/dto/plate.py index 859fcaf..e2ca8c8 100644 --- a/genotype_api/dto/plate.py +++ b/genotype_api/dto/plate.py @@ -3,7 +3,7 @@ from collections import Counter from datetime import datetime -from pydantic import BaseModel, EmailStr, Field, field_validator +from pydantic import BaseModel, EmailStr, Field, validator from genotype_api.constants import Sexes, Status, Types from genotype_api.database.models import Analysis @@ -55,7 +55,7 @@ class PlateResponse(BaseModel): analyses: list[AnalysisOnPlate] | None = None plate_status_counts: PlateStatusCounts | None = None - @field_validator("plate_status_counts", mode="before") + @validator("plate_status_counts") def check_detail(cls, value, values): analyses = values.get("analyses") if not analyses: @@ -65,4 +65,5 @@ def check_detail(cls, value, values): status_counts = Counter(statuses) return PlateStatusCounts(**status_counts, total=len(analyses), commented=commented) - 
model_config = {"validate_assignment": True} + class Config: + validate_default = True diff --git a/genotype_api/models.py b/genotype_api/models.py index 6f8f5de..b1a1f1d 100644 --- a/genotype_api/models.py +++ b/genotype_api/models.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel, field_validator +from pydantic import BaseModel, validator class SampleDetailStats(BaseModel): @@ -25,21 +25,22 @@ class SampleDetail(BaseModel): stats: SampleDetailStats | None = None status: SampleDetailStatus | None = None - @field_validator("stats", mode="before") - def validate_stats(cls, value, data) -> SampleDetailStats: - matches = data.get("matches") - mismatches = data.get("mismatches") - unknown = data.get("unknown") + @validator("stats") + def validate_stats(cls, value, values) -> SampleDetailStats: + matches = values.get("matches") + mismatches = values.get("mismatches") + unknown = values.get("unknown") return SampleDetailStats(matches=matches, mismatches=mismatches, unknown=unknown) - @field_validator("status", mode="before") - def validate_status(cls, value, data) -> SampleDetailStatus: - sex = data.get("sex") - snps = data.get("snps") - nocalls = data.get("nocalls") + @validator("status") + def validate_status(cls, value, values) -> SampleDetailStatus: + sex = values.get("sex") + snps = values.get("snps") + nocalls = values.get("nocalls") return SampleDetailStatus(sex=sex, snps=snps, nocalls=nocalls) - model_config = {"validate_assignment": True} + class Config: + validate_default = True class MatchCounts(BaseModel): From 9433cd46e34f877436e8bfa3c1c8b44d832452fd Mon Sep 17 00:00:00 2001 From: ahdamin Date: Tue, 8 Oct 2024 15:45:57 +0200 Subject: [PATCH 40/65] Add join condition for sample and analysis --- genotype_api/database/base_handler.py | 2 +- genotype_api/database/crud/create.py | 31 ++++++++++++++++++--------- genotype_api/database/crud/read.py | 17 +++++++++++---- genotype_api/database/crud/update.py | 6 ++++-- 4 files changed, 39 insertions(+), 17 deletions(-) diff --git a/genotype_api/database/base_handler.py b/genotype_api/database/base_handler.py index 7c0e48f..ede91ed 100644 --- a/genotype_api/database/base_handler.py +++ b/genotype_api/database/base_handler.py @@ -29,4 +29,4 @@ def _get_query(self, table: Type[DeclarativeBase]) -> Query: return select(table) def _get_join_analysis_on_sample(self) -> Query: - return self._get_query(table=Sample).join(Analysis) + return self._get_query(table=Sample).join(Analysis, Analysis.sample_id == Sample.id) diff --git a/genotype_api/database/crud/create.py b/genotype_api/database/crud/create.py index 252ee70..ffdda32 100644 --- a/genotype_api/database/crud/create.py +++ b/genotype_api/database/crud/create.py @@ -1,5 +1,7 @@ import logging +from sqlalchemy.future import select + from genotype_api.database.base_handler import BaseHandler from genotype_api.database.models import SNP, Analysis, Genotype, Plate, Sample, User from genotype_api.dto.user import UserRequest @@ -25,8 +27,9 @@ async def create_plate(self, plate: Plate) -> Plate: async def create_sample(self, sample: Sample) -> Sample: """Creates a sample in the database.""" - sample_in_db = await self._get_query(Analysis).filter(Sample.id == sample.id).one_or_none() - if sample_in_db: + sample_in_db_query = self._get_query(Analysis).filter(Sample.id == sample.id) + result = await self.session.execute(sample_in_db_query) + if sample_in_db := result.one_or_none(): raise SampleExistsError self.session.add(sample) await self.session.commit() @@ -34,14 +37,22 @@ async def 
create_sample(self, sample: Sample) -> Sample:
         return sample
 
     async def create_analyses_samples(self, analyses: list[Analysis]) -> list[Sample]:
-        """creating samples in an analysis if not already in db."""
-        return [
-            await self.create_sample(sample=Sample(id=analysis.sample_id))
-            for analysis in analyses
-            if not await self.session.query(Sample)
-            .filter(Sample.id == analysis.sample_id)
-            .one_or_none()
-        ]
+        """Creating samples in an analysis if not already in db."""
+        created_samples = []
+
+        for analysis in analyses:
+            # Sample already exists in the database
+            sample_in_db_query = self._get_query(Sample).filter(Sample.id == analysis.sample_id)
+            result = await self.session.execute(sample_in_db_query)
+            sample_in_db = result.one_or_none()
+
+            # Sample doesn't exist
+            if not sample_in_db:
+                sample = Sample(id=analysis.sample_id)
+                created_sample = await self.create_sample(sample=sample)
+                created_samples.append(created_sample)
+
+        return created_samples

diff --git a/genotype_api/database/crud/read.py b/genotype_api/database/crud/read.py
index 44d2257..589ce61 100644
--- a/genotype_api/database/crud/read.py
+++ b/genotype_api/database/crud/read.py
@@ -9,10 +9,19 @@
 from genotype_api.database.base_handler import BaseHandler
 from genotype_api.database.filter_models.plate_models import PlateOrderParams
 from genotype_api.database.filter_models.sample_models import SampleFilterParams
-from genotype_api.database.filters.analysis_filter import AnalysisFilter, apply_analysis_filter
-from genotype_api.database.filters.genotype_filters import GenotypeFilter, apply_genotype_filter
+from genotype_api.database.filters.analysis_filter import (
+    AnalysisFilter,
+    apply_analysis_filter,
+)
+from genotype_api.database.filters.genotype_filters import (
+    GenotypeFilter,
+    apply_genotype_filter,
+)
 from genotype_api.database.filters.plate_filters import PlateFilter, apply_plate_filter
-from genotype_api.database.filters.sample_filters import SampleFilter, apply_sample_filter
+from genotype_api.database.filters.sample_filters import (
+    SampleFilter,
+    apply_sample_filter,
+)
 from genotype_api.database.filters.snp_filters import SNPFilter, apply_snp_filter
 from genotype_api.database.filters.user_filters import UserFilter, apply_user_filter
 from genotype_api.database.models import SNP, Analysis, Genotype, Plate, Sample, User
@@ -147,7 +156,7 @@ async def get_filtered_samples(self, filter_params: SampleFilterParams) -> list[
         select(Sample)
         .distinct()
         .options(selectinload(Sample.analyses).selectinload(Analysis.genotypes))
-        .join(Analysis)
+        .join(Analysis, Analysis.sample_id == Sample.id)

diff --git a/genotype_api/database/crud/update.py b/genotype_api/database/crud/update.py
index 57f065b..8f3068f 100644
--- a/genotype_api/database/crud/update.py
+++ b/genotype_api/database/crud/update.py
@@ -8,7 +8,9 @@
 from genotype_api.database.filter_models.sample_models import SampleSexesUpdate
 from genotype_api.database.models import Analysis, Plate, Sample, User
 from genotype_api.exceptions import SampleNotFoundError
-from genotype_api.services.match_genotype_service.match_genotype import MatchGenotypeService
+from genotype_api.services.match_genotype_service.match_genotype import (
+    MatchGenotypeService,
+)
 
 
 class UpdateHandler(BaseHandler):
@@ -69,7 +71,7 @@ async def update_sample_sex(self, sexes_update: SampleSexesUpdate) -> Sample:
         select(Sample)
         .distinct()
         .options(selectinload(Sample.analyses).selectinload(Analysis.genotypes))
-        .join(Analysis)
+        .join(Analysis, Analysis.sample_id == Sample.id)
         .filter(Sample.id == sexes_update.sample_id)
     )
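
The explicit ON clause used throughout this patch removes any reliance on foreign-key inference: when SQLAlchemy can relate two tables through more than one path, a bare join raises an ambiguity error at construction time, while a named condition is always unambiguous. A minimal sketch of the difference:

    from sqlalchemy import select

    from genotype_api.database.models import Analysis, Sample

    # Bare join: SQLAlchemy infers the ON clause from foreign keys and
    # fails if more than one candidate path exists between the tables.
    implicit = select(Sample).join(Analysis)

    # Explicit ON clause, as in the patch: unambiguous in all cases.
    explicit = select(Sample).join(Analysis, Analysis.sample_id == Sample.id)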
""" - - # Fetch the analyses with eager loading using selectinload analyses = await self.store.get_analyses_by_type_between_dates( analysis_type=comparison_set, date_min=date_min, date_max=date_max ) - - # Fetch the sample analysis with eager loading sample_analysis = await self.store.get_analysis_by_type_and_sample_id( sample_id=sample_id, analysis_type=analysis_type ) - - # Perform matching using the MatchGenotypeService matches: list[MatchResult] = MatchGenotypeService.get_matches( analyses=analyses, sample_analysis=sample_analysis ) From 67b70d4a92b7875b6cc8fd114f213682f3d15421 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Wed, 9 Oct 2024 09:24:28 +0200 Subject: [PATCH 43/65] Add token validation checks --- genotype_api/security.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/genotype_api/security.py b/genotype_api/security.py index 58636d1..31f1199 100644 --- a/genotype_api/security.py +++ b/genotype_api/security.py @@ -68,6 +68,21 @@ async def get_active_user( store: Store = Depends(get_store), ) -> CurrentUser: """Dependency for secure endpoints""" + + if token_info is None or not isinstance(token_info, dict): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired token", + ) + + # Now check for the presence of "payload" and "email" safely + payload = token_info.get("payload") + if not payload or "email" not in payload: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired token", + ) + user_email = token_info["payload"]["email"] db_user: User = await store.get_user_by_email(email=user_email) if not db_user: From 0f66501e18a2027890fe308f7c9ddd5a4be009c8 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Wed, 9 Oct 2024 09:30:24 +0200 Subject: [PATCH 44/65] Remove redundant line --- genotype_api/database/database.py | 1 - 1 file changed, 1 deletion(-) diff --git a/genotype_api/database/database.py b/genotype_api/database/database.py index 6b17622..89c8026 100644 --- a/genotype_api/database/database.py +++ b/genotype_api/database/database.py @@ -59,4 +59,3 @@ async def drop_all_tables(): """Drop all tables in the database.""" async with engine.begin() as conn: await conn.run_sync(Base.metadata.drop_all) - await conn.run_sync(Base.metadata.drop_all) From 6fa777f8222dcb776e5e28cdbf62007ac533a212 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Wed, 9 Oct 2024 10:47:45 +0200 Subject: [PATCH 45/65] Remove unused local variable and imports --- genotype_api/database/crud/create.py | 9 ++++----- genotype_api/database/crud/read.py | 2 +- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/genotype_api/database/crud/create.py b/genotype_api/database/crud/create.py index ffdda32..aaa9735 100644 --- a/genotype_api/database/crud/create.py +++ b/genotype_api/database/crud/create.py @@ -1,10 +1,9 @@ import logging -from sqlalchemy.future import select +from sqlalchemy.orm import Query from genotype_api.database.base_handler import BaseHandler from genotype_api.database.models import SNP, Analysis, Genotype, Plate, Sample, User -from genotype_api.dto.user import UserRequest from genotype_api.exceptions import SampleExistsError LOG = logging.getLogger(__name__) @@ -27,9 +26,9 @@ async def create_plate(self, plate: Plate) -> Plate: async def create_sample(self, sample: Sample) -> Sample: """Creates a sample in the database.""" - sample_in_db_query = self._get_query(Analysis).filter(Sample.id == sample.id) - result = await self.session.execute(sample_in_db_query) - if sample_in_db := 
From 0f66501e18a2027890fe308f7c9ddd5a4be009c8 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Wed, 9 Oct 2024 09:30:24 +0200
Subject: [PATCH 44/65] Remove redundant line

---
 genotype_api/database/database.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/genotype_api/database/database.py b/genotype_api/database/database.py
index 6b17622..89c8026 100644
--- a/genotype_api/database/database.py
+++ b/genotype_api/database/database.py
@@ -59,4 +59,3 @@ async def drop_all_tables():
     """Drop all tables in the database."""
     async with engine.begin() as conn:
         await conn.run_sync(Base.metadata.drop_all)
-        await conn.run_sync(Base.metadata.drop_all)

From 6fa777f8222dcb776e5e28cdbf62007ac533a212 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Wed, 9 Oct 2024 10:47:45 +0200
Subject: [PATCH 45/65] Remove unused local variable and imports

---
 genotype_api/database/crud/create.py | 9 ++++-----
 genotype_api/database/crud/read.py   | 2 +-
 2 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/genotype_api/database/crud/create.py b/genotype_api/database/crud/create.py
index ffdda32..aaa9735 100644
--- a/genotype_api/database/crud/create.py
+++ b/genotype_api/database/crud/create.py
@@ -1,10 +1,9 @@
 import logging
 
-from sqlalchemy.future import select
+from sqlalchemy.orm import Query
 
 from genotype_api.database.base_handler import BaseHandler
 from genotype_api.database.models import SNP, Analysis, Genotype, Plate, Sample, User
-from genotype_api.dto.user import UserRequest
 from genotype_api.exceptions import SampleExistsError
 
 LOG = logging.getLogger(__name__)
@@ -27,9 +26,9 @@ async def create_plate(self, plate: Plate) -> Plate:
 
     async def create_sample(self, sample: Sample) -> Sample:
         """Creates a sample in the database."""
-        sample_in_db_query = self._get_query(Analysis).filter(Sample.id == sample.id)
-        result = await self.session.execute(sample_in_db_query)
-        if sample_in_db := result.one_or_none():
+        sample_in_db: Query = self._get_query(Analysis).filter(Sample.id == sample.id)
+        result = await self.session.execute(sample_in_db)
+        if result.one_or_none():
             raise SampleExistsError
         self.session.add(sample)
         await self.session.commit()

diff --git a/genotype_api/database/crud/read.py b/genotype_api/database/crud/read.py
index 589ce61..60fd18a 100644
--- a/genotype_api/database/crud/read.py
+++ b/genotype_api/database/crud/read.py
@@ -1,5 +1,5 @@
 import logging
-from datetime import date, timedelta
+from datetime import date
 
 from sqlalchemy import asc, desc, func
 from sqlalchemy.future import select

From 18db6ea6b400348132f572b43c2e5836fda71965 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Wed, 9 Oct 2024 10:56:28 +0200
Subject: [PATCH 46/65] Use AsyncGenerator for store instance

---
 genotype_api/database/store.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/genotype_api/database/store.py b/genotype_api/database/store.py
index 23eaf70..56012b6 100644
--- a/genotype_api/database/store.py
+++ b/genotype_api/database/store.py
@@ -1,5 +1,7 @@
 """Module for the store handler."""
 
+from typing import AsyncGenerator
+
 from sqlalchemy.exc import OperationalError
 from sqlalchemy.ext.asyncio import AsyncSession
 from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
@@ -39,7 +41,7 @@ async def create(cls) -> "Store":
         return cls(session)  # Return a Store instance with the session
 
 
-async def get_store() -> Store:
+async def get_store() -> AsyncGenerator[Store, None]:
     """Return a Store instance."""
     store = await Store.create()
     try:
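
The corrected annotation matches how FastAPI consumes yield-based dependencies: the framework runs the function body up to the yield for each request and executes the finally block at teardown, which is what closes the session. A small usage sketch with an assumed route, reusing the repository's own Store and get_store:

    from fastapi import Depends, FastAPI

    from genotype_api.database.store import Store, get_store

    app = FastAPI()

    @app.get("/samples/{sample_id}")
    async def read_sample(sample_id: str, store: Store = Depends(get_store)):
        # The request borrows the yielded Store; get_store's finally block
        # closes the underlying session once this handler returns.
        sample = await store.get_sample_by_id(sample_id)
        return {"sample_id": sample_id, "found": sample is not None}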
From 09f3d36cfd37252328be7c2453cb2c03512e9f90 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Wed, 9 Oct 2024 11:02:56 +0200
Subject: [PATCH 47/65] Remove redundant line

---
 tests/database/crud/test_update.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/tests/database/crud/test_update.py b/tests/database/crud/test_update.py
index 3a8fe0b..4a13824 100644
--- a/tests/database/crud/test_update.py
+++ b/tests/database/crud/test_update.py
@@ -94,4 +94,3 @@ async def test_update_sample_sex(base_store: Store, sample_sex_update: SampleSex
     assert updated_sample.sex == sample_sex_update.sex
     for analysis in updated_sample.analyses:
         assert analysis.sex == sample_sex_update.genotype_sex
-        assert analysis.sex == sample_sex_update.genotype_sex

From dcb807879de752e5b1251a78115239fe90dbe986 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Wed, 9 Oct 2024 11:13:22 +0200
Subject: [PATCH 48/65] Clean up comments

---
 genotype_api/database/crud/read.py                 | 15 +--------------
 genotype_api/database/store.py                     |  2 +-
 genotype_api/security.py                           |  1 -
 .../services/endpoint_services/sample_service.py   |  2 --
 4 files changed, 2 insertions(+), 18 deletions(-)

diff --git a/genotype_api/database/crud/read.py b/genotype_api/database/crud/read.py
index 60fd18a..d7b2ec3 100644
--- a/genotype_api/database/crud/read.py
+++ b/genotype_api/database/crud/read.py
@@ -81,7 +81,6 @@ async def get_analyses_by_type_between_dates(
 
         filtered_query = filtered_query.options(selectinload(Analysis.genotypes))
 
-        # Execute the query asynchronously
         result = await self.session.execute(filtered_query)
         return result.scalars().all()
 
@@ -97,7 +96,6 @@ async def get_analysis_by_type_and_sample_id(
             type=analysis_type,
         )
 
-        # Add selectinload to eagerly load genotypes
         filtered_query = filtered_query.options(selectinload(Analysis.genotypes))
 
         result = await self.session.execute(filtered_query)
@@ -105,9 +103,7 @@ async def get_plate_by_id(self, plate_id: int) -> Plate:
         plates: Query = self._get_query(Plate).options(
-            selectinload(Plate.analyses).selectinload(
-                Analysis.sample
-            )  # Eager loading of analyses and samples
+            selectinload(Plate.analyses).selectinload(Analysis.sample)
         )
         filter_functions = [PlateFilter.BY_ID]
         filtered_query = apply_plate_filter(
@@ -206,23 +202,14 @@ def _get_samples(query: Query, sample_id: str) -> Query:
         return query.filter(Sample.id.contains(sample_id))
 
     async def get_sample_by_id(self, sample_id: str) -> Sample:
-        # Start by getting a base query for Sample
         samples: Query = self._get_query(Sample)
-
-        # Define the filter functions for filtering by Sample ID
         filter_functions = [SampleFilter.BY_ID]
-
-        # Apply the filters using apply_sample_filter
         filtered_query = apply_sample_filter(
             samples=samples, filter_functions=filter_functions, sample_id=sample_id
         )
-
-        # Ensure we load related analyses and genotypes using selectinload to avoid lazy loading
         filtered_query = filtered_query.options(
             selectinload(Sample.analyses).selectinload(Analysis.genotypes)
         )
-
-        # Execute the query asynchronously
         result = await self.session.execute(filtered_query)
         return result.scalars().first()

diff --git a/genotype_api/database/store.py b/genotype_api/database/store.py
index 56012b6..b4ba9b7 100644
--- a/genotype_api/database/store.py
+++ b/genotype_api/database/store.py
@@ -45,6 +45,6 @@ async def get_store() -> AsyncGenerator[Store, None]:
     """Return a Store instance."""
     store = await Store.create()
     try:
-        yield store  # Yield the store for the duration of the request
+        yield store
     finally:
         await store.session.close()

diff --git a/genotype_api/security.py b/genotype_api/security.py
index 31f1199..259df6d 100644
--- a/genotype_api/security.py
+++ b/genotype_api/security.py
@@ -75,7 +75,6 @@ async def get_active_user(
             detail="Invalid or expired token",
         )
 
-    # Now check for the presence of "payload" and "email" safely
     payload = token_info.get("payload")
     if not payload or "email" not in payload:
         raise HTTPException(

diff --git a/genotype_api/services/endpoint_services/sample_service.py b/genotype_api/services/endpoint_services/sample_service.py
index 0f97cad..0d159ad 100644
--- a/genotype_api/services/endpoint_services/sample_service.py
+++ b/genotype_api/services/endpoint_services/sample_service.py
@@ -65,13 +65,11 @@ def _get_sample_response(self, sample: Sample) -> SampleResponse:
     )
 
     async def get_sample(self, sample_id: str) -> SampleResponse:
-        # Use the ReadHandler to fetch the sample
         sample: Sample = await self.store.get_sample_by_id(sample_id)
 
         if not sample:
             raise SampleNotFoundError
 
-        # If sample has two analyses and no status, refresh its status
         if len(sample.analyses) == 2 and not sample.status:
             sample: Sample = await self.store.refresh_sample_status(sample=sample)

From 3a230f4c5e1df252d2df54816e97452022267291 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Wed, 9 Oct 2024 17:08:12 +0200
Subject: [PATCH 49/65] Simplify async query execution in ReadHandler

---
 genotype_api/database/crud/read.py | 53 ++++++++++--------------------
 1 file changed, 17 insertions(+), 36 deletions(-)

diff --git a/genotype_api/database/crud/read.py b/genotype_api/database/crud/read.py
index d7b2ec3..f2fc595 100644
--- a/genotype_api/database/crud/read.py
+++ b/genotype_api/database/crud/read.py
@@ -39,8 +39,7 @@ async def get_analyses_by_plate_id(self, plate_id: int) -> list[Analysis]:
         filtered_query = apply_analysis_filter(
             analyses=analyses, filter_functions=filter_functions, plate_id=plate_id
         )
-        result = await
self.session.execute(filtered_query) - return result.scalars().all() + return (await self.session.execute(filtered_query)).scalars().all() async def get_analysis_by_id(self, analysis_id: int) -> Analysis: analyses: Query = self._get_query(Analysis) @@ -48,13 +47,11 @@ async def get_analysis_by_id(self, analysis_id: int) -> Analysis: filtered_query = apply_analysis_filter( analyses=analyses, filter_functions=filter_functions, analysis_id=analysis_id ) - result = await self.session.execute(filtered_query) - return result.scalars().first() + return (await self.session.execute(filtered_query)).scalars().first() async def get_analyses(self) -> list[Analysis]: filtered_query = self._get_query(Analysis) - result = await self.session.execute(filtered_query) - return result.scalars().all() + return (await self.session.execute(filtered_query)).scalars().all() async def get_analyses_with_skip_and_limit(self, skip: int, limit: int) -> list[Analysis]: analyses: Query = self._get_query(Analysis) @@ -62,8 +59,7 @@ async def get_analyses_with_skip_and_limit(self, skip: int, limit: int) -> list[ filtered_query = apply_analysis_filter( analyses=analyses, filter_functions=filter_functions, skip=skip, limit=limit ) - result = await self.session.execute(filtered_query) - return result.scalars().all() + return (await self.session.execute(filtered_query)).scalars().all() async def get_analyses_by_type_between_dates( self, analysis_type: Types, date_min: date, date_max: date @@ -80,9 +76,7 @@ async def get_analyses_by_type_between_dates( ) filtered_query = filtered_query.options(selectinload(Analysis.genotypes)) - - result = await self.session.execute(filtered_query) - return result.scalars().all() + return (await self.session.execute(filtered_query)).scalars().all() async def get_analysis_by_type_and_sample_id( self, sample_id: str, analysis_type: Types @@ -97,9 +91,7 @@ async def get_analysis_by_type_and_sample_id( ) filtered_query = filtered_query.options(selectinload(Analysis.genotypes)) - - result = await self.session.execute(filtered_query) - return result.scalars().first() + return (await self.session.execute(filtered_query)).scalars().first() async def get_plate_by_id(self, plate_id: int) -> Plate: plates: Query = self._get_query(Plate).options( @@ -109,8 +101,7 @@ async def get_plate_by_id(self, plate_id: int) -> Plate: filtered_query = apply_plate_filter( plates=plates, filter_functions=filter_functions, entry_id=plate_id ) - result = await self.session.execute(filtered_query) - return result.scalars().first() + return (await self.session.execute(filtered_query)).scalars().first() async def get_plate_by_plate_id(self, plate_id: str) -> Plate: plates: Query = self._get_query(Plate).options(selectinload(Plate.analyses)) @@ -118,8 +109,7 @@ async def get_plate_by_plate_id(self, plate_id: str) -> Plate: filtered_query = apply_plate_filter( plates=plates, filter_functions=filter_functions, plate_id=plate_id ) - result = await self.session.execute(filtered_query) - return result.scalars().first() + return (await self.session.execute(filtered_query)).scalars().first() async def get_ordered_plates(self, order_params: PlateOrderParams) -> list[Plate]: sort_func = desc if order_params.sort_order == "descend" else asc @@ -135,8 +125,7 @@ async def get_ordered_plates(self, order_params: PlateOrderParams) -> list[Plate limit=order_params.limit, sort_func=sort_func, ) - result = await self.session.execute(filtered_query) - return result.scalars().all() + return (await 
self.session.execute(filtered_query)).scalars().all() async def get_genotype_by_id(self, entry_id: int) -> Genotype: genotypes: Query = self._get_query(Genotype).options(selectinload(Genotype.analysis)) @@ -144,8 +133,7 @@ async def get_genotype_by_id(self, entry_id: int) -> Genotype: filtered_query = apply_genotype_filter( genotypes=genotypes, filter_functions=filter_functions, entry_id=entry_id ) - result = await self.session.execute(filtered_query) - return result.scalars().first() + return (await self.session.execute(filtered_query)).scalars().first() async def get_filtered_samples(self, filter_params: SampleFilterParams) -> list[Sample]: query = ( @@ -169,8 +157,7 @@ async def get_filtered_samples(self, filter_params: SampleFilterParams) -> list[ .offset(filter_params.skip) .limit(filter_params.limit) ) - result = await self.session.execute(filtered_query) - return result.scalars().all() + return (await self.session.execute(filtered_query)).scalars().all() @staticmethod def _get_incomplete_samples(query: Query) -> Query: @@ -210,8 +197,7 @@ async def get_sample_by_id(self, sample_id: str) -> Sample: filtered_query = filtered_query.options( selectinload(Sample.analyses).selectinload(Analysis.genotypes) ) - result = await self.session.execute(filtered_query) - return result.scalars().first() + return (await self.session.execute(filtered_query)).scalars().first() async def get_user_by_id(self, user_id: int) -> User: users: Query = self._get_query(User).options(selectinload(User.plates)) @@ -219,8 +205,7 @@ async def get_user_by_id(self, user_id: int) -> User: filtered_query = apply_user_filter( users=users, filter_functions=filter_functions, user_id=user_id ) - result = await self.session.execute(filtered_query) - return result.scalars().first() + return (await self.session.execute(filtered_query)).scalars().first() async def get_user_by_email(self, email: str) -> User | None: users: Query = self._get_query(User) @@ -228,8 +213,7 @@ async def get_user_by_email(self, email: str) -> User | None: filtered_query = apply_user_filter( users=users, filter_functions=filter_functions, email=email ) - result = await self.session.execute(filtered_query) - return result.scalars().first() + return (await self.session.execute(filtered_query)).scalars().first() async def get_users_with_skip_and_limit(self, skip: int, limit: int) -> list[User]: users: Query = self._get_query(User).options(selectinload(User.plates)) @@ -237,8 +221,7 @@ async def get_users_with_skip_and_limit(self, skip: int, limit: int) -> list[Use filtered_query = apply_user_filter( users=users, filter_functions=filter_functions, skip=skip, limit=limit ) - result = await self.session.execute(filtered_query) - return result.scalars().all() + return (await self.session.execute(filtered_query)).scalars().all() async def check_analyses_objects(self, analyses: list[Analysis], analysis_type: Types) -> None: """Raising 400 if any analysis in the list already exist in the database""" @@ -253,8 +236,7 @@ async def check_analyses_objects(self, analyses: list[Analysis], analysis_type: async def get_snps(self) -> list[SNP]: filtered_query = self._get_query(SNP) - result = await self.session.execute(filtered_query) - return result.scalars().all() + return (await self.session.execute(filtered_query)).scalars().all() async def get_snps_by_limit_and_skip(self, skip: int, limit: int) -> list[SNP]: snps: Query = self._get_query(SNP) @@ -262,5 +244,4 @@ async def get_snps_by_limit_and_skip(self, skip: int, limit: int) -> list[SNP]: filtered_query = 
apply_snp_filter(
            snps=snps, filter_functions=filter_functions, skip=skip, limit=limit
        )
-        result = await self.session.execute(filtered_query)
-        return result.scalars().all()
+        return (await self.session.execute(filtered_query)).scalars().all()

From 4573b2941a19dd5e7202350a4bfeca6c93c144d7 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Fri, 11 Oct 2024 00:13:41 +0200
Subject: [PATCH 50/65] Add async utility methods for row and scalar query results

---
 genotype_api/database/base_handler.py | 48 +++++++++++++++++++++------
 1 file changed, 38 insertions(+), 10 deletions(-)

diff --git a/genotype_api/database/base_handler.py b/genotype_api/database/base_handler.py
index ede91ed..8827658 100644
--- a/genotype_api/database/base_handler.py
+++ b/genotype_api/database/base_handler.py
@@ -1,13 +1,10 @@
 from dataclasses import dataclass
-from typing import Type
+from typing import Any, List, Type

-from sqlalchemy.exc import OperationalError
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.future import select
 from sqlalchemy.orm import DeclarativeBase, Query
-from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed

-from genotype_api.config import settings
 from genotype_api.database.models import Analysis, Sample


@@ -18,15 +15,46 @@ class BaseHandler:
     def __init__(self, session: AsyncSession):
         self.session = session

-    @retry(
-        stop=stop_after_attempt(settings.max_retries),
-        wait=wait_fixed(settings.retry_delay),
-        retry=retry_if_exception_type(OperationalError),
-        reraise=True,
-    )
     def _get_query(self, table: Type[DeclarativeBase]) -> Query:
         """Return a query for the given table."""
         return select(table)

     def _get_join_analysis_on_sample(self) -> Query:
         return self._get_query(table=Sample).join(Analysis, Analysis.sample_id == Sample.id)
+
+    # Full row fetch methods
+    async def fetch_all_rows(self, query: Query) -> List[DeclarativeBase]:
+        """Fetch all full rows matching the query."""
+        result = await self.session.execute(query)
+        return result.scalars().all()
+
+    async def fetch_first_row(self, query: Query) -> DeclarativeBase | None:
+        """Fetch the first full row matching the query or None if no match found."""
+        result = await self.session.execute(query)
+        return result.scalars().first()
+
+    async def fetch_one_or_none(self, query: Query) -> DeclarativeBase | None:
+        """Fetch one full row or None if no match found."""
+        result = await self.session.execute(query)
+        return result.scalars().one_or_none()
+
+    # Scalar value fetch methods
+    async def fetch_column_values(self, query: Query) -> List:
+        """Fetch all values from a single column."""
+        result = await self.session.execute(query)
+        return result.scalars().all()
+
+    async def fetch_first_value(self, query: Query) -> Any | None:
+        """Fetch the first value from a single column or None if no match found."""
+        result = await self.session.execute(query)
+        return result.scalar()
+
+    async def fetch_one_value_or_none(self, query: Query) -> Any | None:
+        """Fetch one value from a single column or None if no match found."""
+        result = await self.session.execute(query)
+        return result.scalar_one_or_none()
+
+    async def fetch_one_value(self, query: Query) -> Any:
+        """Fetch exactly one value from a single column or raise an error if not found."""
+        result = await self.session.execute(query)
+        return result.scalar_one()

From 2b58ea841c785e05574cd2b2886f23febbea0e7c Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Fri, 11 Oct 2024 00:22:14 +0200
Subject: [PATCH 51/65] Use fetch_one_or_none from CreateHandler

---
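A minimal usage sketch of the fetch helpers that PATCH 50 introduced and that this patch starts applying (the Sample model and an initialised AsyncSession are assumed from the series; get_sample_or_none is a hypothetical example method, not part of any commit):

    from sqlalchemy.future import select

    class ExampleHandler(BaseHandler):
        async def get_sample_or_none(self, sample_id: str) -> Sample | None:
            # Build the statement first, then let the helper execute and unwrap it.
            query = select(Sample).filter(Sample.id == sample_id)
            # fetch_one_or_none raises MultipleResultsFound when several rows match;
            # fetch_first_row would silently return the first match instead.
            return await self.fetch_one_or_none(query)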
genotype_api/database/crud/create.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/genotype_api/database/crud/create.py b/genotype_api/database/crud/create.py
index aaa9735..34188b5 100644
--- a/genotype_api/database/crud/create.py
+++ b/genotype_api/database/crud/create.py
@@ -1,5 +1,6 @@
 import logging

+from sqlalchemy.future import select
 from sqlalchemy.orm import Query

 from genotype_api.database.base_handler import BaseHandler
@@ -26,9 +27,9 @@ async def create_plate(self, plate: Plate) -> Plate:

     async def create_sample(self, sample: Sample) -> Sample:
         """Creates a sample in the database."""
-        sample_in_db: Query = self._get_query(Analysis).filter(Sample.id == sample.id)
-        result = await self.session.execute(sample_in_db)
-        if result.one_or_none():
+        sample_query: Query = select(Sample).filter(Sample.id == sample.id)
+        sample_in_db = await self.fetch_one_or_none(sample_query)
+        if sample_in_db:
             raise SampleExistsError
         self.session.add(sample)
         await self.session.commit()
@@ -41,9 +42,8 @@ async def create_analyses_samples(self, analyses: list[Analysis]) -> list[Sample

         for analysis in analyses:
             # Sample already exists in the database
-            sample_in_db_query = self._get_query(Sample).filter(Sample.id == analysis.sample_id)
-            result = await self.session.execute(sample_in_db_query)
-            sample_in_db = result.one_or_none()
+            sample_query = select(Sample).filter(Sample.id == analysis.sample_id)
+            sample_in_db = await self.fetch_one_or_none(sample_query)

             # Sample doesn't exist
             if not sample_in_db:

From 2e1ab2c74294088aa3e25bea865b90c22295c465 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Fri, 11 Oct 2024 00:26:22 +0200
Subject: [PATCH 52/65] use select and fetch

---
 genotype_api/database/crud/delete.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/genotype_api/database/crud/delete.py b/genotype_api/database/crud/delete.py
index 4b8594e..57557c9 100644
--- a/genotype_api/database/crud/delete.py
+++ b/genotype_api/database/crud/delete.py
@@ -1,5 +1,7 @@
 import logging

+from sqlalchemy.future import select
+
 from genotype_api.database.base_handler import BaseHandler
 from genotype_api.database.models import SNP, Analysis, Plate, Sample, User

@@ -25,9 +27,8 @@ async def delete_user(self, user: User) -> None:
         await self.session.commit()

     async def delete_snps(self) -> int:
-        query = self._get_query(SNP)
-        result = await self.session.execute(query)
-        snps: list[SNP] = result.scalars().all()
+        query = select(SNP)
+        snps: list[SNP] = await self.fetch_all_rows(query)
         count: int = len(snps)
         for snp in snps:
             self.session.delete(snp)

From 84d1633d8820068a3e46b4fca40f7cff0152cbe0 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Fri, 11 Oct 2024 00:30:18 +0200
Subject: [PATCH 53/65] Use fetch_one_or_none

---
 genotype_api/database/crud/update.py | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/genotype_api/database/crud/update.py b/genotype_api/database/crud/update.py
index 8f3068f..0a12cfa 100644
--- a/genotype_api/database/crud/update.py
+++ b/genotype_api/database/crud/update.py
@@ -32,8 +32,7 @@ async def refresh_sample_status(

     async def update_sample_comment(self, sample_id: str, comment: str) -> Sample:
         query: Query = select(Sample).distinct().filter(Sample.id == sample_id)
-        result = await self.session.execute(query)
-        sample: Sample = result.scalars().one_or_none()
+        sample: Sample = await self.fetch_one_or_none(query)
         if not sample:
             raise SampleNotFoundError
         sample.comment = comment
@@ -44,8 +43,11 @@ async def
update_sample_comment(self, sample_id: str, comment: str) -> Sample: async def update_sample_status(self, sample_id: str, status: str | None) -> Sample: query: Query = select(Sample).distinct().filter(Sample.id == sample_id) - result = await self.session.execute(query) - sample: Sample = result.scalars().one_or_none() + sample: Sample = await self.fetch_one_or_none(query) if not sample: raise SampleNotFoundError sample.status = status @@ -74,9 +72,7 @@ async def update_sample_sex(self, sexes_update: SampleSexesUpdate) -> Sample: .join(Analysis, Analysis.sample_id == Sample.id) .filter(Sample.id == sexes_update.sample_id) ) - - result = await self.session.execute(query) - sample = result.scalars().one_or_none() + sample = await self.fetch_one_or_none(query) if not sample: raise SampleNotFoundError sample.sex = sexes_update.sex From f376ba5cb5a027235aa04f3e00b59a8e15f57068 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Fri, 11 Oct 2024 00:32:28 +0200 Subject: [PATCH 54/65] Add retry logic and session health checks in database session --- genotype_api/database/database.py | 40 +++++++++++++++---------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/genotype_api/database/database.py b/genotype_api/database/database.py index 89c8026..7895a3a 100644 --- a/genotype_api/database/database.py +++ b/genotype_api/database/database.py @@ -1,12 +1,13 @@ """Hold the database information and session manager.""" -import asyncio import logging from contextlib import asynccontextmanager from typing import AsyncGenerator +from sqlalchemy import text from sqlalchemy.exc import OperationalError from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed from genotype_api.config import settings from genotype_api.database.models import Base @@ -17,8 +18,8 @@ settings.db_uri, echo=settings.echo_sql, future=True, - pool_size=10, - max_overflow=20, + pool_recycle=3600, # Recycle connections after 3600 seconds (1 hour) + pool_pre_ping=True, # Enable connection health checks (pings) ) sessionmanager = async_sessionmaker( @@ -28,25 +29,24 @@ ) +@retry( + stop=stop_after_attempt(settings.max_retries), + wait=wait_fixed(settings.retry_delay), + retry=retry_if_exception_type(OperationalError), + reraise=True, +) @asynccontextmanager async def get_session() -> AsyncGenerator[AsyncSession, None]: - """Provides an asynchronous session context manager with retry logic.""" - retries = 0 - while retries < settings.max_retries: - async with sessionmanager() as session: - try: - yield session - break - except OperationalError as e: - retries += 1 - LOG.error(f"OperationalError: {e}, retrying {retries}/{settings.max_retries}...") - if retries >= settings.max_retries: - LOG.error("Max retries exceeded. 
Could not connect to the database.") - raise - await session.close() - await asyncio.sleep(settings.retry_delay) - finally: - await session.close() + """Yield a valid database session with retry logic for OperationalError.""" + async with sessionmanager() as session: + try: + # Test if the session is still valid by executing a simple query + await session.execute(text("SELECT 1")) + yield session + except OperationalError as e: + # If session is invalid, retry + LOG.error(f"OperationalError: {e}") + raise async def create_all_tables(): From 96abb8ffc61e6a1c88a8ffeee39f40b2ccab87ab Mon Sep 17 00:00:00 2001 From: ahdamin Date: Fri, 11 Oct 2024 00:34:34 +0200 Subject: [PATCH 55/65] Remove retry logic from Store session creation --- genotype_api/database/store.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/genotype_api/database/store.py b/genotype_api/database/store.py index b4ba9b7..4f9a38d 100644 --- a/genotype_api/database/store.py +++ b/genotype_api/database/store.py @@ -2,11 +2,8 @@ from typing import AsyncGenerator -from sqlalchemy.exc import OperationalError from sqlalchemy.ext.asyncio import AsyncSession -from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed -from genotype_api.config import settings from genotype_api.database.crud.create import CreateHandler from genotype_api.database.crud.delete import DeleteHandler from genotype_api.database.crud.read import ReadHandler @@ -29,16 +26,10 @@ def __init__(self, session: AsyncSession): UpdateHandler.__init__(self, session) @classmethod - @retry( - stop=stop_after_attempt(settings.max_retries), - wait=wait_fixed(settings.retry_delay), - retry=retry_if_exception_type(OperationalError), - reraise=True, - ) async def create(cls) -> "Store": """Asynchronously create and return a Store instance with a session.""" - async with get_session() as session: # Correctly use async context manager - return cls(session) # Return a Store instance with the session + async with get_session() as session: + return cls(session) async def get_store() -> AsyncGenerator[Store, None]: From 530147e8b85d2b052ff1c9cfe9254da4d49e23ed Mon Sep 17 00:00:00 2001 From: ahdamin Date: Fri, 11 Oct 2024 00:46:15 +0200 Subject: [PATCH 56/65] use select and fetch_all_rows --- tests/database/crud/test_create.py | 71 ++++++++++++++---------------- 1 file changed, 32 insertions(+), 39 deletions(-) diff --git a/tests/database/crud/test_create.py b/tests/database/crud/test_create.py index 0001dbf..b31bb54 100644 --- a/tests/database/crud/test_create.py +++ b/tests/database/crud/test_create.py @@ -1,5 +1,6 @@ """Module to test the create functionality of the genotype API CRUD.""" +from sqlalchemy.future import select from sqlalchemy.orm import Query from genotype_api.database.models import SNP, Analysis, Genotype, Plate, Sample, User @@ -8,104 +9,97 @@ async def test_create_analysis(store: Store, test_analysis: Analysis): # GIVEN an analysis and an empty store - analyses_query: Query = store._get_query(Analysis) - result = await store.session.execute(analyses_query) - assert not result.scalars().all() + analyses_query: Query = select(Analysis) + analyses = await store.fetch_all_rows(analyses_query) + assert not analyses # WHEN creating the analysis await store.create_analysis(analysis=test_analysis) - # THEN the analysis is created - result = await store.session.execute(analyses_query) - analyses = result.scalars().all() + analyses = await store.fetch_all_rows(analyses_query) assert analyses[0].id == test_analysis.id 
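A note on PATCH 54 above: pool_pre_ping issues a lightweight liveness check on every connection as it is checked out of the pool, and pool_recycle=3600 retires pooled connections older than an hour, a common guard against MySQL dropping idle connections at wait_timeout. A minimal sketch of that engine configuration in isolation (the URI is illustrative only, not taken from the series):

    from sqlalchemy.ext.asyncio import create_async_engine

    engine = create_async_engine(
        "mysql+aiomysql://user:pass@localhost/genotype",  # illustrative URI
        pool_pre_ping=True,  # ping connections on checkout and replace dead ones
        pool_recycle=3600,   # retire connections older than one hour
    )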
async def test_create_genotype(store: Store, test_genotype: Genotype): # GIVEN a genotype and an empty store - genotypes_query: Query = store._get_query(Genotype) - result = await store.session.execute(genotypes_query) - assert not result.scalars().all() + genotypes_query: Query = select(Genotype) + genotypes = await store.fetch_all_rows(genotypes_query) + assert not genotypes # WHEN creating the genotype await store.create_genotype(genotype=test_genotype) # THEN the genotype is created - result = await store.session.execute(genotypes_query) - genotypes = result.scalars().all() + genotypes = await store.fetch_all_rows(genotypes_query) assert genotypes[0].id == test_genotype.id async def test_create_snp(store: Store, test_snp: SNP): # GIVEN a SNP and an empty store - snps_query: Query = store._get_query(SNP) - result = await store.session.execute(snps_query) - assert not result.scalars().all() + snps_query: Query = select(SNP) + snps = await store.fetch_all_rows(snps_query) + assert not snps # WHEN creating the SNP await store.create_snps(snps=[test_snp]) # THEN the SNP is created - result = await store.session.execute(snps_query) - snps = result.scalars().all() + snps = await store.fetch_all_rows(snps_query) assert snps[0].id == test_snp.id async def test_create_user(store: Store, test_user: User): # GIVEN a user and an empty store - users_query: Query = store._get_query(User) - result = await store.session.execute(users_query) - assert not result.scalars().all() + users_query: Query = select(User) + users = await store.fetch_all_rows(users_query) + assert not users # WHEN creating the user await store.create_user(user=test_user) # THEN the user is created - result = await store.session.execute(users_query) - users = result.scalars().all() + users = await store.fetch_all_rows(users_query) assert users[0].id == test_user.id async def test_create_sample(store: Store, test_sample: Sample): # GIVEN a sample and an empty store - samples_query: Query = store._get_query(Sample) - result = await store.session.execute(samples_query) - assert not result.scalars().all() + samples_query: Query = select(Sample) + samples = await store.fetch_all_rows(samples_query) + assert not samples # WHEN creating the sample await store.create_sample(sample=test_sample) # THEN the sample is created - result = await store.session.execute(samples_query) - samples = result.scalars().all() + samples = await store.fetch_all_rows(samples_query) assert samples[0].id == test_sample.id async def test_create_plate(store: Store, test_plate: Plate): # GIVEN a plate and an empty store - plates_query: Query = store._get_query(Plate) - result = await store.session.execute(plates_query) - assert not result.scalars().all() + plates_query: Query = select(Plate) + plates = await store.fetch_all_rows(plates_query) + assert not plates # WHEN creating the plate await store.create_plate(plate=test_plate) # THEN the plate is created - result = await store.session.execute(plates_query) - plates = result.scalars().all() + plates = await store.fetch_all_rows(plates_query) assert plates[0].id == test_plate.id async def test_create_analyses_samples(store: Store, test_analysis: Analysis): # GIVEN an analysis in a store - samples_query: Query = store._get_query(Sample) - analyses_query: Query = store._get_query(Analysis) + samples_query: Query = select(Sample) + analyses_query: Query = select(Analysis) - result = await store.session.execute(samples_query) - assert not result.scalars().all() + samples = await store.fetch_all_rows(samples_query) + 
assert not samples

-    result = await store.session.execute(analyses_query)
-    assert not result.scalars().all()
+    analyses = await store.fetch_all_rows(analyses_query)
+    assert not analyses

     await store.create_analysis(test_analysis)

@@ -113,7 +107,6 @@ async def test_create_analyses_samples(store: Store, test_analysis: Analysis):
     await store.create_analyses_samples(analyses=[test_analysis])

     # THEN the samples are created
-    result = await store.session.execute(samples_query)
-    sample: Sample = result.scalars().all()[0]
+    sample: Sample = (await store.fetch_all_rows(samples_query))[0]
     assert sample
     assert sample.id == test_analysis.sample_id

From 96fea87e275a492b5698a22fb97cd8d2b90fe795 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Fri, 11 Oct 2024 00:58:07 +0200
Subject: [PATCH 57/65] Use select and fetch_all_rows

---
 tests/database/crud/test_delete.py | 38 +++++++++++++++++-------------
 1 file changed, 21 insertions(+), 17 deletions(-)

diff --git a/tests/database/crud/test_delete.py b/tests/database/crud/test_delete.py
index b7f9c40..dd88f15 100644
--- a/tests/database/crud/test_delete.py
+++ b/tests/database/crud/test_delete.py
@@ -1,5 +1,6 @@
 """Module to test the delete functionality of the genotype API CRUD."""

+from sqlalchemy.future import select
 from sqlalchemy.orm import Query

 from genotype_api.database.models import SNP, Analysis, Plate, Sample, User
@@ -21,52 +22,55 @@ async def test_delete_analysis(base_store: Store, test_analysis: Analysis):

 async def test_delete_sample(base_store: Store, test_sample: Sample):
     # GIVEN a sample and a store with the sample
-    query: Query = base_store._get_query(Sample)
-    result = await base_store.session.execute(query)
-    assert test_sample in result.scalars().all()
+    query: Query = select(Sample)
+    samples = await base_store.fetch_all_rows(query)
+    assert test_sample in samples

     # WHEN deleting the sample
     await base_store.delete_sample(sample=test_sample)

     # THEN the sample is deleted
-    result = await base_store.session.execute(base_store._get_query(test_sample))
-    assert test_sample not in result.scalars().all()
+    samples = await base_store.fetch_all_rows(query)
+    assert test_sample not in samples


 async def test_delete_plate(base_store: Store, test_plate: Plate):
     # GIVEN a plate and a store with the plate
-    result = await base_store.session.execute(base_store._get_query(test_plate))
-    assert test_plate in result.scalars().all()
+    query: Query = select(Plate)
+    plates = await base_store.fetch_all_rows(query)
+    assert test_plate in plates

     # WHEN deleting the plate
     await base_store.delete_plate(plate=test_plate)

     # THEN the plate is deleted
-    result = await base_store.session.execute(base_store._get_query(test_plate))
-    assert test_plate not in result.scalars().all()
+    plates = await base_store.fetch_all_rows(query)
+    assert test_plate not in plates


 async def test_delete_user(base_store: Store, test_user: User):
     # GIVEN a user and a store with the user
-    result = await base_store.session.execute(base_store._get_query(test_user))
-    assert test_user in result.scalars().all()
+    query: Query = select(User)
+    users = await base_store.fetch_all_rows(query)
+    assert test_user in users

     # WHEN deleting the user
     base_store.delete_user(user=test_user)

     # THEN the user is deleted
-    result = await base_store.session.execute(base_store._get_query(test_user))
-    assert test_user not in result.scalars().all()
+    users = await base_store.fetch_all_rows(query)
+    assert test_user not in users


 async def test_delete_snps(base_store: Store, test_snp: SNP):
     # GIVEN an SNP and a store with the
SNP
-    result = await base_store.session.execute(base_store._get_query(test_snp))
-    assert result.scalars().all()
+    query: Query = select(SNP)
+    snps = await base_store.fetch_all_rows(query)
+    assert snps

     # WHEN deleting the SNP
     base_store.delete_snps()

     # THEN all SNPs are deleted
-    result = await base_store.session.execute(base_store._get_query(test_snp))
-    assert not result.scalars().all()
+    snps = await base_store.fetch_all_rows(query)
+    assert not snps

From b4c17152d166a195f7e1a1a8e45c0a204cde2ee0 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Fri, 11 Oct 2024 01:04:13 +0200
Subject: [PATCH 58/65] Use select and fetch_all_rows

---
 .../database/filters/test_analysis_filters.py | 21 ++++++-----------
 1 file changed, 9 insertions(+), 12 deletions(-)

diff --git a/tests/database/filters/test_analysis_filters.py b/tests/database/filters/test_analysis_filters.py
index 7f2dd69..3e31ea7 100644
--- a/tests/database/filters/test_analysis_filters.py
+++ b/tests/database/filters/test_analysis_filters.py
@@ -1,5 +1,6 @@
 """Module to test the analysis filters."""

+from sqlalchemy.future import select
 from sqlalchemy.orm import Query

 from genotype_api.database.filters.analysis_filter import (
@@ -17,10 +18,9 @@ async def test_filter_analyses_by_id(base_store: Store, test_analysis, helpers):
     # GIVEN an analysis

     # WHEN filtering analyses by id
-    query: Query = base_store._get_query(Analysis)
+    query: Query = select(Analysis)
     filtered_query = filter_analyses_by_id(analysis_id=test_analysis.id, analyses=query)
-    result = await base_store.session.execute(filtered_query)
-    analyses: list[Analysis] = result.scalars().all()
+    analyses: list[Analysis] = await base_store.fetch_all_rows(filtered_query)

     # THEN assert the analysis is returned
     assert analyses
@@ -33,10 +33,9 @@ async def test_filter_analyses_by_type(
     # GIVEN an analysis

     # WHEN filtering analyses by type
-    query: Query = base_store._get_query(Analysis)
+    query: Query = select(Analysis)
     filtered_query = filter_analyses_by_type(type=test_analysis.type, analyses=query)
-    result = await base_store.session.execute(filtered_query)
-    analyses: list[Analysis] = result.scalars().all()
+    analyses: list[Analysis] = await base_store.fetch_all_rows(filtered_query)

     # THEN assert the analysis is returned
     assert analyses
@@ -49,10 +48,9 @@ async def test_filter_analyses_by_plate_id(
     # GIVEN an analysis

     # WHEN filtering analyses by plate id
-    query: Query = base_store._get_query(Analysis)
+    query: Query = select(Analysis)
     filtered_query = filter_analyses_by_plate_id(plate_id=test_analysis.plate_id, analyses=query)
-    result = await base_store.session.execute(filtered_query)
-    analyses: list[Analysis] = result.scalars().all()
+    analyses: list[Analysis] = await base_store.fetch_all_rows(filtered_query)

     # THEN assert the analysis is returned
     assert analyses
@@ -63,10 +61,9 @@ async def test_filter_analyses_by_sample_id(base_store: Store, test_analysis, he
     # GIVEN an analysis

     # WHEN filtering analyses by sample id
-    query: Query = base_store._get_query(Analysis)
+    query: Query = select(Analysis)
     filtered_query = filter_analyses_by_sample_id(sample_id=test_analysis.sample_id, analyses=query)
-    result = await base_store.session.execute(filtered_query)
-    analyses: list[Analysis] = result.scalars().all()
+    analyses: list[Analysis] = await base_store.fetch_all_rows(filtered_query)

     # THEN assert the analysis is returned
     assert analyses

From b334139f7af90b51b944d8bc4f1f4872b5c5de33 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Fri, 11 Oct 2024 01:06:55 +0200
Subject: [PATCH 59/65]
Use select and fetch_all_rows

---
 .../database/filters/test_genotype_filters.py |  9 +++---
 tests/database/filters/test_user_filters.py   | 32 ++++++-------------
 2 files changed, 14 insertions(+), 27 deletions(-)

diff --git a/tests/database/filters/test_genotype_filters.py b/tests/database/filters/test_genotype_filters.py
index 53093b8..12a32e9 100644
--- a/tests/database/filters/test_genotype_filters.py
+++ b/tests/database/filters/test_genotype_filters.py
@@ -1,5 +1,6 @@
 """Module to test the genotype filters."""

+from sqlalchemy.future import select
 from sqlalchemy.orm import Query

 from genotype_api.database.filters.genotype_filters import filter_genotypes_by_id
@@ -13,11 +14,9 @@ async def test_filter_genotypes_by_id(store: Store, test_genotype: Genotype, hel
     await helpers.ensure_genotype(store=store, genotype=test_genotype)

     # WHEN filtering genotypes by id
-    query: Query = store._get_query(Genotype)
-    result = await store.session.execute(query)
-    genotypes: list[Genotype] = filter_genotypes_by_id(
-        entry_id=test_genotype.id, genotypes=result.scalars()
-    ).all()
+    query: Query = select(Genotype)
+    filtered_query = filter_genotypes_by_id(entry_id=test_genotype.id, genotypes=query)
+    genotypes: list[Genotype] = await store.fetch_all_rows(filtered_query)

     # THEN assert the genotype is returned
     assert genotypes

diff --git a/tests/database/filters/test_user_filters.py b/tests/database/filters/test_user_filters.py
index 8eea428..a57157d 100644
--- a/tests/database/filters/test_user_filters.py
+++ b/tests/database/filters/test_user_filters.py
@@ -1,9 +1,9 @@
 """Module to test the user filters."""

+from sqlalchemy.future import select
 from sqlalchemy.orm import Query

 from genotype_api.database.filters.user_filters import (
-    apply_user_filter,
     filter_users_by_email,
     filter_users_by_id,
     filter_users_by_name,
@@ -17,13 +17,9 @@ async def test_filter_users_by_id(base_store: Store, test_user: User):
     # GIVEN a store with a user

     # WHEN filtering users by id
-    query: Query = base_store._get_query(User)
-    filter_functions = filter_users_by_id(user_id=test_user.id, users=query)
-    filtered_query = apply_user_filter(
-        users=query, filter_functions=filter_functions, user_id=test_user.id
-    )
-    result = await base_store.session.execute(filtered_query)
-    user: User = result.scalars().first()
+    query: Query = select(User)
+    filtered_query = filter_users_by_id(user_id=test_user.id, users=query)
+    user: User = await base_store.fetch_first_row(filtered_query)

     # THEN the user is returned
     assert user
@@ -35,13 +31,9 @@ async def test_filter_users_by_email(base_store: Store, test_user: User):
     # GIVEN a store with a user

     # WHEN filtering users by email
-    query: Query = base_store._get_query(User)
-    filter_functions = filter_users_by_email(email=test_user.email, users=query)
-    filtered_query = apply_user_filter(
-        users=query, filter_functions=filter_functions, email=test_user.email
-    )
-    result = await base_store.session.execute(filtered_query)
-    user: User = result.scalars().first()
+    query: Query = select(User)
+    filtered_query = filter_users_by_email(email=test_user.email, users=query)
+    user: User = await base_store.fetch_first_row(filtered_query)

     # THEN the user is returned
     assert user
@@ -53,13 +45,9 @@ async def test_filter_users_by_name(base_store: Store, test_user: User):
     # GIVEN a store with a user

     # WHEN filtering users by name
-    query: Query = base_store._get_query(User)
-    filter_functions = filter_users_by_name(name=test_user.name, users=query)
-    filtered_query = apply_user_filter(
-        users=query,
filter_functions=filter_functions, name=test_user.name - ) - result = await base_store.session.execute(filtered_query) - user: User = result.scalars().first() + query: Query = select(User) + filtered_query = filter_users_by_name(name=test_user.name, users=query) + user: User = await base_store.fetch_first_row(filtered_query) # THEN the user is returned assert user From 7a6f809e4a29efb6ef6c7e431644e8c673198c17 Mon Sep 17 00:00:00 2001 From: ahdamin Date: Fri, 11 Oct 2024 01:11:31 +0200 Subject: [PATCH 60/65] Add query generation methods and use BaseHandler async fetch methods --- genotype_api/database/crud/read.py | 177 ++++++++++++++++------------- 1 file changed, 95 insertions(+), 82 deletions(-) diff --git a/genotype_api/database/crud/read.py b/genotype_api/database/crud/read.py index f2fc595..fd6d32a 100644 --- a/genotype_api/database/crud/read.py +++ b/genotype_api/database/crud/read.py @@ -12,18 +12,31 @@ from genotype_api.database.filters.analysis_filter import ( AnalysisFilter, apply_analysis_filter, + filter_analyses_by_id, + filter_analyses_by_plate_id, ) from genotype_api.database.filters.genotype_filters import ( GenotypeFilter, apply_genotype_filter, ) -from genotype_api.database.filters.plate_filters import PlateFilter, apply_plate_filter +from genotype_api.database.filters.plate_filters import ( + PlateFilter, + apply_plate_filter, + filter_plates_by_id, + filter_plates_by_plate_id, +) from genotype_api.database.filters.sample_filters import ( SampleFilter, apply_sample_filter, + filter_samples_by_id, ) from genotype_api.database.filters.snp_filters import SNPFilter, apply_snp_filter -from genotype_api.database.filters.user_filters import UserFilter, apply_user_filter +from genotype_api.database.filters.user_filters import ( + UserFilter, + apply_user_filter, + filter_users_by_email, + filter_users_by_id, +) from genotype_api.database.models import SNP, Analysis, Genotype, Plate, Sample, User LOG = logging.getLogger(__name__) @@ -32,40 +45,31 @@ class ReadHandler(BaseHandler): async def get_analyses_by_plate_id(self, plate_id: int) -> list[Analysis]: - analyses: Query = self._get_query(Analysis).options( - selectinload(Analysis.genotypes), selectinload(Analysis.sample) - ) - filter_functions = [AnalysisFilter.BY_PLATE_ID] - filtered_query = apply_analysis_filter( - analyses=analyses, filter_functions=filter_functions, plate_id=plate_id - ) - return (await self.session.execute(filtered_query)).scalars().all() + analyses: Query = self._get_analysis_with_genotypes() + filtered_query = filter_analyses_by_plate_id(plate_id=plate_id, analyses=analyses) + return await self.fetch_all_rows(filtered_query) async def get_analysis_by_id(self, analysis_id: int) -> Analysis: - analyses: Query = self._get_query(Analysis) - filter_functions = [AnalysisFilter.BY_ID] - filtered_query = apply_analysis_filter( - analyses=analyses, filter_functions=filter_functions, analysis_id=analysis_id - ) - return (await self.session.execute(filtered_query)).scalars().first() + analyses: Query = select(Analysis) + filtered_query = filter_analyses_by_id(analysis_id=analysis_id, analyses=analyses) + return await self.fetch_first_row(filtered_query) async def get_analyses(self) -> list[Analysis]: - filtered_query = self._get_query(Analysis) - return (await self.session.execute(filtered_query)).scalars().all() + filtered_query = select(Analysis) + return await self.fetch_all_rows(filtered_query) async def get_analyses_with_skip_and_limit(self, skip: int, limit: int) -> list[Analysis]: - analyses: Query = 
self._get_query(Analysis) + analyses: Query = select(Analysis) filter_functions = [AnalysisFilter.SKIP_AND_LIMIT] filtered_query = apply_analysis_filter( analyses=analyses, filter_functions=filter_functions, skip=skip, limit=limit ) - return (await self.session.execute(filtered_query)).scalars().all() + return await self.fetch_all_rows(filtered_query) async def get_analyses_by_type_between_dates( self, analysis_type: Types, date_min: date, date_max: date ) -> list[Analysis]: - analyses: Query = self._get_query(Analysis) - + analyses: Query = select(Analysis) filter_functions = [AnalysisFilter.BY_TYPE, AnalysisFilter.BETWEEN_DATES] filtered_query = apply_analysis_filter( analyses=analyses, @@ -76,12 +80,12 @@ async def get_analyses_by_type_between_dates( ) filtered_query = filtered_query.options(selectinload(Analysis.genotypes)) - return (await self.session.execute(filtered_query)).scalars().all() + return await self.fetch_all_rows(filtered_query) async def get_analysis_by_type_and_sample_id( self, sample_id: str, analysis_type: Types ) -> Analysis: - analyses: Query = self._get_query(Analysis) + analyses: Query = select(Analysis) filter_functions = [AnalysisFilter.BY_TYPE, AnalysisFilter.BY_SAMPLE_ID] filtered_query = apply_analysis_filter( analyses=analyses, @@ -89,33 +93,22 @@ async def get_analysis_by_type_and_sample_id( sample_id=sample_id, type=analysis_type, ) - filtered_query = filtered_query.options(selectinload(Analysis.genotypes)) - return (await self.session.execute(filtered_query)).scalars().first() + return await self.fetch_first_row(filtered_query) async def get_plate_by_id(self, plate_id: int) -> Plate: - plates: Query = self._get_query(Plate).options( - selectinload(Plate.analyses).selectinload(Analysis.sample) - ) - filter_functions = [PlateFilter.BY_ID] - filtered_query = apply_plate_filter( - plates=plates, filter_functions=filter_functions, entry_id=plate_id - ) - return (await self.session.execute(filtered_query)).scalars().first() + plates: Query = self._get_plate_with_analyses_and_samples() + filtered_query = filter_plates_by_id(entry_id=plate_id, plates=plates) + return await self.fetch_first_row(filtered_query) async def get_plate_by_plate_id(self, plate_id: str) -> Plate: - plates: Query = self._get_query(Plate).options(selectinload(Plate.analyses)) - filter_functions = [PlateFilter.BY_PLATE_ID] - filtered_query = apply_plate_filter( - plates=plates, filter_functions=filter_functions, plate_id=plate_id - ) - return (await self.session.execute(filtered_query)).scalars().first() + plates: Query = self._get_plate_with_analyses + filtered_query = filter_plates_by_plate_id(plate_id=plate_id, plates=plates) + return await self.fetch_first_row(filtered_query) async def get_ordered_plates(self, order_params: PlateOrderParams) -> list[Plate]: sort_func = desc if order_params.sort_order == "descend" else asc - plates: Query = self._get_query(Plate).options( - selectinload(Plate.analyses).selectinload(Analysis.sample) - ) + plates: Query = self._get_plate_with_analyses_and_samples() filter_functions = [PlateFilter.ORDER, PlateFilter.SKIP_AND_LIMIT] filtered_query = apply_plate_filter( plates=plates, @@ -125,23 +118,18 @@ async def get_ordered_plates(self, order_params: PlateOrderParams) -> list[Plate limit=order_params.limit, sort_func=sort_func, ) - return (await self.session.execute(filtered_query)).scalars().all() + return await self.fetch_all_rows(filtered_query) async def get_genotype_by_id(self, entry_id: int) -> Genotype: - genotypes: Query = 
self._get_query(Genotype).options(selectinload(Genotype.analysis))
+        genotypes: Query = self._get_genotype_with_analysis()
         filter_functions = [GenotypeFilter.BY_ID]
         filtered_query = apply_genotype_filter(
             genotypes=genotypes, filter_functions=filter_functions, entry_id=entry_id
         )
-        return (await self.session.execute(filtered_query)).scalars().first()
+        return await self.fetch_first_row(filtered_query)

     async def get_filtered_samples(self, filter_params: SampleFilterParams) -> list[Sample]:
-        query = (
-            select(Sample)
-            .distinct()
-            .options(selectinload(Sample.analyses).selectinload(Analysis.genotypes))
-            .join(Analysis, Analysis.sample_id == Sample.id)
-        )
+        query = self._get_samples_with_analyses_and_genotypes()
         if filter_params.sample_id:
             query = self._get_samples(query, filter_params.sample_id)
         if filter_params.plate_id:
@@ -157,8 +145,9 @@ async def get_filtered_samples(self, filter_params: SampleFilterParams) -> list[
             .offset(filter_params.skip)
             .limit(filter_params.limit)
         )
-        return (await self.session.execute(filtered_query)).scalars().all()
+        return await self.fetch_all_rows(filtered_query)

+    # pylint: disable=E1102
     @staticmethod
     def _get_incomplete_samples(query: Query) -> Query:
         """Returning sample query statement for samples with fewer than two analyses."""
@@ -176,12 +165,12 @@ def _get_plate_samples(query: Query, plate_id: str) -> Query:

     @staticmethod
     def _get_commented_samples(query: Query) -> Query:
         """Returning sample query statement for samples with a comment."""
-        return query.filter(Sample.comment != None)
+        return query.filter(Sample.comment.is_not(None))

     @staticmethod
     def _get_status_missing_samples(query: Query) -> Query:
         """Returning sample query statement for samples with no status."""
-        return query.filter(Sample.status == None)
+        return query.filter(Sample.status.is_(None))

     @staticmethod
     def _get_samples(query: Query, sample_id: str) -> Query:
@@ -189,39 +178,27 @@ def _get_samples(query: Query, sample_id: str) -> Query:
         return query.filter(Sample.id.contains(sample_id))

     async def get_sample_by_id(self, sample_id: str) -> Sample:
-        samples: Query = self._get_query(Sample)
-        filter_functions = [SampleFilter.BY_ID]
-        filtered_query = apply_sample_filter(
-            samples=samples, filter_functions=filter_functions, sample_id=sample_id
-        )
-        filtered_query = filtered_query.options(
-            selectinload(Sample.analyses).selectinload(Analysis.genotypes)
-        )
-        return (await self.session.execute(filtered_query)).scalars().first()
+        samples: Query = self._get_samples_with_analyses_and_genotypes()
+        filtered_query = filter_samples_by_id(sample_id=sample_id, samples=samples)
+        return await self.fetch_first_row(filtered_query)

     async def get_user_by_id(self, user_id: int) -> User:
-        users: Query = self._get_query(User).options(selectinload(User.plates))
-        filter_functions = [UserFilter.BY_ID]
-        filtered_query = apply_user_filter(
-            users=users, filter_functions=filter_functions, user_id=user_id
-        )
-        return (await self.session.execute(filtered_query)).scalars().first()
+        users: Query = self._get_user_with_plates()
+        filtered_query = filter_users_by_id(user_id=user_id, users=users)
+        return await self.fetch_first_row(filtered_query)

     async def get_user_by_email(self, email: str) -> User | None:
-        users: Query = self._get_query(User)
-        filter_functions = [UserFilter.BY_EMAIL]
-        filtered_query = apply_user_filter(
-            users=users, filter_functions=filter_functions, email=email
-        )
-        return (await self.session.execute(filtered_query)).scalars().first()
+        users: Query = select(User)
+        
filtered_query = filter_users_by_email(email=email, users=users) + return await self.fetch_first_row(filtered_query) async def get_users_with_skip_and_limit(self, skip: int, limit: int) -> list[User]: - users: Query = self._get_query(User).options(selectinload(User.plates)) + users: Query = self._get_user_with_plates() filter_functions = [UserFilter.SKIP_AND_LIMIT] filtered_query = apply_user_filter( users=users, filter_functions=filter_functions, skip=skip, limit=limit ) - return (await self.session.execute(filtered_query)).scalars().all() + return await self.fetch_all_rows(filtered_query) async def check_analyses_objects(self, analyses: list[Analysis], analysis_type: Types) -> None: """Raising 400 if any analysis in the list already exist in the database""" @@ -235,13 +212,49 @@ async def check_analyses_objects(self, analyses: list[Analysis], analysis_type: await self.session.commit() async def get_snps(self) -> list[SNP]: - filtered_query = self._get_query(SNP) - return (await self.session.execute(filtered_query)).scalars().all() + filtered_query = select(SNP) + return await self.fetch_all_rows(filtered_query) async def get_snps_by_limit_and_skip(self, skip: int, limit: int) -> list[SNP]: - snps: Query = self._get_query(SNP) + snps: Query = select(SNP) filter_functions = [SNPFilter.SKIP_AND_LIMIT] filtered_query = apply_snp_filter( snps=snps, filter_functions=filter_functions, skip=skip, limit=limit ) - return (await self.session.execute(filtered_query)).scalars().all() + return await self.fetch_all_rows(filtered_query) + + @staticmethod + def _get_analysis_with_genotypes_and_sample() -> Query: + return select(Analysis).options( + selectinload(Analysis.genotypes), + selectinload(Analysis.sample), + ) + + @staticmethod + def _get_analysis_with_genotypes() -> Query: + return select(Analysis).options(selectinload(Analysis.genotypes)) + + @staticmethod + def _get_plate_with_analyses() -> Query: + return select(Plate).options(selectinload(Plate.analyses)) + + @staticmethod + def _get_plate_with_analyses_and_samples() -> Query: + return select(Plate).options(selectinload(Plate.analyses).selectinload(Analysis.sample)) + + @staticmethod + def _get_genotype_with_analysis() -> Query: + return select(Genotype).options(selectinload(Genotype.analysis)) + + @staticmethod + def _get_samples_with_analyses_and_genotypes() -> Query: + return ( + select(Sample) + .distinct() + .options(selectinload(Sample.analyses).selectinload(Analysis.genotypes)) + .join(Analysis, Analysis.sample_id == Sample.id) + ) + + @staticmethod + def _get_user_with_plates() -> Query: + return select(User).options(selectinload(User.plates)) From aad7b0d418a0efd299470e057190da5e2db199ef Mon Sep 17 00:00:00 2001 From: ahdamin Date: Fri, 11 Oct 2024 16:20:22 +0200 Subject: [PATCH 61/65] Add: _get_samples_with_analyses --- genotype_api/database/crud/read.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/genotype_api/database/crud/read.py b/genotype_api/database/crud/read.py index fd6d32a..8cd5eda 100644 --- a/genotype_api/database/crud/read.py +++ b/genotype_api/database/crud/read.py @@ -25,11 +25,7 @@ filter_plates_by_id, filter_plates_by_plate_id, ) -from genotype_api.database.filters.sample_filters import ( - SampleFilter, - apply_sample_filter, - filter_samples_by_id, -) +from genotype_api.database.filters.sample_filters import filter_samples_by_id from genotype_api.database.filters.snp_filters import SNPFilter, apply_snp_filter from genotype_api.database.filters.user_filters import ( UserFilter, @@ 
-255,6 +251,10 @@ def _get_samples_with_analyses_and_genotypes() -> Query: .join(Analysis, Analysis.sample_id == Sample.id) ) + @staticmethod + def _get_samples_with_analyses() -> Query: + return select(Sample).options(selectinload(Sample.analyses)) + @staticmethod def _get_user_with_plates() -> Query: return select(User).options(selectinload(User.plates)) From 8fd9f2155104a686fcce73b582f187181028bfbd Mon Sep 17 00:00:00 2001 From: ahdamin Date: Fri, 11 Oct 2024 16:23:15 +0200 Subject: [PATCH 62/65] Use: select & async fetch functions from BaseHandler --- tests/database/filters/test_plate_filters.py | 11 ++- tests/database/filters/test_sample_filters.py | 89 ++++++++----------- tests/database/filters/test_snp_filters.py | 14 +-- 3 files changed, 51 insertions(+), 63 deletions(-) diff --git a/tests/database/filters/test_plate_filters.py b/tests/database/filters/test_plate_filters.py index 9434568..28ad560 100644 --- a/tests/database/filters/test_plate_filters.py +++ b/tests/database/filters/test_plate_filters.py @@ -1,5 +1,6 @@ """Module to test the plate filters.""" +from sqlalchemy.future import select from sqlalchemy.orm import Query from genotype_api.database.filters.plate_filters import ( @@ -16,13 +17,12 @@ async def test_filter_plates_by_id(base_store: Store, test_plate: Plate): # GIVEN a store with a plate # WHEN filtering plates by id - query: Query = base_store._get_query(Plate) + query: Query = select(Plate) filter_functions = filter_plates_by_id(entry_id=test_plate.id, plates=query) filtered_query = apply_plate_filter( plates=query, filter_functions=filter_functions, entry_id=test_plate.id ) - result = await base_store.session.execute(filtered_query) - plate: Plate = result.scalars().first() + plate: Plate = await base_store.fetch_first_row(filtered_query) # THEN the plate is returned assert plate @@ -34,13 +34,12 @@ async def test_filter_plates_by_plate_id(base_store: Store, test_plate: Plate): # GIVEN a store with a plate # WHEN filtering plates by plate id - query: Query = base_store._get_query(Plate) + query: Query = select(Plate) filter_functions = filter_plates_by_plate_id(plate_id=test_plate.id, plates=query) filtered_query = apply_plate_filter( plates=query, filter_functions=filter_functions, plate_id=test_plate.id ) - result = await base_store.session.execute(filtered_query) - plate: Plate = result.scalars().first() + plate: Plate = await base_store.fetch_first_row(filtered_query) # THEN the plate is returned assert plate diff --git a/tests/database/filters/test_sample_filters.py b/tests/database/filters/test_sample_filters.py index dd95566..b7391ab 100644 --- a/tests/database/filters/test_sample_filters.py +++ b/tests/database/filters/test_sample_filters.py @@ -1,5 +1,6 @@ """Module to test the sample filters.""" +from sqlalchemy.future import select from sqlalchemy.orm import Query from genotype_api.database.filters.sample_filters import ( @@ -21,13 +22,9 @@ async def test_filter_samples_by_id(base_store: Store, test_sample: Sample): # GIVEN a store with a sample # WHEN filtering samples by id - query: Query = base_store._get_query(Sample) - filter_functions = filter_samples_by_id(sample_id=test_sample.id, samples=query) - filtered_query = apply_sample_filter( - samples=query, filter_functions=filter_functions, sample_id=test_sample.id - ) - result = await base_store.session.execute(filtered_query) - sample: Sample = result.scalars().first() + query: Query = select(Sample) + filtered_query = filter_samples_by_id(sample_id=test_sample.id, samples=query) + sample: 
Sample = await base_store.fetch_first_row(filtered_query) # THEN the sample is returned assert sample @@ -39,13 +36,12 @@ async def test_filter_samples_contain_id(base_store: Store, test_sample: Sample) # GIVEN a store with a sample # WHEN filtering samples by id - query: Query = base_store._get_query(Sample) + query: Query = select(Sample) filter_functions = filter_samples_contain_id(sample_id=test_sample.id, samples=query) filtered_query = apply_sample_filter( samples=query, filter_functions=filter_functions, sample_id=test_sample.id ) - result = await base_store.session.execute(filtered_query) - sample: Sample = result.scalars().first() + sample: Sample = await base_store.fetch_first_row(filtered_query) # THEN the sample is returned assert sample @@ -55,16 +51,16 @@ async def test_filter_samples_contain_id(base_store: Store, test_sample: Sample) async def test_filter_samples_contain_id_when_no_id(base_store: Store, test_sample: Sample): """Test filtering samples by id when no id is provided.""" # GIVEN a store with two samples - assert len(await base_store._get_query(Sample).all()) == 2 + query: Query = select(Sample) + samples: list[Sample] = await base_store.fetch_all_rows(query) + assert len(samples) == 2 # WHEN filtering samples by id - query: Query = base_store._get_query(Sample) filter_functions = filter_samples_contain_id(sample_id=None, samples=query) filtered_query = apply_sample_filter( samples=query, filter_functions=filter_functions, sample_id=None ) - result = await base_store.session.execute(filtered_query) - samples: list[Sample] = result.scalars().all() + samples: list[Sample] = await base_store.fetch_all_rows(filtered_query) # THEN all samples are returned assert len(samples) == 2 @@ -75,18 +71,18 @@ async def test_filter_samples_having_comment( ): """Test filtering samples by having comment.""" # GIVEN a store with samples having a comment and one without - assert len(await base_store._get_query(Sample).all()) == 2 + query: Query = select(Sample) + samples: list[Sample] = await base_store.fetch_all_rows(query) + assert len(samples) == 2 sample_without_comment: Sample = test_sample sample_without_comment.comment = None sample_without_comment.id = "sample_without_status" await helpers.ensure_sample(store=base_store, sample=sample_without_comment) # WHEN filtering samples by having comment - query: Query = base_store._get_query(Sample) filter_functions = filter_samples_having_comment(samples=query, is_commented=True) filtered_query = apply_sample_filter(samples=query, filter_functions=filter_functions) - result = await base_store.session.execute(filtered_query) - samples: list[Sample] = result.scalars().all() + samples: list[Sample] = await base_store.fetch_all_rows(filtered_query) # THEN samples with comments are returned assert samples @@ -97,14 +93,14 @@ async def test_filter_samples_having_comment( async def test_filter_samples_having_comment_none_provided(base_store: Store, test_sample: Sample): """Test filtering samples by having comment.""" # GIVEN a store with samples having a comment and one without - assert len(await base_store._get_query(Sample).all()) == 2 + query: Query = select(Sample) + samples: list[Sample] = await base_store.fetch_all_rows(query) + assert len(samples) == 2 # WHEN filtering samples by having comment - query: Query = base_store._get_query(Sample) filter_functions = filter_samples_having_comment(samples=query, is_commented=None) filtered_query = apply_sample_filter(samples=query, filter_functions=filter_functions) - result = await 
base_store.session.execute(filtered_query)
-    samples: list[Sample] = result.scalars().all()
+    samples: list[Sample] = await base_store.fetch_all_rows(filtered_query)

     # THEN the sample is returned
     assert len(samples) == 2
@@ -121,11 +117,9 @@ async def test_filter_samples_without_status(
     await helpers.ensure_sample(store=base_store, sample=sample_without_status)

     # WHEN filtering samples by having a status
-    query: Query = base_store._get_query(Sample)
-    filter_functions = filter_samples_without_status(samples=query, is_missing=True)
-    filtered_query = apply_sample_filter(samples=query, filter_functions=filter_functions)
-    result = await base_store.session.execute(filtered_query)
-    samples: list[Sample] = result.scalars().all()
+    query: Query = select(Sample)
+    filtered_query = filter_samples_without_status(samples=query, is_missing=True)
+    samples: list[Sample] = await base_store.fetch_all_rows(filtered_query)

     # THEN the samples without a status are returned
     assert samples
@@ -136,14 +130,15 @@ async def test_filter_samples_without_status_none_provided(base_store: Store, te
     """Test filtering samples by having status."""
     # GIVEN a store with a sample that has a status
-    assert len(await base_store._get_query(Sample).all()) == 2
+    query: Query = select(Sample)
+    samples: list[Sample] = await base_store.fetch_all_rows(query)
+    assert len(samples) == 2

     # WHEN filtering samples by having a status
-    query: Query = base_store._get_query(Sample)
+    query: Query = select(Sample)
     filter_functions = filter_samples_without_status(samples=query, is_missing=None)
     filtered_query = apply_sample_filter(samples=query, filter_functions=filter_functions)
-    result = await base_store.session.execute(filtered_query)
-    samples: list[Sample] = result.scalars().all()
+    samples: list[Sample] = await base_store.fetch_all_rows(filtered_query)

     # THEN all samples are returned
     assert len(samples) == 2
@@ -156,13 +151,9 @@ async def test_filter_samples_analysed_on_plate(
     # GIVEN a store with analysed samples

     # WHEN filtering samples analysed on a plate
-    query: Query = base_store._get_join_analysis_on_sample()
-    filter_functions = filter_samples_analysed_on_plate(samples=query, plate_id=test_plate.id)
-    filtered_query = apply_sample_filter(
-        samples=query, filter_functions=filter_functions, plate_id=test_plate.id
-    )
-    result = await base_store.session.execute(filtered_query)
-    sample: Sample = result.scalars().first()
+    query: Query = base_store._get_samples_with_analyses()
+    filtered_query = filter_samples_analysed_on_plate(samples=query, plate_id=test_plate.id)
+    sample: Sample = await base_store.fetch_first_row(filtered_query)

     # THEN one sample is returned
     assert sample.analyses[0].plate_id == test_plate.id
@@ -174,16 +165,14 @@ async def test_filter_samples_analysed_on_plate_none_provided(
 ):
     """Test filtering samples by having comment."""
     # GIVEN a store with analysed samples
-    assert len(await base_store._get_query(Sample).all()) == 2
+    query: Query = select(Sample)
+    samples: list[Sample] = await base_store.fetch_all_rows(query)
+    assert len(samples) == 2

     # WHEN filtering samples analysed on a plate
-    query: Query = base_store._get_join_analysis_on_sample()
-    filter_functions = filter_samples_analysed_on_plate(samples=query, plate_id=None)
-    filtered_query = apply_sample_filter(
-        samples=query, filter_functions=filter_functions, plate_id=None
-    )
-    result = await base_store.session.execute(filtered_query)
-    samples: list[Sample] = result.scalars().all()
+    query: Query = 
base_store._get_samples_with_analyses()
+    filtered_query = filter_samples_analysed_on_plate(samples=query, plate_id=None)
+    samples: list[Sample] = await base_store.fetch_all_rows(filtered_query)

     # THEN all samples are returned
     assert len(samples) == 2

@@ -192,13 +181,13 @@ async def test_add_skip_and_limit(base_store: Store, test_sample: Sample):
     """Test add_skip_and_limit function."""

     # GIVEN a store with two samples
-    assert len(await base_store._get_query(Sample).all()) == 2
+    query: Query = select(Sample)
+    samples: list[Sample] = await base_store.fetch_all_rows(query)
+    assert len(samples) == 2

     # WHEN adding skip and limit to the query
-    query: Query = base_store._get_query(Sample)
     filtered_query = add_skip_and_limit(query, skip=0, limit=1)
-    result = await base_store.session.execute(filtered_query)
-    samples: list[Sample] = result.scalars().all()
+    samples: list[Sample] = await base_store.fetch_all_rows(filtered_query)

     # THEN one sample is returned
     assert samples

diff --git a/tests/database/filters/test_snp_filters.py b/tests/database/filters/test_snp_filters.py
index f09b952..85c0ada 100644
--- a/tests/database/filters/test_snp_filters.py
+++ b/tests/database/filters/test_snp_filters.py
@@ -1,5 +1,6 @@
 """Module to test the SNP filters."""

+from sqlalchemy.future import select
 from sqlalchemy.orm import Query

 from genotype_api.database.filters.snp_filters import (
@@ -17,13 +18,12 @@ async def test_filter_snps_by_id(base_store: Store, test_snp: SNP):
     # GIVEN a store with a SNP

     # WHEN filtering a SNP by id
-    query: Query = base_store._get_query(SNP)
+    query: Query = select(SNP)
     filter_functions = filter_snps_by_id(snp_id=test_snp.id, snps=query)
     filtered_query = apply_snp_filter(
         snps=query, filter_functions=filter_functions, snp_id=test_snp.id
     )
-    result = await base_store.session.execute(filtered_query)
-    snp: SNP = result.scalars().first()
+    snp: SNP = await base_store.fetch_first_row(filtered_query)

     # THEN the SNP is returned
     assert snp

@@ -34,13 +34,13 @@ async def test_add_skip_and_limit(base_store: Store, test_snp: SNP):
     """Test add_skip_and_limit function."""

     # GIVEN a store with two SNPs
-    assert len(await base_store._get_query(SNP).all()) == 2
+    query: Query = select(SNP)
+    snps: list[SNP] = await base_store.fetch_all_rows(query)
+    assert len(snps) == 2

     # WHEN adding skip and limit to the query
-    query: Query = base_store._get_query(SNP)
     filtered_query = add_skip_and_limit(query, skip=0, limit=1)
-    result = await base_store.session.execute(filtered_query)
-    snps: list[SNP] = result.scalars().all()
+    snps: list[SNP] = await base_store.fetch_all_rows(filtered_query)

     # THEN one SNP is returned
     assert snps

From aebc195d8d26841296703b6d06d645343df2f53e Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Fri, 11 Oct 2024 16:39:33 +0200
Subject: [PATCH 63/65] Remove unnecessary filter_functions

---
 tests/database/filters/test_plate_filters.py  | 11 ++---------
 tests/database/filters/test_sample_filters.py | 19 +++++--------------
 tests/database/filters/test_snp_filters.py    |  5 +----
 3 files changed, 8 insertions(+), 27 deletions(-)

diff --git a/tests/database/filters/test_plate_filters.py b/tests/database/filters/test_plate_filters.py
index 28ad560..80bf02c 100644
--- a/tests/database/filters/test_plate_filters.py
+++ b/tests/database/filters/test_plate_filters.py
@@ -4,7 +4,6 @@
 from sqlalchemy.orm import Query

 from genotype_api.database.filters.plate_filters import (
-    apply_plate_filter,
     filter_plates_by_id,
     filter_plates_by_plate_id,
) @@ -18,10 +17,7 @@ async def test_filter_plates_by_id(base_store: Store, test_plate: Plate): # WHEN filtering plates by id query: Query = select(Plate) - filter_functions = filter_plates_by_id(entry_id=test_plate.id, plates=query) - filtered_query = apply_plate_filter( - plates=query, filter_functions=filter_functions, entry_id=test_plate.id - ) + filtered_query = filter_plates_by_id(entry_id=test_plate.id, plates=query) plate: Plate = await base_store.fetch_first_row(filtered_query) # THEN the plate is returned @@ -35,10 +31,7 @@ async def test_filter_plates_by_plate_id(base_store: Store, test_plate: Plate): # WHEN filtering plates by plate id query: Query = select(Plate) - filter_functions = filter_plates_by_plate_id(plate_id=test_plate.id, plates=query) - filtered_query = apply_plate_filter( - plates=query, filter_functions=filter_functions, plate_id=test_plate.id - ) + filtered_query = filter_plates_by_plate_id(plate_id=test_plate.id, plates=query) plate: Plate = await base_store.fetch_first_row(filtered_query) # THEN the plate is returned diff --git a/tests/database/filters/test_sample_filters.py b/tests/database/filters/test_sample_filters.py index b7391ab..1b99840 100644 --- a/tests/database/filters/test_sample_filters.py +++ b/tests/database/filters/test_sample_filters.py @@ -37,10 +37,7 @@ async def test_filter_samples_contain_id(base_store: Store, test_sample: Sample) # WHEN filtering samples by id query: Query = select(Sample) - filter_functions = filter_samples_contain_id(sample_id=test_sample.id, samples=query) - filtered_query = apply_sample_filter( - samples=query, filter_functions=filter_functions, sample_id=test_sample.id - ) + filtered_query = filter_samples_contain_id(sample_id=test_sample.id, samples=query) sample: Sample = await base_store.fetch_first_row(filtered_query) # THEN the sample is returned @@ -56,10 +53,7 @@ async def test_filter_samples_contain_id_when_no_id(base_store: Store, test_samp assert len(samples) == 2 # WHEN filtering samples by id - filter_functions = filter_samples_contain_id(sample_id=None, samples=query) - filtered_query = apply_sample_filter( - samples=query, filter_functions=filter_functions, sample_id=None - ) + filtered_query = filter_samples_contain_id(sample_id=None, samples=query) samples: list[Sample] = await base_store.fetch_all_rows(filtered_query) # THEN all samples are returned @@ -80,8 +74,7 @@ async def test_filter_samples_having_comment( await helpers.ensure_sample(store=base_store, sample=sample_without_comment) # WHEN filtering samples by having comment - filter_functions = filter_samples_having_comment(samples=query, is_commented=True) - filtered_query = apply_sample_filter(samples=query, filter_functions=filter_functions) + filtered_query = filter_samples_having_comment(samples=query, is_commented=True) samples: list[Sample] = await base_store.fetch_all_rows(filtered_query) # THEN samples with comments are returned @@ -98,8 +91,7 @@ async def test_filter_samples_having_comment_none_provided(base_store: Store, te assert len(samples) == 2 # WHEN filtering samples by having comment - filter_functions = filter_samples_having_comment(samples=query, is_commented=None) - filtered_query = apply_sample_filter(samples=query, filter_functions=filter_functions) + filtered_query = filter_samples_having_comment(samples=query, is_commented=None) samples: list[Sample] = await base_store.fetch_all_rows(filtered_query) # THEN the sample is returned @@ -136,8 +128,7 @@ async def test_filter_samples_without_status_none_provided(base_store: 
Store, te

     # WHEN filtering samples by having a status
     query: Query = select(Sample)
-    filter_functions = filter_samples_without_status(samples=query, is_missing=None)
-    filtered_query = apply_sample_filter(samples=query, filter_functions=filter_functions)
+    filtered_query = filter_samples_without_status(samples=query, is_missing=None)
     samples: list[Sample] = await base_store.fetch_all_rows(filtered_query)

     # THEN all samples are returned
diff --git a/tests/database/filters/test_snp_filters.py b/tests/database/filters/test_snp_filters.py
index 85c0ada..1d7d106 100644
--- a/tests/database/filters/test_snp_filters.py
+++ b/tests/database/filters/test_snp_filters.py
@@ -19,10 +19,7 @@ async def test_filter_snps_by_id(base_store: Store, test_snp: SNP):

     # WHEN filtering a SNP by id
     query: Query = select(SNP)
-    filter_functions = filter_snps_by_id(snp_id=test_snp.id, snps=query)
-    filtered_query = apply_snp_filter(
-        snps=query, filter_functions=filter_functions, snp_id=test_snp.id
-    )
+    filtered_query = filter_snps_by_id(snp_id=test_snp.id, snps=query)
     snp: SNP = await base_store.fetch_first_row(filtered_query)

     # THEN the SNP is returned

From e356387e643ae68876960d8d28e14ff6df91980e Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Fri, 11 Oct 2024 16:57:23 +0200
Subject: [PATCH 64/65] fix: missing parentheses in query

---
 genotype_api/database/crud/read.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/genotype_api/database/crud/read.py b/genotype_api/database/crud/read.py
index 8cd5eda..ef2c9de 100644
--- a/genotype_api/database/crud/read.py
+++ b/genotype_api/database/crud/read.py
@@ -98,7 +98,7 @@ async def get_plate_by_id(self, plate_id: int) -> Plate:
         return await self.fetch_first_row(filtered_query)

     async def get_plate_by_plate_id(self, plate_id: str) -> Plate:
-        plates: Query = self._get_plate_with_analyses
+        plates: Query = self._get_plate_with_analyses()
         filtered_query = filter_plates_by_plate_id(plate_id=plate_id, plates=plates)
         return await self.fetch_first_row(filtered_query)

From 43d86818bec9aa51318980e1a20ecd07945cdfe2 Mon Sep 17 00:00:00 2001
From: ahdamin
Date: Fri, 11 Oct 2024 17:32:17 +0200
Subject: [PATCH 65/65] Make delete awaitable

---
 genotype_api/database/crud/delete.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/genotype_api/database/crud/delete.py b/genotype_api/database/crud/delete.py
index 57557c9..e48752c 100644
--- a/genotype_api/database/crud/delete.py
+++ b/genotype_api/database/crud/delete.py
@@ -11,19 +11,19 @@ class DeleteHandler(BaseHandler):

     async def delete_analysis(self, analysis: Analysis) -> None:
-        self.session.delete(analysis)
+        await self.session.delete(analysis)
         await self.session.commit()

     async def delete_plate(self, plate: Plate) -> None:
-        self.session.delete(plate)
+        await self.session.delete(plate)
         await self.session.commit()

     async def delete_sample(self, sample: Sample) -> None:
-        self.session.delete(sample)
+        await self.session.delete(sample)
         await self.session.commit()

     async def delete_user(self, user: User) -> None:
-        self.session.delete(user)
+        await self.session.delete(user)
         await self.session.commit()

     async def delete_snps(self) -> int:
@@ -31,6 +31,6 @@ async def delete_snps(self) -> int:
         query = select(SNP)
         snps: list[SNP] = await self.fetch_all_rows(query)
         count: int = len(snps)
         for snp in snps:
-            self.session.delete(snp)
+            await self.session.delete(snp)
         await self.session.commit()
         return count
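A closing note on PATCH 65: unlike the synchronous Session.delete(), AsyncSession.delete() is a coroutine, because cascade handling may need to load related rows and implicit IO is not allowed under asyncio, so each call must be awaited. A minimal sketch of the resulting call pattern (remove_sample is a hypothetical helper, not part of the series):

    async def remove_sample(store: Store, sample: Sample) -> None:
        # Both the delete and the commit are coroutines on AsyncSession.
        await store.session.delete(sample)
        await store.session.commit()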